lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
src/error.rs
crumblingstatue/amf
4b728dc8b2ea7b8389616c8de45d7c402cf387e7
use std::error; use std::fmt; use std::io; use std::string; #[derive(Debug)] pub enum DecodeError { Io(io::Error), String(string::FromUtf8Error), Unknown { marker: u8, }, Unsupported { marker: u8, }, UnexpectedObjectEnd, CircularReference { index: usize, }, OutOfRangeReference { index: usize, }, NonZeroTimeZone { offset: i16, }, InvalidDate { millis: f64, }, ExternalizableType { name: String, }, } impl error::Error for DecodeError { fn source(&self) -> Option<&(dyn error::Error + 'static)> { use self::DecodeError::*; match *self { Io(ref x) => x.source(), String(ref x) => x.source(), _ => None, } } } impl fmt::Display for DecodeError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use self::DecodeError::*; match *self { Io(ref x) => write!(f, "I/O Error: {}", x), String(ref x) => write!(f, "Invalid String: {}", x), Unknown { marker } => write!(f, "Unknown marker: {}", marker), Unsupported { marker } => write!(f, "Unsupported type: maker={}", marker), UnexpectedObjectEnd => write!(f, "Unexpected occurrence of object-end-marker"), CircularReference { index } => { write!(f, "Circular references are unsupported: index={}", index) } OutOfRangeReference { index } => write!(f, "Reference index {} is out-of-range", index), NonZeroTimeZone { offset } => { write!(f, "Non zero time zone offset {} is unsupported", offset) } InvalidDate { millis } => write!(f, "Invalid date value {}", millis), ExternalizableType { ref name } => { write!(f, "Externalizable type {:?} is unsupported", name) } } } } impl PartialEq for DecodeError { fn eq(&self, other: &Self) -> bool { use self::DecodeError::*; match (self, other) { (&Unknown { marker: x }, &Unknown { marker: y }) => x == y, (&Unsupported { marker: x }, &Unsupported { marker: y }) => x == y, (&UnexpectedObjectEnd, &UnexpectedObjectEnd) => true, (&CircularReference { index: x }, &CircularReference { index: y }) => x == y, (&OutOfRangeReference { index: x }, &OutOfRangeReference { index: y }) => x == y, (&NonZeroTimeZone { 
offset: x }, &NonZeroTimeZone { offset: y }) => x == y, (&InvalidDate { millis: x }, &InvalidDate { millis: y }) => x == y, (&ExternalizableType { name: ref x }, &ExternalizableType { name: ref y }) => x == y, _ => false, } } } impl From<io::Error> for DecodeError { fn from(f: io::Error) -> Self { DecodeError::Io(f) } } impl From<string::FromUtf8Error> for DecodeError { fn from(f: string::FromUtf8Error) -> Self { DecodeError::String(f) } }
use std::error; use std::fmt; use std::io; use std::string; #[derive(Debug)] pub enum DecodeError { Io(io::Error), String(string::FromUtf8Error), Unknown { marker: u8, }, Unsupported { marker: u8, }, UnexpectedObjectEnd, CircularReference { index: usize,
rReference { index: x }, &CircularReference { index: y }) => x == y, (&OutOfRangeReference { index: x }, &OutOfRangeReference { index: y }) => x == y, (&NonZeroTimeZone { offset: x }, &NonZeroTimeZone { offset: y }) => x == y, (&InvalidDate { millis: x }, &InvalidDate { millis: y }) => x == y, (&ExternalizableType { name: ref x }, &ExternalizableType { name: ref y }) => x == y, _ => false, } } } impl From<io::Error> for DecodeError { fn from(f: io::Error) -> Self { DecodeError::Io(f) } } impl From<string::FromUtf8Error> for DecodeError { fn from(f: string::FromUtf8Error) -> Self { DecodeError::String(f) } }
}, OutOfRangeReference { index: usize, }, NonZeroTimeZone { offset: i16, }, InvalidDate { millis: f64, }, ExternalizableType { name: String, }, } impl error::Error for DecodeError { fn source(&self) -> Option<&(dyn error::Error + 'static)> { use self::DecodeError::*; match *self { Io(ref x) => x.source(), String(ref x) => x.source(), _ => None, } } } impl fmt::Display for DecodeError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use self::DecodeError::*; match *self { Io(ref x) => write!(f, "I/O Error: {}", x), String(ref x) => write!(f, "Invalid String: {}", x), Unknown { marker } => write!(f, "Unknown marker: {}", marker), Unsupported { marker } => write!(f, "Unsupported type: maker={}", marker), UnexpectedObjectEnd => write!(f, "Unexpected occurrence of object-end-marker"), CircularReference { index } => { write!(f, "Circular references are unsupported: index={}", index) } OutOfRangeReference { index } => write!(f, "Reference index {} is out-of-range", index), NonZeroTimeZone { offset } => { write!(f, "Non zero time zone offset {} is unsupported", offset) } InvalidDate { millis } => write!(f, "Invalid date value {}", millis), ExternalizableType { ref name } => { write!(f, "Externalizable type {:?} is unsupported", name) } } } } impl PartialEq for DecodeError { fn eq(&self, other: &Self) -> bool { use self::DecodeError::*; match (self, other) { (&Unknown { marker: x }, &Unknown { marker: y }) => x == y, (&Unsupported { marker: x }, &Unsupported { marker: y }) => x == y, (&UnexpectedObjectEnd, &UnexpectedObjectEnd) => true, (&Circula
random
[ { "content": "#[derive(Debug)]\n\nenum SizeOrIndex {\n\n Size(usize),\n\n Index(usize),\n\n}\n\n\n\n/// AMF3 decoder.\n\n#[derive(Debug)]\n\npub struct Decoder<R> {\n\n inner: R,\n\n traits: Vec<Trait>,\n\n strings: Vec<String>,\n\n complexes: Vec<Value>,\n\n}\n\nimpl<R> Decoder<R> {\n\n /// Unwraps this `Decoder`, returning the underlying reader.\n\n pub fn into_inner(self) -> R {\n\n self.inner\n\n }\n\n /// Returns an immutable reference to the underlying reader.\n\n pub fn inner(&mut self) -> &R {\n", "file_path": "src/amf3/decode.rs", "rank": 0, "score": 59114.785185991765 }, { "content": "/// Makes a `Number` value.\n\npub fn number<T>(t: T) -> Value\n\nwhere\n\n f64: From<T>,\n\n{\n\n Value::Number(From::from(t))\n\n}\n\n\n", "file_path": "src/amf0/mod.rs", "rank": 1, "score": 26334.964269207278 }, { "content": "/// Makes a `String` value.\n\npub fn string<T>(t: T) -> Value\n\nwhere\n\n String: From<T>,\n\n{\n\n Value::String(From::from(t))\n\n}\n\n\n", "file_path": "src/amf0/mod.rs", "rank": 2, "score": 26334.964269207278 }, { "content": "/// Makes an anonymous `Object` value.\n\npub fn object<I, K>(entries: I) -> Value\n\nwhere\n\n I: Iterator<Item = (K, Value)>,\n\n String: From<K>,\n\n{\n\n Value::Object {\n\n class_name: None,\n\n entries: entries\n\n .map(|(k, v)| Pair {\n\n key: From::from(k),\n\n value: v,\n\n })\n\n .collect(),\n\n }\n\n}\n\n\n", "file_path": "src/amf0/mod.rs", "rank": 3, "score": 25871.077006974192 }, { "content": "/// Make a strict `Array` value.\n\npub fn array(entries: Vec<Value>) -> Value {\n\n Value::Array { entries }\n\n}\n", "file_path": "src/amf0/mod.rs", "rank": 4, "score": 25421.662701919257 }, { "content": "pub use self::decode::Decoder;\n\npub use self::encode::Encoder;\n\n\n\nmod decode;\n\nmod encode;\n\n\n\nmod marker {\n\n pub const NUMBER: u8 = 0x00;\n\n pub const BOOLEAN: u8 = 0x01;\n\n pub const STRING: u8 = 0x02;\n\n pub const OBJECT: u8 = 0x03;\n\n pub const MOVIECLIP: u8 = 0x04; // reserved, not 
supported\n\n pub const NULL: u8 = 0x05;\n\n pub const UNDEFINED: u8 = 0x06;\n\n pub const REFERENCE: u8 = 0x07;\n\n pub const ECMA_ARRAY: u8 = 0x08;\n\n pub const OBJECT_END_MARKER: u8 = 0x09;\n\n pub const STRICT_ARRAY: u8 = 0x0A;\n\n pub const DATE: u8 = 0x0B;\n\n pub const LONG_STRING: u8 = 0x0C;\n", "file_path": "src/amf0/mod.rs", "rank": 7, "score": 12.643354504041724 }, { "content": "pub use self::encode::Encoder;\n\n\n\nmod decode;\n\nmod encode;\n\n\n\nmod marker {\n\n pub const UNDEFINED: u8 = 0x00;\n\n pub const NULL: u8 = 0x01;\n\n pub const FALSE: u8 = 0x02;\n\n pub const TRUE: u8 = 0x03;\n\n pub const INTEGER: u8 = 0x04;\n\n pub const DOUBLE: u8 = 0x05;\n\n pub const STRING: u8 = 0x06;\n\n pub const XML_DOC: u8 = 0x07;\n\n pub const DATE: u8 = 0x08;\n\n pub const ARRAY: u8 = 0x09;\n\n pub const OBJECT: u8 = 0x0A;\n\n pub const XML: u8 = 0x0B;\n\n pub const BYTE_ARRAY: u8 = 0x0C;\n\n pub const VECTOR_INT: u8 = 0x0D;\n", "file_path": "src/amf3/mod.rs", "rank": 9, "score": 11.241452671439818 }, { "content": " pub const UNSUPPORTED: u8 = 0x0D;\n\n pub const RECORDSET: u8 = 0x0E; // reserved, not supported\n\n pub const XML_DOCUMENT: u8 = 0x0F;\n\n pub const TYPED_OBJECT: u8 = 0x10;\n\n pub const AVMPLUS_OBJECT: u8 = 0x11;\n\n}\n\n\n\n/// AMF0 value.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use amf::amf0::Value;\n\n///\n\n/// // Encodes a AMF3's number\n\n/// let number = Value::from(Value::Number(12.3));\n\n/// let mut buf = Vec::new();\n\n/// number.write_to(&mut buf).unwrap();\n\n///\n\n/// // Decodes above number\n\n/// let decoded = Value::read_from(&mut &buf[..]).unwrap();\n", "file_path": "src/amf0/mod.rs", "rank": 11, "score": 10.358093569449402 }, { "content": " marker::OBJECT_END_MARKER => Err(DecodeError::UnexpectedObjectEnd),\n\n marker::STRICT_ARRAY => self.decode_strict_array(),\n\n marker::DATE => self.decode_date(),\n\n marker::LONG_STRING => self.decode_long_string(),\n\n marker::UNSUPPORTED => Err(DecodeError::Unsupported { marker 
}),\n\n marker::RECORDSET => Err(DecodeError::Unsupported { marker }),\n\n marker::XML_DOCUMENT => self.decode_xml_document(),\n\n marker::TYPED_OBJECT => self.decode_typed_object(),\n\n marker::AVMPLUS_OBJECT => self.decode_avmplus(),\n\n _ => Err(DecodeError::Unknown { marker }),\n\n }\n\n }\n\n fn decode_number(&mut self) -> DecodeResult<Value> {\n\n let n = self.inner.read_f64::<BigEndian>()?;\n\n Ok(Value::Number(n))\n\n }\n\n fn decode_boolean(&mut self) -> DecodeResult<Value> {\n\n let b = self.inner.read_u8()? != 0;\n\n Ok(Value::Boolean(b))\n\n }\n", "file_path": "src/amf0/decode.rs", "rank": 12, "score": 9.297256561657928 }, { "content": "use super::marker;\n\nuse super::Value;\n\nuse crate::Pair;\n\nuse byteorder::{BigEndian, WriteBytesExt};\n\nuse std::io;\n\nuse std::time;\n\n\n\n/// AMF3 encoder.\n\n#[derive(Debug)]\n\npub struct Encoder<W> {\n\n inner: W,\n\n}\n\nimpl<W> Encoder<W> {\n\n /// Unwraps this `Encoder`, returning the underlying writer.\n\n pub fn into_inner(self) -> W {\n\n self.inner\n\n }\n\n /// Returns an immutable reference to the underlying writer.\n\n pub fn inner(&self) -> &W {\n\n &self.inner\n", "file_path": "src/amf3/encode.rs", "rank": 13, "score": 8.5459251502609 }, { "content": " /// > Note that object reference indices are local to each message body.\n\n /// > Serializers and deserializers must reset reference indices to 0 each time a new message is processed.\n\n /// >\n\n /// > [AMF 0 Specification: 4.1.3 AMF Message](http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf)\n\n pub fn clear_reference_table(&mut self) {\n\n self.complexes.clear();\n\n }\n\n\n\n fn decode_value(&mut self) -> DecodeResult<Value> {\n\n let marker = self.inner.read_u8()?;\n\n match marker {\n\n marker::NUMBER => self.decode_number(),\n\n marker::BOOLEAN => self.decode_boolean(),\n\n marker::STRING => self.decode_string(),\n\n marker::OBJECT => self.decode_object(),\n\n marker::MOVIECLIP => Err(DecodeError::Unsupported { marker }),\n\n 
marker::NULL => Ok(Value::Null),\n\n marker::UNDEFINED => Ok(Value::Undefined),\n\n marker::REFERENCE => self.decode_reference(),\n\n marker::ECMA_ARRAY => self.decode_ecma_array(),\n", "file_path": "src/amf0/decode.rs", "rank": 14, "score": 8.49574543577352 }, { "content": " pub const VECTOR_UINT: u8 = 0xE;\n\n pub const VECTOR_DOUBLE: u8 = 0x0F;\n\n pub const VECTOR_OBJECT: u8 = 0x10;\n\n pub const DICTIONARY: u8 = 0x11;\n\n}\n\n\n\n/// AMF3 value.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use amf::amf3::Value;\n\n///\n\n/// // Encodes a AMF3's integer\n\n/// let integer = Value::from(Value::Integer(123));\n\n/// let mut buf = Vec::new();\n\n/// integer.write_to(&mut buf).unwrap();\n\n///\n\n/// // Decodes above integer\n\n/// let decoded = Value::read_from(&mut &buf[..]).unwrap();\n\n/// assert_eq!(integer, decoded);\n", "file_path": "src/amf3/mod.rs", "rank": 15, "score": 8.37092535957046 }, { "content": "use super::marker;\n\nuse super::Value;\n\nuse crate::amf3;\n\nuse crate::Pair;\n\nuse byteorder::{BigEndian, WriteBytesExt};\n\nuse std::io;\n\nuse std::time;\n\n\n\n/// AMF0 encoder.\n\n#[derive(Debug)]\n\npub struct Encoder<W> {\n\n inner: W,\n\n}\n\nimpl<W> Encoder<W> {\n\n /// Unwraps this `Encoder`, returning the underlying writer.\n\n pub fn into_inner(self) -> W {\n\n self.inner\n\n }\n\n}\n\nimpl<W> Encoder<W>\n", "file_path": "src/amf0/encode.rs", "rank": 16, "score": 8.303755447801612 }, { "content": "use super::marker;\n\nuse super::Value;\n\nuse crate::amf3;\n\nuse crate::error::DecodeError;\n\nuse crate::{DecodeResult, Pair};\n\nuse byteorder::{BigEndian, ReadBytesExt};\n\nuse std::io;\n\nuse std::time;\n\n\n\n/// AMF0 decoder.\n\n#[derive(Debug)]\n\npub struct Decoder<R> {\n\n inner: R,\n\n complexes: Vec<Value>,\n\n}\n\nimpl<R> Decoder<R> {\n\n /// Unwraps this `Decoder`, returning the underlying reader.\n\n pub fn into_inner(self) -> R {\n\n self.inner\n\n }\n", "file_path": "src/amf0/decode.rs", "rank": 18, "score": 8.126144271149165 }, { 
"content": " self.encode(&p.value)?;\n\n }\n\n self.inner.write_u16::<BigEndian>(0)?;\n\n self.inner.write_u8(marker::OBJECT_END_MARKER)?;\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #![allow(clippy::approx_constant)]\n\n use super::super::Value;\n\n use crate::amf3;\n\n use crate::Pair;\n\n use std::iter;\n\n use std::time;\n\n\n\n macro_rules! encode_eq {\n\n ($value:expr, $file:expr) => {{\n\n let expected = include_bytes!(concat!(\"../testdata/\", $file));\n", "file_path": "src/amf0/encode.rs", "rank": 19, "score": 7.959269104783198 }, { "content": " fn decode_complex_type<F>(&mut self, f: F) -> DecodeResult<Value>\n\n where\n\n F: FnOnce(&mut Self) -> DecodeResult<Value>,\n\n {\n\n let index = self.complexes.len();\n\n self.complexes.push(Value::Null);\n\n let value = f(self)?;\n\n self.complexes[index] = value.clone();\n\n Ok(value)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #![allow(clippy::approx_constant)]\n\n use super::super::marker;\n\n use super::super::Value;\n\n use crate::amf3;\n\n use crate::error::DecodeError;\n\n use crate::Pair;\n", "file_path": "src/amf0/decode.rs", "rank": 21, "score": 7.839018139287724 }, { "content": " if (b & 0b1000_0000) == 0 {\n\n return Ok(n);\n\n }\n\n }\n\n let b = self.inner.read_u8()? as u32;\n\n n = (n << 8) | b;\n\n Ok(n)\n\n }\n\n fn decode_size_or_index(&mut self) -> DecodeResult<SizeOrIndex> {\n\n let u29 = self.decode_u29()? 
as usize;\n\n let is_reference = (u29 & 0b01) == 0;\n\n let value = u29 >> 1;\n\n if is_reference {\n\n Ok(SizeOrIndex::Index(value))\n\n } else {\n\n Ok(SizeOrIndex::Size(value))\n\n }\n\n }\n\n fn decode_complex_type<F>(&mut self, f: F) -> DecodeResult<Value>\n\n where\n", "file_path": "src/amf3/decode.rs", "rank": 22, "score": 7.746567073494301 }, { "content": " assert_eq!(\n\n decode!(\"amf3-bad-object-ref.bin\"),\n\n Err(DecodeError::OutOfRangeReference { index: 10 })\n\n );\n\n assert_eq!(\n\n decode!(\"amf3-bad-trait-ref.bin\"),\n\n Err(DecodeError::OutOfRangeReference { index: 4 })\n\n );\n\n assert_eq!(\n\n decode!(\"amf3-bad-string-ref.bin\"),\n\n Err(DecodeError::OutOfRangeReference { index: 8 })\n\n );\n\n assert_eq!(\n\n decode!(\"amf3-unknown-marker.bin\"),\n\n Err(DecodeError::Unknown { marker: 123 })\n\n );\n\n assert_eq!(\n\n decode!(\"amf3-date-invalid-millis.bin\"),\n\n Err(DecodeError::InvalidDate {\n\n millis: f64::INFINITY\n", "file_path": "src/amf3/decode.rs", "rank": 23, "score": 7.61454642749443 }, { "content": "\n\n fn encode_undefined(&mut self) -> io::Result<()> {\n\n self.inner.write_u8(marker::UNDEFINED)?;\n\n Ok(())\n\n }\n\n fn encode_null(&mut self) -> io::Result<()> {\n\n self.inner.write_u8(marker::NULL)?;\n\n Ok(())\n\n }\n\n fn encode_boolean(&mut self, b: bool) -> io::Result<()> {\n\n if b {\n\n self.inner.write_u8(marker::TRUE)?;\n\n } else {\n\n self.inner.write_u8(marker::FALSE)?;\n\n }\n\n Ok(())\n\n }\n\n fn encode_integer(&mut self, i: i32) -> io::Result<()> {\n\n self.inner.write_u8(marker::INTEGER)?;\n\n let u29 = if i >= 0 {\n", "file_path": "src/amf3/encode.rs", "rank": 24, "score": 7.168977789214191 }, { "content": " fields,\n\n };\n\n self.traits.push(t.clone());\n\n Ok(t)\n\n }\n\n }\n\n fn read_bytes(&mut self, len: usize) -> DecodeResult<Vec<u8>> {\n\n let mut buf = vec![0; len];\n\n self.inner.read_exact(&mut buf)?;\n\n Ok(buf)\n\n }\n\n fn read_utf8(&mut self, len: usize) -> DecodeResult<String> {\n\n 
self.read_bytes(len).and_then(|b| Ok(String::from_utf8(b)?))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::super::Value;\n\n use crate::error::DecodeError;\n", "file_path": "src/amf3/decode.rs", "rank": 25, "score": 7.112077711435039 }, { "content": " Err(DecodeError::Unsupported {\n\n marker: marker::RECORDSET\n\n })\n\n );\n\n assert_eq!(\n\n decode!(\"amf0-unsupported.bin\"),\n\n Err(DecodeError::Unsupported {\n\n marker: marker::UNSUPPORTED\n\n })\n\n );\n\n }\n\n #[test]\n\n fn decodes_ecma_array() {\n\n let entries = es(&[(\"0\", s(\"a\")), (\"1\", s(\"b\")), (\"2\", s(\"c\")), (\"3\", s(\"d\"))][..]);\n\n decode_eq!(\n\n \"amf0-ecma-ordinal-array.bin\",\n\n Value::EcmaArray { entries: entries }\n\n );\n\n decode_unexpected_eof!(\"amf0-ecma-array-partial.bin\");\n\n\n", "file_path": "src/amf0/decode.rs", "rank": 26, "score": 7.024123774732509 }, { "content": "extern crate byteorder;\n\n\n\nuse std::io;\n\n\n\npub use amf0::Value as Amf0Value;\n\npub use amf3::Value as Amf3Value;\n\n\n\npub mod amf0;\n\npub mod amf3;\n\npub mod error;\n\n\n\n/// AMF decoding result.\n\npub type DecodeResult<T> = Result<T, error::DecodeError>;\n\n\n\n/// Format version.\n\n#[derive(Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash)]\n\npub enum Version {\n\n /// Version 0.\n\n Amf0,\n\n\n", "file_path": "src/lib.rs", "rank": 27, "score": 6.794827798275704 }, { "content": " &mut self,\n\n class_name: &Option<String>,\n\n sealed_count: usize,\n\n entries: &[Pair<String, Value>],\n\n ) -> io::Result<()> {\n\n self.inner.write_u8(marker::OBJECT)?;\n\n self.encode_trait(class_name, sealed_count, entries)?;\n\n for e in entries.iter().take(sealed_count) {\n\n self.encode(&e.value)?;\n\n }\n\n if entries.len() > sealed_count {\n\n self.encode_pairs(&entries[sealed_count..])?;\n\n }\n\n Ok(())\n\n }\n\n fn encode_xml(&mut self, xml: &str) -> io::Result<()> {\n\n self.inner.write_u8(marker::XML)?;\n\n self.encode_utf8(xml)?;\n\n Ok(())\n\n }\n", "file_path": 
"src/amf3/encode.rs", "rank": 28, "score": 6.78360562249873 }, { "content": "use crate::error::DecodeError;\n\nuse crate::{DecodeResult, Pair};\n\nuse byteorder::{BigEndian, ReadBytesExt};\n\nuse std::io;\n\nuse std::time;\n\n\n\nuse super::marker;\n\nuse super::Value;\n\n\n\n#[derive(Debug, Clone)]\n", "file_path": "src/amf3/decode.rs", "rank": 29, "score": 6.36514195580723 }, { "content": " fn encode_byte_array(&mut self, bytes: &[u8]) -> io::Result<()> {\n\n self.inner.write_u8(marker::BYTE_ARRAY)?;\n\n self.encode_size(bytes.len())?;\n\n self.inner.write_all(bytes)?;\n\n Ok(())\n\n }\n\n fn encode_int_vector(&mut self, is_fixed: bool, vec: &[i32]) -> io::Result<()> {\n\n self.inner.write_u8(marker::VECTOR_INT)?;\n\n self.encode_size(vec.len())?;\n\n self.inner.write_u8(is_fixed as u8)?;\n\n for &x in vec {\n\n self.inner.write_i32::<BigEndian>(x)?;\n\n }\n\n Ok(())\n\n }\n\n fn encode_uint_vector(&mut self, is_fixed: bool, vec: &[u32]) -> io::Result<()> {\n\n self.inner.write_u8(marker::VECTOR_UINT)?;\n\n self.encode_size(vec.len())?;\n\n self.inner.write_u8(is_fixed as u8)?;\n\n for &x in vec {\n", "file_path": "src/amf3/encode.rs", "rank": 30, "score": 6.313012963338082 }, { "content": " self.write_str_u16(&s)?;\n\n } else {\n\n self.inner.write_u8(marker::LONG_STRING)?;\n\n self.write_str_u32(&s)?;\n\n }\n\n Ok(())\n\n }\n\n fn encode_object(\n\n &mut self,\n\n class_name: &Option<String>,\n\n entries: &[Pair<String, Value>],\n\n ) -> io::Result<()> {\n\n assert!(entries.len() <= 0xFFFF_FFFF);\n\n if let Some(class_name) = class_name.as_ref() {\n\n self.inner.write_u8(marker::TYPED_OBJECT)?;\n\n self.write_str_u16(class_name)?;\n\n } else {\n\n self.inner.write_u8(marker::OBJECT)?;\n\n }\n\n self.encode_pairs(entries)?;\n", "file_path": "src/amf0/encode.rs", "rank": 31, "score": 6.161518822213736 }, { "content": " let s = String::from_utf8(bytes)?;\n\n if !s.is_empty() {\n\n self.strings.push(s.clone());\n\n }\n\n Ok(s)\n\n }\n\n SizeOrIndex::Index(index) => 
{\n\n let s = self\n\n .strings\n\n .get(index)\n\n .ok_or(DecodeError::OutOfRangeReference { index })?;\n\n Ok(s.clone())\n\n }\n\n }\n\n }\n\n fn decode_u29(&mut self) -> DecodeResult<u32> {\n\n let mut n = 0;\n\n for _ in 0..3 {\n\n let b = self.inner.read_u8()? as u32;\n\n n = (n << 7) | (b & 0b0111_1111);\n", "file_path": "src/amf3/decode.rs", "rank": 32, "score": 6.093870891504617 }, { "content": " Ok(())\n\n }\n\n fn encode_null(&mut self) -> io::Result<()> {\n\n self.inner.write_u8(marker::NULL)?;\n\n Ok(())\n\n }\n\n fn encode_undefined(&mut self) -> io::Result<()> {\n\n self.inner.write_u8(marker::UNDEFINED)?;\n\n Ok(())\n\n }\n\n fn encode_ecma_array(&mut self, entries: &[Pair<String, Value>]) -> io::Result<()> {\n\n assert!(entries.len() <= 0xFFFF_FFFF);\n\n self.inner.write_u8(marker::ECMA_ARRAY)?;\n\n self.inner.write_u32::<BigEndian>(entries.len() as u32)?;\n\n self.encode_pairs(entries)?;\n\n Ok(())\n\n }\n\n fn encode_strict_array(&mut self, entries: &[Value]) -> io::Result<()> {\n\n assert!(entries.len() <= 0xFFFF_FFFF);\n\n self.inner.write_u8(marker::STRICT_ARRAY)?;\n", "file_path": "src/amf0/encode.rs", "rank": 33, "score": 6.080817145247239 }, { "content": " i as u32\n\n } else {\n\n ((1 << 29) + i) as u32\n\n };\n\n self.encode_u29(u29)?;\n\n Ok(())\n\n }\n\n fn encode_double(&mut self, d: f64) -> io::Result<()> {\n\n self.inner.write_u8(marker::DOUBLE)?;\n\n self.inner.write_f64::<BigEndian>(d)?;\n\n Ok(())\n\n }\n\n fn encode_string(&mut self, s: &str) -> io::Result<()> {\n\n self.inner.write_u8(marker::STRING)?;\n\n self.encode_utf8(s)?;\n\n Ok(())\n\n }\n\n fn encode_xml_document(&mut self, xml: &str) -> io::Result<()> {\n\n self.inner.write_u8(marker::XML_DOC)?;\n\n self.encode_utf8(xml)?;\n", "file_path": "src/amf3/encode.rs", "rank": 34, "score": 6.059414667476098 }, { "content": " let entries = (0..count)\n\n .map(|_| {\n\n Ok(Pair {\n\n key: this.decode_value()?,\n\n value: this.decode_value()?,\n\n })\n\n })\n\n 
.collect::<DecodeResult<_>>()?;\n\n Ok(Value::Dictionary { is_weak, entries })\n\n })\n\n }\n\n\n\n /// Decode an AMF3 string.\n\n ///\n\n /// Use this if you need to decode an AMF3 string outside of value context.\n\n /// An example for this is reading keys in Local Shared Object file.\n\n pub fn decode_utf8(&mut self) -> DecodeResult<String> {\n\n match self.decode_size_or_index()? {\n\n SizeOrIndex::Size(len) => {\n\n let bytes = self.read_bytes(len)?;\n", "file_path": "src/amf3/decode.rs", "rank": 35, "score": 6.058395225256817 }, { "content": " Value::Array { ref entries } => self.encode_strict_array(entries),\n\n Value::Date { unix_time } => self.encode_date(unix_time),\n\n Value::XmlDocument(ref x) => self.encode_xml_document(x),\n\n Value::AvmPlus(ref x) => self.encode_avmplus(x),\n\n }\n\n }\n\n\n\n fn encode_number(&mut self, n: f64) -> io::Result<()> {\n\n self.inner.write_u8(marker::NUMBER)?;\n\n self.inner.write_f64::<BigEndian>(n)?;\n\n Ok(())\n\n }\n\n fn encode_boolean(&mut self, b: bool) -> io::Result<()> {\n\n self.inner.write_u8(marker::BOOLEAN)?;\n\n self.inner.write_u8(b as u8)?;\n\n Ok(())\n\n }\n\n fn encode_string(&mut self, s: &str) -> io::Result<()> {\n\n if s.len() <= 0xFFFF {\n\n self.inner.write_u8(marker::STRING)?;\n", "file_path": "src/amf0/encode.rs", "rank": 36, "score": 5.99250668109497 }, { "content": " F: FnOnce(&mut Self, usize) -> DecodeResult<Value>,\n\n {\n\n match self.decode_size_or_index()? 
{\n\n SizeOrIndex::Index(index) => self\n\n .complexes\n\n .get(index)\n\n .ok_or(DecodeError::OutOfRangeReference { index })\n\n .and_then(|v| {\n\n if *v == Value::Null {\n\n Err(DecodeError::CircularReference { index })\n\n } else {\n\n Ok(v.clone())\n\n }\n\n }),\n\n SizeOrIndex::Size(u28) => {\n\n let index = self.complexes.len();\n\n self.complexes.push(Value::Null);\n\n let value = f(self, u28)?;\n\n self.complexes[index] = value.clone();\n\n Ok(value)\n", "file_path": "src/amf3/decode.rs", "rank": 37, "score": 5.988499009215669 }, { "content": " self.inner.write_u32::<BigEndian>(x)?;\n\n }\n\n Ok(())\n\n }\n\n fn encode_double_vector(&mut self, is_fixed: bool, vec: &[f64]) -> io::Result<()> {\n\n self.inner.write_u8(marker::VECTOR_DOUBLE)?;\n\n self.encode_size(vec.len())?;\n\n self.inner.write_u8(is_fixed as u8)?;\n\n for &x in vec {\n\n self.inner.write_f64::<BigEndian>(x)?;\n\n }\n\n Ok(())\n\n }\n\n fn encode_object_vector(\n\n &mut self,\n\n class_name: &Option<String>,\n\n is_fixed: bool,\n\n vec: &[Value],\n\n ) -> io::Result<()> {\n\n self.inner.write_u8(marker::VECTOR_OBJECT)?;\n", "file_path": "src/amf3/encode.rs", "rank": 38, "score": 5.800870712052303 }, { "content": " },\n\n\n\n /// See [2.17 XML Document Type]\n\n /// (http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf#page=7&zoom=auto,90,147).\n\n XmlDocument(String),\n\n\n\n /// See [3.1 AVM+ Type Marker]\n\n /// (http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf#page=8&zoom=auto,90,518).\n\n AvmPlus(amf3::Value),\n\n}\n\nimpl Value {\n\n /// Reads an AMF0 encoded `Value` from `reader`.\n\n ///\n\n /// Note that reference objects are copied in the decoding phase\n\n /// for the sake of simplicity of the resulting value representation.\n\n /// And circular reference are unsupported (i.e., those are treated as errors).\n\n pub fn read_from<R>(reader: R) -> DecodeResult<Self>\n\n where\n\n R: io::Read,\n\n {\n", "file_path": "src/amf0/mod.rs", "rank": 39, "score": 
5.694854703235167 }, { "content": " fn decodes_typed_object() {\n\n decode_eq!(\n\n \"amf0-typed-object.bin\",\n\n obj(\n\n Some(\"org.amf.ASClass\"),\n\n &[(\"foo\", s(\"bar\")), (\"baz\", Value::Null)]\n\n )\n\n );\n\n decode_unexpected_eof!(\"amf0-typed-object-partial.bin\");\n\n }\n\n #[test]\n\n fn decodes_unsupported() {\n\n assert_eq!(\n\n decode!(\"amf0-movieclip.bin\"),\n\n Err(DecodeError::Unsupported {\n\n marker: marker::MOVIECLIP\n\n })\n\n );\n\n assert_eq!(\n\n decode!(\"amf0-recordset.bin\"),\n", "file_path": "src/amf0/decode.rs", "rank": 40, "score": 5.605186760341589 }, { "content": " self.encode_size(vec.len())?;\n\n self.inner.write_u8(is_fixed as u8)?;\n\n self.encode_utf8(class_name.as_ref().map_or(\"*\", |s| &s))?;\n\n for x in vec {\n\n self.encode(x)?;\n\n }\n\n Ok(())\n\n }\n\n fn encode_dictionary(\n\n &mut self,\n\n is_weak: bool,\n\n entries: &[Pair<Value, Value>],\n\n ) -> io::Result<()> {\n\n self.inner.write_u8(marker::DICTIONARY)?;\n\n self.encode_size(entries.len())?;\n\n self.inner.write_u8(is_weak as u8)?;\n\n for e in entries {\n\n self.encode(&e.key)?;\n\n self.encode(&e.value)?;\n\n }\n", "file_path": "src/amf3/encode.rs", "rank": 41, "score": 5.530382693058394 }, { "content": " fn decode_string(&mut self) -> DecodeResult<Value> {\n\n let len = self.inner.read_u16::<BigEndian>()? as usize;\n\n self.read_utf8(len).map(Value::String)\n\n }\n\n fn decode_object(&mut self) -> DecodeResult<Value> {\n\n self.decode_complex_type(|this| {\n\n let entries = this.decode_pairs()?;\n\n Ok(Value::Object {\n\n class_name: None,\n\n entries,\n\n })\n\n })\n\n }\n\n fn decode_reference(&mut self) -> DecodeResult<Value> {\n\n let index = self.inner.read_u16::<BigEndian>()? 
as usize;\n\n self.complexes\n\n .get(index)\n\n .ok_or(DecodeError::OutOfRangeReference { index })\n\n .and_then(|v| {\n\n if *v == Value::Null {\n", "file_path": "src/amf0/decode.rs", "rank": 42, "score": 5.521635073457411 }, { "content": " };\n\n decode_eq!(\"amf0-avmplus-object.bin\", Value::AvmPlus(expected));\n\n }\n\n #[test]\n\n fn other_errors() {\n\n decode_unexpected_eof!(\"amf0-empty.bin\");\n\n assert_eq!(\n\n decode!(\"amf0-unknown-marker.bin\"),\n\n Err(DecodeError::Unknown { marker: 97 })\n\n );\n\n }\n\n\n\n fn s(s: &str) -> Value {\n\n Value::String(s.to_string())\n\n }\n\n fn n(n: f64) -> Value {\n\n Value::Number(n)\n\n }\n\n fn obj(name: Option<&str>, entries: &[(&str, Value)]) -> Value {\n\n Value::Object {\n", "file_path": "src/amf0/decode.rs", "rank": 43, "score": 5.461440268050172 }, { "content": " match marker {\n\n marker::UNDEFINED => Ok(Value::Undefined),\n\n marker::NULL => Ok(Value::Null),\n\n marker::FALSE => Ok(Value::Boolean(false)),\n\n marker::TRUE => Ok(Value::Boolean(true)),\n\n marker::INTEGER => self.decode_integer(),\n\n marker::DOUBLE => self.decode_double(),\n\n marker::STRING => self.decode_string(),\n\n marker::XML_DOC => self.decode_xml_doc(),\n\n marker::DATE => self.decode_date(),\n\n marker::ARRAY => self.decode_array(),\n\n marker::OBJECT => self.decode_object(),\n\n marker::XML => self.decode_xml(),\n\n marker::BYTE_ARRAY => self.decode_byte_array(),\n\n marker::VECTOR_INT => self.decode_vector_int(),\n\n marker::VECTOR_UINT => self.decode_vector_uint(),\n\n marker::VECTOR_DOUBLE => self.decode_vector_double(),\n\n marker::VECTOR_OBJECT => self.decode_vector_object(),\n\n marker::DICTIONARY => self.decode_dictionary(),\n\n _ => Err(DecodeError::Unknown { marker }),\n", "file_path": "src/amf3/decode.rs", "rank": 44, "score": 5.446769841264372 }, { "content": " }\n\n Ok(())\n\n }\n\n fn encode_size(&mut self, size: usize) -> io::Result<()> {\n\n assert!(size < (1 << 28));\n\n let not_reference = 1;\n\n 
self.encode_u29(((size << 1) | not_reference) as u32)\n\n }\n\n #[allow(clippy::zero_prefixed_literal, clippy::identity_op)]\n\n fn encode_u29(&mut self, u29: u32) -> io::Result<()> {\n\n if u29 < 0x80 {\n\n self.inner.write_u8(u29 as u8)?;\n\n } else if u29 < 0x4000 {\n\n let b1 = ((u29 >> 0) & 0b0111_1111) as u8;\n\n let b2 = ((u29 >> 7) | 0b1000_0000) as u8;\n\n for b in &[b2, b1] {\n\n self.inner.write_u8(*b)?;\n\n }\n\n } else if u29 < 0x20_0000 {\n\n let b1 = ((u29 >> 00) & 0b0111_1111) as u8;\n", "file_path": "src/amf3/encode.rs", "rank": 45, "score": 5.374570541973109 }, { "content": "//! An [AMF0](http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf) implementation.\n\n//!\n\n//! # Examples\n\n//! ```\n\n//! use amf::amf0::Value;\n\n//!\n\n//! // Encodes a AMF3's number\n\n//! let number = Value::from(Value::Number(12.3));\n\n//! let mut buf = Vec::new();\n\n//! number.write_to(&mut buf).unwrap();\n\n//!\n\n//! // Decodes above number\n\n//! let decoded = Value::read_from(&mut &buf[..]).unwrap();\n\n//! assert_eq!(number, decoded);\n\n//! 
```\n\nuse crate::amf3;\n\nuse crate::{DecodeResult, Pair};\n\nuse std::io;\n\nuse std::time;\n\n\n", "file_path": "src/amf0/mod.rs", "rank": 46, "score": 5.18103294317721 }, { "content": "\n\n /// Decodes a AMF3 value.\n\n pub fn decode(&mut self) -> DecodeResult<Value> {\n\n self.decode_value()\n\n }\n\n\n\n /// Clear the reference tables of this decoder.\n\n ///\n\n /// > Similar to AFM 0, AMF 3 object reference tables, object trait reference tables\n\n /// > and string reference tables must be reset each time a new context header or message is processed.\n\n /// >\n\n /// > [AMF 3 Specification: 4.1 NetConnection and AMF 3](https://www.adobe.com/content/dam/acom/en/devnet/pdf/amf-file-format-spec.pdf)\n\n pub fn clear_reference_table(&mut self) {\n\n self.traits.clear();\n\n self.strings.clear();\n\n self.complexes.clear();\n\n }\n\n\n\n fn decode_value(&mut self) -> DecodeResult<Value> {\n\n let marker = self.inner.read_u8()?;\n", "file_path": "src/amf3/decode.rs", "rank": 47, "score": 5.137171583705291 }, { "content": "//! An [AMF3](https://www.adobe.com/content/dam/acom/en/devnet/pdf/amf-file-format-spec.pdf) implementation.\n\n//!\n\n//! # Examples\n\n//! ```\n\n//! use amf::amf3::Value;\n\n//!\n\n//! // Encodes a AMF3's integer\n\n//! let integer = Value::from(Value::Integer(123));\n\n//! let mut buf = Vec::new();\n\n//! integer.write_to(&mut buf).unwrap();\n\n//!\n\n//! // Decodes above integer\n\n//! let decoded = Value::read_from(&mut &buf[..]).unwrap();\n\n//! assert_eq!(integer, decoded);\n\n//! 
```\n\nuse crate::{DecodeResult, Pair};\n\nuse std::io;\n\nuse std::time;\n\n\n\npub use self::decode::Decoder;\n", "file_path": "src/amf3/mod.rs", "rank": 48, "score": 5.0159465041814215 }, { "content": " Ok(())\n\n }\n\n fn encode_date(&mut self, unix_time: time::Duration) -> io::Result<()> {\n\n let millis = unix_time.as_secs() * 1000 + (unix_time.subsec_nanos() as u64) / 1_000_000;\n\n self.inner.write_u8(marker::DATE)?;\n\n self.encode_size(0)?;\n\n self.inner.write_f64::<BigEndian>(millis as f64)?;\n\n Ok(())\n\n }\n\n fn encode_array(&mut self, assoc: &[Pair<String, Value>], dense: &[Value]) -> io::Result<()> {\n\n self.inner.write_u8(marker::ARRAY)?;\n\n self.encode_size(dense.len())?;\n\n self.encode_pairs(assoc)?;\n\n dense\n\n .iter()\n\n .map(|v| self.encode(v))\n\n .collect::<io::Result<Vec<_>>>()?;\n\n Ok(())\n\n }\n\n fn encode_object(\n", "file_path": "src/amf3/encode.rs", "rank": 49, "score": 4.67173810912724 }, { "content": " self.inner.write_u32::<BigEndian>(entries.len() as u32)?;\n\n for e in entries {\n\n self.encode(e)?;\n\n }\n\n Ok(())\n\n }\n\n fn encode_date(&mut self, unix_time: time::Duration) -> io::Result<()> {\n\n let millis = unix_time.as_secs() * 1000 + (unix_time.subsec_nanos() as u64) / 1_000_000;\n\n\n\n self.inner.write_u8(marker::DATE)?;\n\n self.inner.write_f64::<BigEndian>(millis as f64)?;\n\n self.inner.write_i16::<BigEndian>(0)?;\n\n Ok(())\n\n }\n\n fn encode_xml_document(&mut self, xml: &str) -> io::Result<()> {\n\n self.inner.write_u8(marker::XML_DOCUMENT)?;\n\n self.write_str_u32(xml)?;\n\n Ok(())\n\n }\n\n fn encode_avmplus(&mut self, value: &amf3::Value) -> io::Result<()> {\n", "file_path": "src/amf0/encode.rs", "rank": 50, "score": 4.499286078536552 }, { "content": " /// Version 3.\n\n Amf3,\n\n}\n\n\n\n/// AMF value.\n\n#[derive(Debug, Clone, PartialEq, PartialOrd)]\n\npub enum Value {\n\n /// AMF0 value.\n\n Amf0(Amf0Value),\n\n\n\n /// AMF3 value.\n\n Amf3(Amf3Value),\n\n}\n\nimpl Value {\n\n /// Reads an AMF 
encoded `Value` from `reader`.\n\n ///\n\n /// Note that reference objects are copied in the decoding phase\n\n /// for the sake of simplicity of the resulting value representation.\n\n /// And circular reference are unsupported (i.e., those are treated as errors).\n\n pub fn read_from<R>(reader: R, version: Version) -> DecodeResult<Self>\n", "file_path": "src/lib.rs", "rank": 51, "score": 4.321744881824932 }, { "content": " /// Use this if you need to encode an AMF3 string outside of value context.\n\n /// An example of this is writing keys in Local Shared Object file.\n\n pub fn encode_utf8(&mut self, s: &str) -> io::Result<()> {\n\n self.encode_size(s.len())?;\n\n self.inner.write_all(s.as_bytes())?;\n\n Ok(())\n\n }\n\n fn encode_pairs(&mut self, pairs: &[Pair<String, Value>]) -> io::Result<()> {\n\n for p in pairs {\n\n self.encode_utf8(&p.key)?;\n\n self.encode(&p.value)?;\n\n }\n\n self.encode_utf8(\"\")?;\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::super::Value;\n", "file_path": "src/amf3/encode.rs", "rank": 52, "score": 4.303862023075839 }, { "content": " Err(DecodeError::CircularReference { index })\n\n } else {\n\n Ok(v.clone())\n\n }\n\n })\n\n }\n\n fn decode_ecma_array(&mut self) -> DecodeResult<Value> {\n\n self.decode_complex_type(|this| {\n\n let _count = this.inner.read_u32::<BigEndian>()? as usize;\n\n let entries = this.decode_pairs()?;\n\n Ok(Value::EcmaArray { entries })\n\n })\n\n }\n\n fn decode_strict_array(&mut self) -> DecodeResult<Value> {\n\n self.decode_complex_type(|this| {\n\n let count = this.inner.read_u32::<BigEndian>()? 
as usize;\n\n let entries = (0..count)\n\n .map(|_| this.decode_value())\n\n .collect::<DecodeResult<_>>()?;\n\n Ok(Value::Array { entries })\n", "file_path": "src/amf0/decode.rs", "rank": 53, "score": 4.1815112174817965 }, { "content": " let b2 = ((u29 >> 07) | 0b1000_0000) as u8;\n\n let b3 = ((u29 >> 14) | 0b1000_0000) as u8;\n\n for b in &[b3, b2, b1] {\n\n self.inner.write_u8(*b)?;\n\n }\n\n } else if u29 < 0x4000_0000 {\n\n let b1 = ((u29 >> 00) & 0b1111_1111) as u8;\n\n let b2 = ((u29 >> 08) | 0b1000_0000) as u8;\n\n let b3 = ((u29 >> 15) | 0b1000_0000) as u8;\n\n let b4 = ((u29 >> 22) | 0b1000_0000) as u8;\n\n for b in &[b4, b3, b2, b1] {\n\n self.inner.write_u8(*b)?;\n\n }\n\n } else {\n\n panic!(\"Too large number: {}\", u29);\n\n }\n\n Ok(())\n\n }\n\n /// Encode an AMF3 string.\n\n ///\n", "file_path": "src/amf3/encode.rs", "rank": 54, "score": 4.001557888898154 }, { "content": "extern crate amf;\n\n\n\nuse amf::{Value, Version};\n\nuse std::io;\n\n\n", "file_path": "examples/decode_amf0.rs", "rank": 55, "score": 3.759983853437847 }, { "content": " use std::f64;\n\n use std::io;\n\n use std::iter;\n\n use std::time;\n\n\n\n macro_rules! decode {\n\n ($file:expr) => {{\n\n let input = include_bytes!(concat!(\"../testdata/\", $file));\n\n Value::read_from(&mut &input[..])\n\n }};\n\n }\n\n macro_rules! decode_eq {\n\n ($file:expr, $expected: expr) => {{\n\n let value = decode!($file).unwrap();\n\n assert_eq!(value, $expected)\n\n }};\n\n }\n\n macro_rules! decode_unexpected_eof {\n\n ($file:expr) => {{\n\n let result = decode!($file);\n", "file_path": "src/amf0/decode.rs", "rank": 56, "score": 3.390901534936176 }, { "content": " use crate::Pair;\n\n use std::f64;\n\n use std::io;\n\n use std::time;\n\n\n\n macro_rules! decode {\n\n ($file:expr) => {{\n\n let input = include_bytes!(concat!(\"../testdata/\", $file));\n\n Value::read_from(&mut &input[..])\n\n }};\n\n }\n\n macro_rules! 
decode_eq {\n\n ($file:expr, $expected: expr) => {{\n\n let value = decode!($file).unwrap();\n\n assert_eq!(value, $expected)\n\n }};\n\n }\n\n macro_rules! decode_unexpected_eof {\n\n ($file:expr) => {{\n\n let result = decode!($file);\n", "file_path": "src/amf3/decode.rs", "rank": 57, "score": 3.390901534936176 }, { "content": "impl From<Amf0Value> for Value {\n\n fn from(f: Amf0Value) -> Value {\n\n Value::Amf0(f)\n\n }\n\n}\n\nimpl From<Amf3Value> for Value {\n\n fn from(f: Amf3Value) -> Value {\n\n Value::Amf3(f)\n\n }\n\n}\n\n\n\n/// Key-value pair.\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct Pair<K, V> {\n\n /// The key of the pair.\n\n pub key: K,\n\n\n\n /// The value of the pair.\n\n pub value: V,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 58, "score": 3.155500048266605 }, { "content": "/// assert_eq!(number, decoded);\n\n/// ```\n\n#[derive(Debug, Clone, PartialEq, PartialOrd)]\n\npub enum Value {\n\n /// See [2.2 Number Type]\n\n /// (http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf#page=5&zoom=auto,90,667).\n\n Number(f64),\n\n\n\n /// See [2.3 Boolean Type]\n\n /// (http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf#page=5&zoom=auto,90,569).\n\n Boolean(bool),\n\n\n\n /// See [2.4 String Type]\n\n /// (http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf#page=5&zoom=auto,90,432)\n\n /// and\n\n /// [2.14 Long String Type]\n\n /// (http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf#page=7&zoom=auto,90,360).\n\n String(String),\n\n\n\n /// See [2.5 Object Type]\n", "file_path": "src/amf0/mod.rs", "rank": 59, "score": 3.1195870502999785 }, { "content": " Ok(())\n\n }\n\n fn encode_trait(\n\n &mut self,\n\n class_name: &Option<String>,\n\n sealed_count: usize,\n\n entries: &[Pair<String, Value>],\n\n ) -> io::Result<()> {\n\n assert!(sealed_count <= entries.len());\n\n let not_reference = 1;\n\n let is_externalizable = false as usize;\n\n let is_dynamic = 
(sealed_count < entries.len()) as usize;\n\n let u28 =\n\n (sealed_count << 3) | (is_dynamic << 2) | (is_externalizable << 1) | not_reference;\n\n self.encode_size(u28)?;\n\n\n\n let class_name = class_name.as_ref().map_or(\"\", |s| &s);\n\n self.encode_utf8(class_name)?;\n\n for e in entries.iter().take(sealed_count) {\n\n self.encode_utf8(&e.key)?;\n", "file_path": "src/amf3/encode.rs", "rank": 60, "score": 3.1026910518248005 }, { "content": " /// (https://www.adobe.com/content/dam/acom/en/devnet/pdf/amf-file-format-spec.pdf#page=13&zoom=auto,88,601).\n\n Dictionary {\n\n /// If `true`, the keys of `entries` are weakly referenced.\n\n is_weak: bool,\n\n\n\n /// The entries of the dictionary.\n\n entries: Vec<Pair<Value, Value>>,\n\n },\n\n}\n\nimpl Value {\n\n /// Reads an AMF3 encoded `Value` from `reader`.\n\n ///\n\n /// Note that reference objects are copied in the decoding phase\n\n /// for the sake of simplicity of the resulting value representation.\n\n /// And circular reference are unsupported (i.e., those are treated as errors).\n\n pub fn read_from<R>(reader: R) -> DecodeResult<Self>\n\n where\n\n R: io::Read,\n\n {\n\n Decoder::new(reader).decode()\n", "file_path": "src/amf3/mod.rs", "rank": 61, "score": 3.082867718609044 }, { "content": " }\n\n /// Returns a mutable reference to the underlying writer.\n\n pub fn inner_mut(&mut self) -> &mut W {\n\n &mut self.inner\n\n }\n\n}\n\nimpl<W> Encoder<W>\n\nwhere\n\n W: io::Write,\n\n{\n\n /// Makes a new instance.\n\n pub fn new(inner: W) -> Self {\n\n Encoder { inner }\n\n }\n\n\n\n /// Encodes a AMF3 value.\n\n pub fn encode(&mut self, value: &Value) -> io::Result<()> {\n\n match *value {\n\n Value::Undefined => self.encode_undefined(),\n\n Value::Null => self.encode_null(),\n", "file_path": "src/amf3/encode.rs", "rank": 62, "score": 3.066379896531333 }, { "content": " self.inner.write_u8(marker::AVMPLUS_OBJECT)?;\n\n amf3::Encoder::new(&mut self.inner).encode(value)?;\n\n Ok(())\n\n }\n\n\n\n fn 
write_str_u32(&mut self, s: &str) -> io::Result<()> {\n\n assert!(s.len() <= 0xFFFF_FFFF);\n\n self.inner.write_u32::<BigEndian>(s.len() as u32)?;\n\n self.inner.write_all(s.as_bytes())?;\n\n Ok(())\n\n }\n\n fn write_str_u16(&mut self, s: &str) -> io::Result<()> {\n\n assert!(s.len() <= 0xFFFF);\n\n self.inner.write_u16::<BigEndian>(s.len() as u16)?;\n\n self.inner.write_all(s.as_bytes())?;\n\n Ok(())\n\n }\n\n fn encode_pairs(&mut self, pairs: &[Pair<String, Value>]) -> io::Result<()> {\n\n for p in pairs {\n\n self.write_str_u16(&p.key)?;\n", "file_path": "src/amf0/encode.rs", "rank": 63, "score": 3.020853297743811 }, { "content": " /// (http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf#page=5&zoom=auto,90,320)\n\n /// and\n\n /// [2.18 Typed Object Type]\n\n /// (http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf#page=8&zoom=auto,90,682).\n\n Object {\n\n /// The class name of the object.\n\n /// `None` means it is an anonymous object.\n\n class_name: Option<String>,\n\n\n\n /// Properties of the object.\n\n entries: Vec<Pair<String, Value>>,\n\n },\n\n\n\n /// See [2.7 null Type]\n\n /// (http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf#page=6&zoom=auto,90,720).\n\n Null,\n\n\n\n /// See [2.8 undefined Type]\n\n /// (http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf#page=6&zoom=auto,90,637).\n\n Undefined,\n", "file_path": "src/amf0/mod.rs", "rank": 64, "score": 2.984404620711077 }, { "content": " /// Tries to convert the value as a `str` reference.\n\n pub fn try_as_str(&self) -> Option<&str> {\n\n match *self {\n\n Value::Amf0(ref x) => x.try_as_str(),\n\n Value::Amf3(ref x) => x.try_as_str(),\n\n }\n\n }\n\n\n\n /// Tries to convert the value as a `f64`.\n\n pub fn try_as_f64(&self) -> Option<f64> {\n\n match *self {\n\n Value::Amf0(ref x) => x.try_as_f64(),\n\n Value::Amf3(ref x) => x.try_as_f64(),\n\n }\n\n }\n\n\n\n /// Tries to convert the value as an iterator of the contained values.\n\n pub 
fn try_into_values(self) -> Result<Box<dyn Iterator<Item = Value>>, Self> {\n\n match self {\n\n Value::Amf0(x) => x.try_into_values().map_err(Value::Amf0),\n", "file_path": "src/lib.rs", "rank": 65, "score": 2.9338056845123317 }, { "content": "}\n\nimpl<R> Decoder<R>\n\nwhere\n\n R: io::Read,\n\n{\n\n /// Makes a new instance.\n\n pub fn new(inner: R) -> Self {\n\n Decoder {\n\n inner,\n\n complexes: Vec::new(),\n\n }\n\n }\n\n\n\n /// Decodes a AMF0 value.\n\n pub fn decode(&mut self) -> DecodeResult<Value> {\n\n self.decode_value()\n\n }\n\n\n\n /// Clear the reference table of this decoder.\n\n ///\n", "file_path": "src/amf0/decode.rs", "rank": 66, "score": 2.9294618054121386 }, { "content": "//! A Rust Implementation of AMF (Action Media Format).\n\n//!\n\n//! # Examples\n\n//! ```\n\n//! use amf::{Value, Amf0Value, Version};\n\n//!\n\n//! // Encodes a AMF0's number\n\n//! let number = Value::from(Amf0Value::Number(1.23));\n\n//! let mut buf = Vec::new();\n\n//! number.write_to(&mut buf).unwrap();\n\n//!\n\n//! // Decodes above number\n\n//! let decoded = Value::read_from(&mut &buf[..], Version::Amf0).unwrap();\n\n//! assert_eq!(number, decoded);\n\n//! ```\n\n//!\n\n//! # References\n\n//! - [AMF0 Specification](http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf)\n\n//! - [AMF3 Specification](https://www.adobe.com/content/dam/acom/en/devnet/pdf/amf-file-format-spec.pdf)\n\n#![warn(missing_docs)]\n", "file_path": "src/lib.rs", "rank": 67, "score": 2.8825210920770474 }, { "content": " let len = self.inner.read_u32::<BigEndian>()? as usize;\n\n self.read_utf8(len).map(Value::XmlDocument)\n\n }\n\n fn decode_typed_object(&mut self) -> DecodeResult<Value> {\n\n self.decode_complex_type(|this| {\n\n let len = this.inner.read_u16::<BigEndian>()? 
as usize;\n\n let class_name = this.read_utf8(len)?;\n\n let entries = this.decode_pairs()?;\n\n Ok(Value::Object {\n\n class_name: Some(class_name),\n\n entries,\n\n })\n\n })\n\n }\n\n fn decode_avmplus(&mut self) -> DecodeResult<Value> {\n\n let value = amf3::Decoder::new(&mut self.inner).decode()?;\n\n Ok(Value::AvmPlus(value))\n\n }\n\n\n\n fn read_utf8(&mut self, len: usize) -> DecodeResult<String> {\n", "file_path": "src/amf0/decode.rs", "rank": 68, "score": 2.866489041216109 }, { "content": " }\n\n }\n\n }\n\n fn decode_pairs(&mut self) -> DecodeResult<Vec<Pair<String, Value>>> {\n\n let mut pairs = Vec::new();\n\n loop {\n\n let key = self.decode_utf8()?;\n\n if key.is_empty() {\n\n return Ok(pairs);\n\n }\n\n let value = self.decode_value()?;\n\n pairs.push(Pair { key, value });\n\n }\n\n }\n\n fn decode_trait(&mut self, u28: usize) -> DecodeResult<Trait> {\n\n if (u28 & 0b1) == 0 {\n\n let i = (u28 >> 1) as usize;\n\n let t = self\n\n .traits\n\n .get(i)\n", "file_path": "src/amf3/decode.rs", "rank": 69, "score": 2.8625029565172424 }, { "content": " &self.inner\n\n }\n\n /// Returns a mutable reference to the underlying reader.\n\n pub fn inner_mut(&mut self) -> &mut R {\n\n &mut self.inner\n\n }\n\n}\n\nimpl<R> Decoder<R>\n\nwhere\n\n R: io::Read,\n\n{\n\n /// Makes a new instance.\n\n pub fn new(inner: R) -> Self {\n\n Decoder {\n\n inner,\n\n traits: Vec::new(),\n\n strings: Vec::new(),\n\n complexes: Vec::new(),\n\n }\n\n }\n", "file_path": "src/amf3/decode.rs", "rank": 70, "score": 2.8155108246184066 }, { "content": " self.decode_complex_type(|this, count| {\n\n let is_fixed = this.inner.read_u8()? 
!= 0;\n\n let class_name = this.decode_utf8()?;\n\n let entries = (0..count)\n\n .map(|_| this.decode_value())\n\n .collect::<DecodeResult<_>>()?;\n\n Ok(Value::ObjectVector {\n\n class_name: if class_name == \"*\" {\n\n None\n\n } else {\n\n Some(class_name)\n\n },\n\n is_fixed,\n\n entries,\n\n })\n\n })\n\n }\n\n fn decode_dictionary(&mut self) -> DecodeResult<Value> {\n\n self.decode_complex_type(|this, count| {\n\n let is_weak = this.inner.read_u8()? == 1;\n", "file_path": "src/amf3/decode.rs", "rank": 71, "score": 2.7874184331353504 }, { "content": " assert_eq!(\n\n decode!(\"amf0-bad-reference.bin\"),\n\n Err(DecodeError::OutOfRangeReference { index: 0 })\n\n );\n\n assert_eq!(\n\n decode!(\"amf0-circular-reference.bin\"),\n\n Err(DecodeError::CircularReference { index: 0 })\n\n );\n\n }\n\n #[test]\n\n fn decodes_date() {\n\n decode_eq!(\n\n \"amf0-date.bin\",\n\n Value::Date {\n\n unix_time: time::Duration::from_millis(1_590_796_800_000)\n\n }\n\n );\n\n decode_eq!(\n\n \"amf0-time.bin\",\n\n Value::Date {\n", "file_path": "src/amf0/decode.rs", "rank": 72, "score": 2.7576804222515277 }, { "content": " }\n\n\n\n /// Writes the AMF3 encoded bytes of this value to `writer`.\n\n pub fn write_to<W>(&self, writer: W) -> io::Result<()>\n\n where\n\n W: io::Write,\n\n {\n\n Encoder::new(writer).encode(self)\n\n }\n\n\n\n /// Tries to convert the value as a `str` reference.\n\n pub fn try_as_str(&self) -> Option<&str> {\n\n match *self {\n\n Value::String(ref x) => Some(x.as_str()),\n\n Value::XmlDocument(ref x) => Some(x.as_str()),\n\n Value::Xml(ref x) => Some(x.as_str()),\n\n _ => None,\n\n }\n\n }\n\n\n", "file_path": "src/amf3/mod.rs", "rank": 73, "score": 2.730540204069447 }, { "content": "\n\n /// See [2.10 ECMA Array Type]\n\n /// (http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf#page=6&zoom=auto,90,349).\n\n EcmaArray {\n\n /// Entries of the associative array.\n\n entries: Vec<Pair<String, Value>>,\n\n },\n\n\n\n /// [2.12 Strict Array 
Type]\n\n /// (http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf#page=7&zoom=auto,90,684)\n\n Array {\n\n /// Entries of the array.\n\n entries: Vec<Value>,\n\n },\n\n\n\n /// See [2.13 Date Type]\n\n /// (http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf#page=7&zoom=auto,90,546).\n\n Date {\n\n /// Unix timestamp with milliseconds precision.\n\n unix_time: time::Duration,\n", "file_path": "src/amf0/mod.rs", "rank": 74, "score": 2.7003110601745015 }, { "content": " Decoder::new(reader).decode()\n\n }\n\n\n\n /// Writes the AMF0 encoded bytes of this value to `writer`.\n\n pub fn write_to<W>(&self, writer: W) -> io::Result<()>\n\n where\n\n W: io::Write,\n\n {\n\n Encoder::new(writer).encode(self)\n\n }\n\n\n\n /// Tries to convert the value as a `str` reference.\n\n pub fn try_as_str(&self) -> Option<&str> {\n\n match *self {\n\n Value::String(ref x) => Some(x.as_ref()),\n\n Value::XmlDocument(ref x) => Some(x.as_ref()),\n\n Value::AvmPlus(ref x) => x.try_as_str(),\n\n _ => None,\n\n }\n\n }\n", "file_path": "src/amf0/mod.rs", "rank": 75, "score": 2.631277026058613 }, { "content": " /// Sealed members are located in front of the `entries`.\n\n sealed_count: usize,\n\n\n\n /// Members of the object.\n\n entries: Vec<Pair<String, Value>>,\n\n },\n\n\n\n /// See [3.13 XML Type]\n\n /// (https://www.adobe.com/content/dam/acom/en/devnet/pdf/amf-file-format-spec.pdf#page=11&zoom=auto,88,360).\n\n Xml(String),\n\n\n\n /// See [3.14 ByteArray Type]\n\n /// (https://www.adobe.com/content/dam/acom/en/devnet/pdf/amf-file-format-spec.pdf#page=11&zoom=auto,88,167).\n\n ByteArray(Vec<u8>),\n\n\n\n /// See [3.15 Vector Type]\n\n /// (https://www.adobe.com/content/dam/acom/en/devnet/pdf/amf-file-format-spec.pdf#page=12&zoom=auto,88,534).\n\n IntVector {\n\n /// If `true`, this is a fixed-length vector.\n\n is_fixed: bool,\n", "file_path": "src/amf3/mod.rs", "rank": 76, "score": 2.6160556594345126 }, { "content": " use crate::Pair;\n\n use 
std::time;\n\n\n\n macro_rules! encode_eq {\n\n ($value:expr, $file:expr) => {{\n\n let expected = include_bytes!(concat!(\"../testdata/\", $file));\n\n let mut buf = Vec::new();\n\n $value.write_to(&mut buf).unwrap();\n\n assert_eq!(buf, &expected[..]);\n\n }};\n\n }\n\n macro_rules! encode_and_decode {\n\n ($value:expr) => {{\n\n let v = $value;\n\n let mut buf = Vec::new();\n\n v.write_to(&mut buf).unwrap();\n\n assert_eq!(v, Value::read_from(&mut &buf[..]).unwrap());\n\n }};\n\n }\n\n\n", "file_path": "src/amf3/encode.rs", "rank": 77, "score": 2.581227014643392 }, { "content": "where\n\n W: io::Write,\n\n{\n\n /// Makes a new instance.\n\n pub fn new(inner: W) -> Self {\n\n Encoder { inner }\n\n }\n\n /// Encodes a AMF0 value.\n\n pub fn encode(&mut self, value: &Value) -> io::Result<()> {\n\n match *value {\n\n Value::Number(x) => self.encode_number(x),\n\n Value::Boolean(x) => self.encode_boolean(x),\n\n Value::String(ref x) => self.encode_string(x),\n\n Value::Object {\n\n ref class_name,\n\n ref entries,\n\n } => self.encode_object(class_name, entries),\n\n Value::Null => self.encode_null(),\n\n Value::Undefined => self.encode_undefined(),\n\n Value::EcmaArray { ref entries } => self.encode_ecma_array(entries),\n", "file_path": "src/amf0/encode.rs", "rank": 78, "score": 2.3570793433092523 }, { "content": " }\n\n fn decode_vector_uint(&mut self) -> DecodeResult<Value> {\n\n self.decode_complex_type(|this, count| {\n\n let is_fixed = this.inner.read_u8()? != 0;\n\n let entries = (0..count)\n\n .map(|_| this.inner.read_u32::<BigEndian>())\n\n .collect::<Result<_, _>>()?;\n\n Ok(Value::UintVector { is_fixed, entries })\n\n })\n\n }\n\n fn decode_vector_double(&mut self) -> DecodeResult<Value> {\n\n self.decode_complex_type(|this, count| {\n\n let is_fixed = this.inner.read_u8()? 
!= 0;\n\n let entries = (0..count)\n\n .map(|_| this.inner.read_f64::<BigEndian>())\n\n .collect::<Result<_, _>>()?;\n\n Ok(Value::DoubleVector { is_fixed, entries })\n\n })\n\n }\n\n fn decode_vector_object(&mut self) -> DecodeResult<Value> {\n", "file_path": "src/amf3/decode.rs", "rank": 79, "score": 2.3273912807503803 }, { "content": "\n\n /// Tries to convert the value as a `f64`.\n\n pub fn try_as_f64(&self) -> Option<f64> {\n\n match *self {\n\n Value::Number(x) => Some(x),\n\n Value::AvmPlus(ref x) => x.try_as_f64(),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Tries to convert the value as an iterator of the contained values.\n\n pub fn try_into_values(self) -> Result<Box<dyn Iterator<Item = super::Value>>, Self> {\n\n match self {\n\n Value::Array { entries } => Ok(Box::new(entries.into_iter().map(super::Value::Amf0))),\n\n Value::AvmPlus(x) => x\n\n .try_into_values()\n\n .map(|iter| iter.map(super::Value::Amf3))\n\n .map(super::iter_boxed)\n\n .map_err(Value::AvmPlus),\n\n _ => Err(self),\n", "file_path": "src/amf0/mod.rs", "rank": 80, "score": 2.32677109205842 }, { "content": " /// Tries to convert the value as a `f64`.\n\n pub fn try_as_f64(&self) -> Option<f64> {\n\n match *self {\n\n Value::Integer(x) => Some(x as f64),\n\n Value::Double(x) => Some(x),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Tries to convert the value as an iterator of the contained values.\n\n pub fn try_into_values(self) -> Result<Box<dyn Iterator<Item = Value>>, Self> {\n\n match self {\n\n Value::Array { dense_entries, .. } => Ok(Box::new(dense_entries.into_iter())),\n\n Value::IntVector { entries, .. } => {\n\n Ok(Box::new(entries.into_iter().map(Value::Integer)))\n\n }\n\n Value::UintVector { entries, .. } => Ok(Box::new(\n\n entries.into_iter().map(|n| Value::Double(n as f64)),\n\n )),\n\n Value::DoubleVector { entries, .. 
} => {\n", "file_path": "src/amf3/mod.rs", "rank": 81, "score": 2.28274249376055 }, { "content": " typed_obj(\n\n \"org.amf.ASClass\",\n\n &[(\"foo\", s(\"baz\")), (\"baz\", Value::Null)][..],\n\n ),\n\n ];\n\n decode_eq!(\n\n \"amf3-vector-object.bin\",\n\n Value::ObjectVector {\n\n class_name: Some(\"org.amf.ASClass\".to_string()),\n\n is_fixed: false,\n\n entries: objects,\n\n }\n\n );\n\n }\n\n #[test]\n\n fn other_errors() {\n\n assert_eq!(\n\n decode!(\"amf3-graph-member.bin\"),\n\n Err(DecodeError::CircularReference { index: 0 })\n\n );\n", "file_path": "src/amf3/decode.rs", "rank": 82, "score": 2.0908245033400044 }, { "content": "amf\n\n===\n\n\n\n[![Crates.io: amf](http://meritbadge.herokuapp.com/amf)](https://crates.io/crates/amf)\n\n[![Documentation](https://docs.rs/amf/badge.svg)](https://docs.rs/amf)\n\n[![Build Status](https://travis-ci.org/sile/amf.svg?branch=master)](https://travis-ci.org/sile/amf)\n\n[![Code Coverage](https://codecov.io/gh/sile/amf/branch/master/graph/badge.svg)](https://codecov.io/gh/sile/amf/branch/master)\n\n[![License: MIT](https://img.shields.io/badge/license-MIT-blue.svg)](LICENSE)\n\n\n\nA Rust Implementation of AMF (Action Media Format).\n\n\n\n\n\nDocumentation\n\n-------------\n\n\n\nSee [RustDoc Documentation](https://docs.rs/amf/).\n\n\n\nExample\n\n-------\n\n\n\nFollowing code decodes a AMF0 encoded value read from the standard input:\n\n\n\n```rust\n\n// file: examples/decode_amf0.rs\n\nextern crate amf;\n\n\n\nuse std::io;\n\nuse amf::{Value, Version};\n\n\n\nfn main() {\n\n let mut input = io::stdin();\n\n let amf0_value = Value::read_from(&mut input, Version::Amf0).unwrap();\n\n println!(\"VALUE: {:?}\", amf0_value);\n\n}\n\n```\n\n\n\nAn execution result:\n\n\n\n```bash\n\n$ cat src/testdata/amf0-number.bin | cargo run --example decode_amf0\n\nVALUE: Amf0(Number(3.5))\n\n```\n\n\n\nReferences\n\n----------\n\n\n\n- [AMF0 Specification](http://download.macromedia.com/pub/labs/amf/amf0_spec_121207.pdf)\n\n- [AMF3 
Specification](https://www.adobe.com/content/dam/acom/en/devnet/pdf/amf-file-format-spec.pdf)\n\n- [Action Message Format - Wikipedia](https://en.wikipedia.org/wiki/Action_Message_Format)\n", "file_path": "README.md", "rank": 83, "score": 2.0375863830833825 }, { "content": " .ok_or(DecodeError::OutOfRangeReference { index: i })?;\n\n Ok(t.clone())\n\n } else if (u28 & 0b10) != 0 {\n\n let class_name = self.decode_utf8()?;\n\n Err(DecodeError::ExternalizableType { name: class_name })\n\n } else {\n\n let is_dynamic = (u28 & 0b100) != 0;\n\n let field_num = u28 >> 3;\n\n let class_name = self.decode_utf8()?;\n\n let fields = (0..field_num)\n\n .map(|_| self.decode_utf8())\n\n .collect::<DecodeResult<_>>()?;\n\n\n\n let t = Trait {\n\n class_name: if class_name.is_empty() {\n\n None\n\n } else {\n\n Some(class_name)\n\n },\n\n is_dynamic,\n", "file_path": "src/amf3/decode.rs", "rank": 84, "score": 1.9808327415891918 }, { "content": " where\n\n R: io::Read,\n\n {\n\n match version {\n\n Version::Amf0 => Amf0Value::read_from(reader).map(Value::Amf0),\n\n Version::Amf3 => Amf3Value::read_from(reader).map(Value::Amf3),\n\n }\n\n }\n\n\n\n /// Writes the AMF encoded bytes of this value to `writer`.\n\n pub fn write_to<W>(&self, writer: W) -> io::Result<()>\n\n where\n\n W: io::Write,\n\n {\n\n match *self {\n\n Value::Amf0(ref x) => x.write_to(writer),\n\n Value::Amf3(ref x) => x.write_to(writer),\n\n }\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 85, "score": 1.8087283649913553 }, { "content": " Value::Amf3(x) => x\n\n .try_into_values()\n\n .map(|iter| iter.map(Value::Amf3))\n\n .map(iter_boxed)\n\n .map_err(Value::Amf3),\n\n }\n\n }\n\n\n\n /// Tries to convert the value as an iterator of the contained pairs.\n\n pub fn try_into_pairs(self) -> Result<Box<dyn Iterator<Item = (String, Value)>>, Self> {\n\n match self {\n\n Value::Amf0(x) => x.try_into_pairs().map_err(Value::Amf0),\n\n Value::Amf3(x) => x\n\n .try_into_pairs()\n\n .map(|iter| iter.map(|(k, v)| (k, 
Value::Amf3(v))))\n\n .map(iter_boxed)\n\n .map_err(Value::Amf3),\n\n }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 86, "score": 1.706390569020655 }, { "content": " let mut buf = vec![0; len];\n\n self.inner.read_exact(&mut buf)?;\n\n let utf8 = String::from_utf8(buf)?;\n\n Ok(utf8)\n\n }\n\n fn decode_pairs(&mut self) -> DecodeResult<Vec<Pair<String, Value>>> {\n\n let mut entries = Vec::new();\n\n loop {\n\n let len = self.inner.read_u16::<BigEndian>()? as usize;\n\n let key = self.read_utf8(len)?;\n\n match self.decode_value() {\n\n Ok(value) => {\n\n entries.push(Pair { key, value });\n\n }\n\n Err(DecodeError::UnexpectedObjectEnd) if key.is_empty() => break,\n\n Err(e) => return Err(e),\n\n }\n\n }\n\n Ok(entries)\n\n }\n", "file_path": "src/amf0/decode.rs", "rank": 87, "score": 1.665426337998578 }, { "content": " Ok(Box::new(entries.into_iter().map(Value::Double)))\n\n }\n\n Value::ObjectVector { entries, .. } => Ok(Box::new(entries.into_iter())),\n\n _ => Err(self),\n\n }\n\n }\n\n\n\n /// Tries to convert the value as an iterator of the contained pairs.\n\n pub fn try_into_pairs(self) -> Result<Box<dyn Iterator<Item = (String, Value)>>, Self> {\n\n match self {\n\n Value::Array { assoc_entries, .. } => Ok(Box::new(\n\n assoc_entries.into_iter().map(|p| (p.key, p.value)),\n\n )),\n\n Value::Object { entries, .. } => {\n\n Ok(Box::new(entries.into_iter().map(|p| (p.key, p.value))))\n\n }\n\n _ => Err(self),\n\n }\n\n }\n\n}\n", "file_path": "src/amf3/mod.rs", "rank": 88, "score": 1.6443651488909934 }, { "content": " }\n\n }\n\n\n\n /// Tries to convert the value as an iterator of the contained pairs.\n\n pub fn try_into_pairs(self) -> Result<Box<dyn Iterator<Item = (String, super::Value)>>, Self> {\n\n match self {\n\n Value::EcmaArray { entries } => Ok(Box::new(\n\n entries\n\n .into_iter()\n\n .map(|p| (p.key, super::Value::Amf0(p.value))),\n\n )),\n\n Value::Object { entries, .. 
} => Ok(Box::new(\n\n entries\n\n .into_iter()\n\n .map(|p| (p.key, super::Value::Amf0(p.value))),\n\n )),\n\n Value::AvmPlus(x) => x\n\n .try_into_pairs()\n\n .map(|ps| ps.map(|(k, v)| (k, super::Value::Amf3(v))))\n\n .map(super::iter_boxed)\n\n .map_err(Value::AvmPlus),\n\n _ => Err(self),\n\n }\n\n }\n\n}\n\n\n\n/// Makes a `String` value.\n", "file_path": "src/amf0/mod.rs", "rank": 89, "score": 1.4949324828083252 }, { "content": " })\n\n }\n\n fn decode_date(&mut self) -> DecodeResult<Value> {\n\n let millis = self.inner.read_f64::<BigEndian>()?;\n\n let time_zone = self.inner.read_i16::<BigEndian>()?;\n\n if time_zone != 0 {\n\n Err(DecodeError::NonZeroTimeZone { offset: time_zone })\n\n } else if !(millis.is_finite() && millis.is_sign_positive()) {\n\n Err(DecodeError::InvalidDate { millis })\n\n } else {\n\n Ok(Value::Date {\n\n unix_time: time::Duration::from_millis(millis as u64),\n\n })\n\n }\n\n }\n\n fn decode_long_string(&mut self) -> DecodeResult<Value> {\n\n let len = self.inner.read_u32::<BigEndian>()? as usize;\n\n self.read_utf8(len).map(Value::String)\n\n }\n\n fn decode_xml_document(&mut self) -> DecodeResult<Value> {\n", "file_path": "src/amf0/decode.rs", "rank": 90, "score": 1.4589332161380888 }, { "content": " class_name: amf_trait.class_name,\n\n sealed_count: amf_trait.fields.len(),\n\n entries,\n\n })\n\n })\n\n }\n\n fn decode_xml(&mut self) -> DecodeResult<Value> {\n\n self.decode_complex_type(|this, len| this.read_utf8(len).map(Value::Xml))\n\n }\n\n fn decode_byte_array(&mut self) -> DecodeResult<Value> {\n\n self.decode_complex_type(|this, len| this.read_bytes(len).map(Value::ByteArray))\n\n }\n\n fn decode_vector_int(&mut self) -> DecodeResult<Value> {\n\n self.decode_complex_type(|this, count| {\n\n let is_fixed = this.inner.read_u8()? 
!= 0;\n\n let entries = (0..count)\n\n .map(|_| this.inner.read_i32::<BigEndian>())\n\n .collect::<Result<_, _>>()?;\n\n Ok(Value::IntVector { is_fixed, entries })\n\n })\n", "file_path": "src/amf3/decode.rs", "rank": 91, "score": 1.4544782380400187 }, { "content": "/// ```\n\n#[derive(Debug, Clone, PartialEq, PartialOrd)]\n\npub enum Value {\n\n /// See [3.2 undefined Type]\n\n /// (https://www.adobe.com/content/dam/acom/en/devnet/pdf/amf-file-format-spec.pdf#page=6&zoom=auto,88,264).\n\n Undefined,\n\n\n\n /// See [3.3 null Type]\n\n /// (https://www.adobe.com/content/dam/acom/en/devnet/pdf/amf-file-format-spec.pdf#page=6&zoom=auto,88,139).\n\n Null,\n\n\n\n /// See [3.4 false Type]\n\n /// (https://www.adobe.com/content/dam/acom/en/devnet/pdf/amf-file-format-spec.pdf#page=7&zoom=auto,88,694)\n\n /// and\n\n /// [3.5 true Type]\n\n /// (https://www.adobe.com/content/dam/acom/en/devnet/pdf/amf-file-format-spec.pdf#page=7&zoom=auto,88,596).\n\n Boolean(bool),\n\n\n\n /// See [3.6 integer Type]\n\n /// (https://www.adobe.com/content/dam/acom/en/devnet/pdf/amf-file-format-spec.pdf#page=7&zoom=auto,88,499).\n", "file_path": "src/amf3/mod.rs", "rank": 92, "score": 1.0832646048049357 } ]
Rust
src/atsame70q21b/twihs0/twihs_sr.rs
tstellanova/atsame7x-pac
f7e24c71181651c141d0727379147c388661ce0e
#[doc = "Reader of register TWIHS_SR"] pub type R = crate::R<u32, super::TWIHS_SR>; #[doc = "Reader of field `TXCOMP`"] pub type TXCOMP_R = crate::R<bool, bool>; #[doc = "Reader of field `RXRDY`"] pub type RXRDY_R = crate::R<bool, bool>; #[doc = "Reader of field `TXRDY`"] pub type TXRDY_R = crate::R<bool, bool>; #[doc = "Reader of field `SVREAD`"] pub type SVREAD_R = crate::R<bool, bool>; #[doc = "Reader of field `SVACC`"] pub type SVACC_R = crate::R<bool, bool>; #[doc = "Reader of field `GACC`"] pub type GACC_R = crate::R<bool, bool>; #[doc = "Reader of field `OVRE`"] pub type OVRE_R = crate::R<bool, bool>; #[doc = "Reader of field `UNRE`"] pub type UNRE_R = crate::R<bool, bool>; #[doc = "Reader of field `NACK`"] pub type NACK_R = crate::R<bool, bool>; #[doc = "Reader of field `ARBLST`"] pub type ARBLST_R = crate::R<bool, bool>; #[doc = "Reader of field `SCLWS`"] pub type SCLWS_R = crate::R<bool, bool>; #[doc = "Reader of field `EOSACC`"] pub type EOSACC_R = crate::R<bool, bool>; #[doc = "Reader of field `MCACK`"] pub type MCACK_R = crate::R<bool, bool>; #[doc = "Reader of field `TOUT`"] pub type TOUT_R = crate::R<bool, bool>; #[doc = "Reader of field `PECERR`"] pub type PECERR_R = crate::R<bool, bool>; #[doc = "Reader of field `SMBDAM`"] pub type SMBDAM_R = crate::R<bool, bool>; #[doc = "Reader of field `SMBHHM`"] pub type SMBHHM_R = crate::R<bool, bool>; #[doc = "Reader of field `SCL`"] pub type SCL_R = crate::R<bool, bool>; #[doc = "Reader of field `SDA`"] pub type SDA_R = crate::R<bool, bool>; impl R { #[doc = "Bit 0 - Transmission Completed (cleared by writing TWIHS_THR)"] #[inline(always)] pub fn txcomp(&self) -> TXCOMP_R { TXCOMP_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Receive Holding Register Ready (cleared by reading TWIHS_RHR)"] #[inline(always)] pub fn rxrdy(&self) -> RXRDY_R { RXRDY_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - Transmit Holding Register Ready (cleared by writing TWIHS_THR)"] #[inline(always)] pub fn txrdy(&self) 
-> TXRDY_R { TXRDY_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - Slave Read"] #[inline(always)] pub fn svread(&self) -> SVREAD_R { SVREAD_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 4 - Slave Access"] #[inline(always)] pub fn svacc(&self) -> SVACC_R { SVACC_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bit 5 - General Call Access (cleared on read)"] #[inline(always)] pub fn gacc(&self) -> GACC_R { GACC_R::new(((self.bits >> 5) & 0x01) != 0) } #[doc = "Bit 6 - Overrun Error (cleared on read)"] #[inline(always)] pub fn ovre(&self) -> OVRE_R { OVRE_R::new(((self.bits >> 6) & 0x01) != 0) } #[doc = "Bit 7 - Underrun Error (cleared on read)"] #[inline(always)] pub fn unre(&self) -> UNRE_R { UNRE_R::new(((self.bits >> 7) & 0x01) != 0) } #[doc = "Bit 8 - Not Acknowledged (cleared on read)"] #[inline(always)] pub fn nack(&self) -> NACK_R { NACK_R::new(((self.bits >> 8) & 0x01) != 0) } #[doc = "Bit 9 - Arbitration Lost (cleared on read)"] #[inline(always)] pub fn arblst(&self) -> ARBLST_R { ARBLST_R::new(((self.bits >> 9) & 0x01) != 0) } #[doc = "Bit 10 - Clock Wait State"] #[inline(always)] pub fn sclws(&self) -> SCLWS_R { SCLWS_R::new(((self.bits >> 10) & 0x01) != 0) } #[doc = "Bit 11 - End Of Slave Access (cleared on read)"] #[inline(always)] pub fn eosacc(&self) -> EOSACC_R { EOSACC_R::new(((self.bits >> 11) & 0x01) != 0) } #[doc = "Bit 16 - Master Code Acknowledge (cleared on read)"] #[inline(always)] pub fn mcack(&self) -> MCACK_R { MCACK_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 18 - Timeout Error (cleared on read)"] #[inline(always)] pub fn tout(&self) -> TOUT_R { TOUT_R::new(((self.bits >> 18) & 0x01) != 0) } #[doc = "Bit 19 - PEC Error (cleared on read)"] #[inline(always)] pub fn pecerr(&self) -> PECERR_R { PECERR_R::new(((self.bits >> 19) & 0x01) != 0) } #[doc = "Bit 20 - SMBus Default Address Match (cleared on read)"] #[inline(always)] pub fn smbdam(&self) -> SMBDAM_R { SMBDAM_R::new(((self.bits >> 20) & 0x01) != 0) } #[doc 
= "Bit 21 - SMBus Host Header Address Match (cleared on read)"] #[inline(always)] pub fn smbhhm(&self) -> SMBHHM_R { SMBHHM_R::new(((self.bits >> 21) & 0x01) != 0) } #[doc = "Bit 24 - SCL Line Value"] #[inline(always)] pub fn scl(&self) -> SCL_R { SCL_R::new(((self.bits >> 24) & 0x01) != 0) } #[doc = "Bit 25 - SDA Line Value"] #[inline(always)] pub fn sda(&self) -> SDA_R { SDA_R::new(((self.bits >> 25) & 0x01) != 0) } }
#[doc = "Reader of register TWIHS_SR"] pub type R = crate::R<u32, super::TWIHS_SR>; #[doc = "Reader of field `TXCOMP`"] pub type TXCOMP_R = crate::R<bool, bool>; #[doc = "Reader of field `RXRDY`"] pub type RXRDY_R = crate::R<bool, bool>; #[doc = "Reader of field `TXRDY`"] pub type TXRDY_R = crate::R<bool, bool>; #[doc = "Reader of field `SVREAD`"] pub type SVREAD_R = crate::R<bool, bool>; #[doc = "Reader of field `SVACC`"] pub type SVACC_R = crate::R<bool, bool>; #[doc = "Reader of field `GACC`"] pub type GACC_R = crate::R<bool, bool>; #[doc = "Reader of field `OVRE`"] pub type OVRE_R = crate::R<bool, bool>; #[doc = "Reader of field `UNRE`"] pub type UNRE_R = crate::R<bool, bool>; #[doc = "Reader of field `NACK`"] pub type NACK_R = crate::R<bool, bool>; #[doc = "Reader of field `ARBLST`"] pub type ARBLST_R = crate::R<bool, bool>; #[doc = "Reader of field `SCLWS`"] pub type SCLWS_R = crate::R<bool, bool>; #[doc = "Reader of field `EOSACC`"] pub type EOSACC_R = crate::R<bool, bool>; #[doc = "Reader of field `MCACK`"] pub type MCACK_R = crate::R<bool, bool>; #[doc = "Reader of field `TOUT`"] pub type TOUT_R = crate::R<bool, bool>; #[doc = "Reader of field `PECERR`"] pub type PECERR_R = crate::R<bool, bool>; #[doc = "Reader of field `SMBDAM`"] pub type SMBDAM_R = crate::R<bool, bool>; #[doc = "Reader of field `SMBHHM`"] pub type SMBHHM_R = crate::R<bool, bool>; #[doc = "Reader of field `SCL`"] pub type SCL_R = crate::R<bool, bool>; #[doc = "Reader of field `SDA`"] pub type SDA_R = crate::R<bool, bool>; impl R { #[doc = "Bit 0 - Transmission Completed (cleared by writing TWIHS_THR)"] #[inline(always)] pub fn txcomp(&self) -> TXCOMP_R { TXCOMP_R::new((self.bits & 0x01) != 0) } #[doc = "Bit 1 - Receive Holding Register Ready (cleared by reading TWIHS_RHR)"] #[inline(always)] pub fn rxrdy(&self) -> RXRDY_R { RXRDY_R::new(((self.bits >> 1) & 0x01) != 0) } #[doc = "Bit 2 - Transmit Holding Register Ready (cleared by writing TWIHS_THR)"] #[inline(always)] pub fn txrdy(&self) 
-> TXRDY_R { TXRDY_R::new(((self.bits >> 2) & 0x01) != 0) } #[doc = "Bit 3 - Slave Read"] #[inline(always)] pub fn svread(&self) -> SVREAD_R { SVREAD_R::new(((self.bits >> 3) & 0x01) != 0) } #[doc = "Bit 4 - Slave Access"] #[inline(always)] pub fn svacc(&self) -> SVACC_R { SVACC_R::new(((self.bits >> 4) & 0x01) != 0) } #[doc = "Bit 5 - General Call Access (cleared on read)"] #[inline(always)] pub fn gacc(&self) -> GACC_R { GACC_R::new(((self.bits >> 5) & 0x01) != 0) } #[doc = "Bit 6 - Overrun Error (cleared on read)"] #[inline(always)] pub fn ovre(&self) -> OVRE_R { OVRE_R::new(((self.bits >> 6) & 0x01) != 0) } #[doc = "Bit 7 - Underrun Error (cleared on read)"] #[inline(always)] pub fn unre(&self) -> UNRE_R { UNRE_R::new(((self.bits >>
#[doc = "Bit 9 - Arbitration Lost (cleared on read)"] #[inline(always)] pub fn arblst(&self) -> ARBLST_R { ARBLST_R::new(((self.bits >> 9) & 0x01) != 0) } #[doc = "Bit 10 - Clock Wait State"] #[inline(always)] pub fn sclws(&self) -> SCLWS_R { SCLWS_R::new(((self.bits >> 10) & 0x01) != 0) } #[doc = "Bit 11 - End Of Slave Access (cleared on read)"] #[inline(always)] pub fn eosacc(&self) -> EOSACC_R { EOSACC_R::new(((self.bits >> 11) & 0x01) != 0) } #[doc = "Bit 16 - Master Code Acknowledge (cleared on read)"] #[inline(always)] pub fn mcack(&self) -> MCACK_R { MCACK_R::new(((self.bits >> 16) & 0x01) != 0) } #[doc = "Bit 18 - Timeout Error (cleared on read)"] #[inline(always)] pub fn tout(&self) -> TOUT_R { TOUT_R::new(((self.bits >> 18) & 0x01) != 0) } #[doc = "Bit 19 - PEC Error (cleared on read)"] #[inline(always)] pub fn pecerr(&self) -> PECERR_R { PECERR_R::new(((self.bits >> 19) & 0x01) != 0) } #[doc = "Bit 20 - SMBus Default Address Match (cleared on read)"] #[inline(always)] pub fn smbdam(&self) -> SMBDAM_R { SMBDAM_R::new(((self.bits >> 20) & 0x01) != 0) } #[doc = "Bit 21 - SMBus Host Header Address Match (cleared on read)"] #[inline(always)] pub fn smbhhm(&self) -> SMBHHM_R { SMBHHM_R::new(((self.bits >> 21) & 0x01) != 0) } #[doc = "Bit 24 - SCL Line Value"] #[inline(always)] pub fn scl(&self) -> SCL_R { SCL_R::new(((self.bits >> 24) & 0x01) != 0) } #[doc = "Bit 25 - SDA Line Value"] #[inline(always)] pub fn sda(&self) -> SDA_R { SDA_R::new(((self.bits >> 25) & 0x01) != 0) } }
7) & 0x01) != 0) } #[doc = "Bit 8 - Not Acknowledged (cleared on read)"] #[inline(always)] pub fn nack(&self) -> NACK_R { NACK_R::new(((self.bits >> 8) & 0x01) != 0) }
random
[ { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "src/generic.rs", "rank": 0, "score": 58126.51002228791 }, { "content": "#[doc = \"This trait shows that register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "src/generic.rs", "rank": 1, "score": 58126.360091788854 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "src/generic.rs", "rank": 2, "score": 56310.90996117873 }, { "content": "#[doc = \"Reader of register GMAC_SCL\"]\n\npub type R = crate::R<u32, super::GMAC_SCL>;\n\n#[doc = \"Writer for register GMAC_SCL\"]\n\npub type W = crate::W<u32, super::GMAC_SCL>;\n\n#[doc = \"Register GMAC_SCL `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::GMAC_SCL {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `SEC`\"]\n\npub type SEC_R = crate::R<u32, u32>;\n\n#[doc = \"Write proxy for field `SEC`\"]\n\npub 
struct SEC_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SEC_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n", "file_path": "src/atsame70q21b/gmac/gmac_scl.rs", "rank": 3, "score": 54772.84082711345 }, { "content": " #[inline(always)]\n\n pub unsafe fn bits(self, value: u32) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 0:31 - 1588 Timer Second Comparison Value\"]\n\n #[inline(always)]\n\n pub fn sec(&self) -> SEC_R {\n\n SEC_R::new((self.bits & 0xffff_ffff) as u32)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bits 0:31 - 1588 Timer Second Comparison Value\"]\n\n #[inline(always)]\n\n pub fn sec(&mut self) -> SEC_W {\n\n SEC_W { w: self }\n\n }\n\n}\n", "file_path": "src/atsame70q21b/gmac/gmac_scl.rs", "rank": 4, "score": 54747.26847644003 }, { "content": "#[doc = \"Reader of field `SCL_WS`\"]\n\npub type SCL_WS_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `EOSACC`\"]\n\npub type EOSACC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `MCACK`\"]\n\npub type MCACK_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TOUT`\"]\n\npub type TOUT_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `PECERR`\"]\n\npub type PECERR_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `SMBDAM`\"]\n\npub type SMBDAM_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `SMBHHM`\"]\n\npub type SMBHHM_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Transmission Completed Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn txcomp(&self) -> TXCOMP_R {\n\n TXCOMP_R::new((self.bits & 0x01) != 0)\n\n }\n", "file_path": "src/atsame70q21b/twihs0/twihs_imr.rs", "rank": 7, "score": 79.63031314031937 }, { "content": " #[doc = \"Bit 1 - Receive Holding Register Ready Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n\n RXRDY_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 2 - Transmit Holding Register Ready 
Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn txrdy(&self) -> TXRDY_R {\n\n TXRDY_R::new(((self.bits >> 2) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 4 - Slave Access Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn svacc(&self) -> SVACC_R {\n\n SVACC_R::new(((self.bits >> 4) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 5 - General Call Access Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn gacc(&self) -> GACC_R {\n\n GACC_R::new(((self.bits >> 5) & 0x01) != 0)\n\n }\n", "file_path": "src/atsame70q21b/twihs0/twihs_imr.rs", "rank": 8, "score": 78.39462893879654 }, { "content": "#[doc = \"Reader of field `DCDIC`\"]\n\npub type DCDIC_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Receiver Ready (cleared by reading US_RHR)\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n\n RXRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Transmitter Ready (cleared by writing US_THR)\"]\n\n #[inline(always)]\n\n pub fn txrdy(&self) -> TXRDY_R {\n\n TXRDY_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 5 - Overrun Error (cleared by writing a one to bit US_CR.RSTSTA)\"]\n\n #[inline(always)]\n\n pub fn ovre(&self) -> OVRE_R {\n\n OVRE_R::new(((self.bits >> 5) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 6 - Framing Error (cleared by writing a one to bit US_CR.RSTSTA)\"]\n\n #[inline(always)]\n", "file_path": "src/atsame70q21b/usart0/us_csr_usart_lin_mode.rs", "rank": 9, "score": 77.94318473265747 }, { "content": " }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 0 - Transmission Completed Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn txcomp(&mut self) -> TXCOMP_W {\n\n TXCOMP_W { w: self }\n\n }\n\n #[doc = \"Bit 1 - Receive Holding Register Ready Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&mut self) -> RXRDY_W {\n\n RXRDY_W { w: self }\n\n }\n\n #[doc = \"Bit 2 - Transmit Holding Register Ready Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn txrdy(&mut self) -> TXRDY_W {\n\n TXRDY_W { w: self }\n\n }\n\n #[doc = \"Bit 
4 - Slave Access Interrupt Disable\"]\n\n #[inline(always)]\n", "file_path": "src/atsame70q21b/twihs0/twihs_idr.rs", "rank": 10, "score": 74.55005185417605 }, { "content": " }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 0 - Transmission Completed Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn txcomp(&mut self) -> TXCOMP_W {\n\n TXCOMP_W { w: self }\n\n }\n\n #[doc = \"Bit 1 - Receive Holding Register Ready Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&mut self) -> RXRDY_W {\n\n RXRDY_W { w: self }\n\n }\n\n #[doc = \"Bit 2 - Transmit Holding Register Ready Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn txrdy(&mut self) -> TXRDY_W {\n\n TXRDY_W { w: self }\n\n }\n\n #[doc = \"Bit 4 - Slave Access Interrupt Enable\"]\n\n #[inline(always)]\n", "file_path": "src/atsame70q21b/twihs0/twihs_ier.rs", "rank": 11, "score": 74.55005185417605 }, { "content": "#[doc = \"Reader of register TWIHS_IMR\"]\n\npub type R = crate::R<u32, super::TWIHS_IMR>;\n\n#[doc = \"Reader of field `TXCOMP`\"]\n\npub type TXCOMP_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RXRDY`\"]\n\npub type RXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXRDY`\"]\n\npub type TXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `SVACC`\"]\n\npub type SVACC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `GACC`\"]\n\npub type GACC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRE`\"]\n\npub type OVRE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `UNRE`\"]\n\npub type UNRE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `NACK`\"]\n\npub type NACK_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `ARBLST`\"]\n\npub type ARBLST_R = crate::R<bool, bool>;\n", "file_path": "src/atsame70q21b/twihs0/twihs_imr.rs", "rank": 13, "score": 71.57248452673963 }, { "content": "impl R {\n\n #[doc = \"Bit 0 - Receiver Ready (cleared by reading US_RHR)\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n\n RXRDY_R::new((self.bits & 
0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Transmitter Ready (cleared by writing US_THR)\"]\n\n #[inline(always)]\n\n pub fn txrdy(&self) -> TXRDY_R {\n\n TXRDY_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 5 - Overrun Error (cleared by writing a one to bit US_CR.RSTSTA)\"]\n\n #[inline(always)]\n\n pub fn ovre(&self) -> OVRE_R {\n\n OVRE_R::new(((self.bits >> 5) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 9 - Transmitter Empty (cleared by writing US_THR)\"]\n\n #[inline(always)]\n\n pub fn txempty(&self) -> TXEMPTY_R {\n\n TXEMPTY_R::new(((self.bits >> 9) & 0x01) != 0)\n", "file_path": "src/atsame70q21b/usart0/us_csr_spi_mode.rs", "rank": 14, "score": 69.31441767272135 }, { "content": "#[doc = \"Reader of field `XFRDONE`\"]\n\npub type XFRDONE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `ACKRCV`\"]\n\npub type ACKRCV_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `ACKRCVE`\"]\n\npub type ACKRCVE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRE`\"]\n\npub type OVRE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `UNRE`\"]\n\npub type UNRE_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Command Ready (cleared by writing in HSMCI_CMDR)\"]\n\n #[inline(always)]\n\n pub fn cmdrdy(&self) -> CMDRDY_R {\n\n CMDRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Receiver Ready (cleared by reading HSMCI_RDR)\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n\n RXRDY_R::new(((self.bits >> 1) & 0x01) != 0)\n", "file_path": "src/atsame70q21b/hsmci/hsmci_sr.rs", "rank": 16, "score": 68.09020301902237 }, { "content": "#[doc = \"Reader of register US_CSR_LON_SPI_MODE\"]\n\npub type R = crate::R<u32, super::US_CSR_LON_SPI_MODE>;\n\n#[doc = \"Reader of field `RXRDY`\"]\n\npub type RXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXRDY`\"]\n\npub type TXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRE`\"]\n\npub type OVRE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of 
field `TXEMPTY`\"]\n\npub type TXEMPTY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RIIC`\"]\n\npub type RIIC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `DSRIC`\"]\n\npub type DSRIC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `DCDIC`\"]\n\npub type DCDIC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `UNRE`\"]\n\npub type UNRE_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Receiver Ready (cleared by reading US_RHR)\"]\n", "file_path": "src/atsame70q21b/usart0/us_csr_lon_spi_mode.rs", "rank": 17, "score": 64.34949170193563 }, { "content": "#[doc = \"Reader of field `LTXD`\"]\n\npub type LTXD_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `LCOL`\"]\n\npub type LCOL_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `LFET`\"]\n\npub type LFET_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `LRXD`\"]\n\npub type LRXD_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `LBLOVFE`\"]\n\npub type LBLOVFE_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Receiver Ready (cleared by reading US_RHR)\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n\n RXRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Transmitter Ready (cleared by writing US_THR)\"]\n\n #[inline(always)]\n\n pub fn txrdy(&self) -> TXRDY_R {\n\n TXRDY_R::new(((self.bits >> 1) & 0x01) != 0)\n", "file_path": "src/atsame70q21b/usart0/us_csr_lon_mode.rs", "rank": 18, "score": 64.02498091779637 }, { "content": "#[doc = \"Reader of field `RXEN`\"]\n\npub type RXEN_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Transmit Ready\"]\n\n #[inline(always)]\n\n pub fn txrdy(&self) -> TXRDY_R {\n\n TXRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Transmit Empty\"]\n\n #[inline(always)]\n\n pub fn txempty(&self) -> TXEMPTY_R {\n\n TXEMPTY_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 4 - Receive Ready\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n\n 
RXRDY_R::new(((self.bits >> 4) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 5 - Receive Overrun\"]\n\n #[inline(always)]\n", "file_path": "src/atsame70q21b/ssc/ssc_sr.rs", "rank": 19, "score": 63.67480978154772 }, { "content": " #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n\n RXRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Transmitter Ready (cleared by writing US_THR)\"]\n\n #[inline(always)]\n\n pub fn txrdy(&self) -> TXRDY_R {\n\n TXRDY_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 5 - Overrun Error (cleared by writing a one to bit US_CR.RSTSTA)\"]\n\n #[inline(always)]\n\n pub fn ovre(&self) -> OVRE_R {\n\n OVRE_R::new(((self.bits >> 5) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 9 - Transmitter Empty (cleared by writing US_THR)\"]\n\n #[inline(always)]\n\n pub fn txempty(&self) -> TXEMPTY_R {\n\n TXEMPTY_R::new(((self.bits >> 9) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 16 - Ring Indicator Input Change Flag (cleared on read)\"]\n", "file_path": "src/atsame70q21b/usart0/us_csr_lin_mode.rs", "rank": 20, "score": 61.009770799296675 }, { "content": " #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n\n RXRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Transmitter Ready (cleared by writing US_THR)\"]\n\n #[inline(always)]\n\n pub fn txrdy(&self) -> TXRDY_R {\n\n TXRDY_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 5 - Overrun Error (cleared by writing a one to bit US_CR.RSTSTA)\"]\n\n #[inline(always)]\n\n pub fn ovre(&self) -> OVRE_R {\n\n OVRE_R::new(((self.bits >> 5) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 9 - Transmitter Empty (cleared by writing US_THR)\"]\n\n #[inline(always)]\n\n pub fn txempty(&self) -> TXEMPTY_R {\n\n TXEMPTY_R::new(((self.bits >> 9) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 16 - Ring Indicator Input Change Flag (cleared on read)\"]\n", "file_path": "src/atsame70q21b/usart0/us_csr_lon_spi_mode.rs", "rank": 21, "score": 61.009770799296675 }, { "content": "#[doc = \"Reader of 
register I2SC_IMR\"]\n\npub type R = crate::R<u32, super::I2SC_IMR>;\n\n#[doc = \"Reader of field `RXRDY`\"]\n\npub type RXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RXOR`\"]\n\npub type RXOR_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXRDY`\"]\n\npub type TXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXUR`\"]\n\npub type TXUR_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 1 - Receiver Ready Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n\n RXRDY_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 2 - Receiver Overrun Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn rxor(&self) -> RXOR_R {\n\n RXOR_R::new(((self.bits >> 2) & 0x01) != 0)\n", "file_path": "src/atsame70q21b/i2sc0/i2sc_imr.rs", "rank": 23, "score": 59.55738395852629 }, { "content": " #[inline(always)]\n\n pub fn rdrf(&self) -> RDRF_R {\n\n RDRF_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Transmit Data Register Empty (cleared by writing SPI_TDR)\"]\n\n #[inline(always)]\n\n pub fn tdre(&self) -> TDRE_R {\n\n TDRE_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 2 - Transmission Registers Empty (cleared by writing SPI_TDR)\"]\n\n #[inline(always)]\n\n pub fn txempty(&self) -> TXEMPTY_R {\n\n TXEMPTY_R::new(((self.bits >> 2) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 3 - Overrun Error Status (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn ovres(&self) -> OVRES_R {\n\n OVRES_R::new(((self.bits >> 3) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 8 - Chip Select Rise (cleared on read)\"]\n", "file_path": "src/atsame70q21b/qspi/qspi_sr.rs", "rank": 24, "score": 58.96780718696279 }, { "content": "#[doc = \"Reader of field `DCDIC`\"]\n\npub type DCDIC_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - RXRDY Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n\n RXRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - TXRDY Interrupt 
Mask\"]\n\n #[inline(always)]\n\n pub fn txrdy(&self) -> TXRDY_R {\n\n TXRDY_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 5 - Overrun Error Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn ovre(&self) -> OVRE_R {\n\n OVRE_R::new(((self.bits >> 5) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 6 - Framing Error Interrupt Mask\"]\n\n #[inline(always)]\n", "file_path": "src/atsame70q21b/usart0/us_imr_usart_lin_mode.rs", "rank": 25, "score": 58.50375018033541 }, { "content": "#[doc = \"Reader of field `XFRDONE`\"]\n\npub type XFRDONE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `ACKRCV`\"]\n\npub type ACKRCV_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `ACKRCVE`\"]\n\npub type ACKRCVE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRE`\"]\n\npub type OVRE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `UNRE`\"]\n\npub type UNRE_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Command Ready Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn cmdrdy(&self) -> CMDRDY_R {\n\n CMDRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Receiver Ready Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n\n RXRDY_R::new(((self.bits >> 1) & 0x01) != 0)\n", "file_path": "src/atsame70q21b/hsmci/hsmci_imr.rs", "rank": 26, "score": 57.75513814176992 }, { "content": " pub fn svacc(&mut self) -> SVACC_W {\n\n SVACC_W { w: self }\n\n }\n\n #[doc = \"Bit 5 - General Call Access Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn gacc(&mut self) -> GACC_W {\n\n GACC_W { w: self }\n\n }\n\n #[doc = \"Bit 6 - Overrun Error Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn ovre(&mut self) -> OVRE_W {\n\n OVRE_W { w: self }\n\n }\n\n #[doc = \"Bit 7 - Underrun Error Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn unre(&mut self) -> UNRE_W {\n\n UNRE_W { w: self }\n\n }\n\n #[doc = \"Bit 8 - Not Acknowledge Interrupt Disable\"]\n\n #[inline(always)]\n", "file_path": 
"src/atsame70q21b/twihs0/twihs_idr.rs", "rank": 27, "score": 57.495765842922935 }, { "content": " pub fn svacc(&mut self) -> SVACC_W {\n\n SVACC_W { w: self }\n\n }\n\n #[doc = \"Bit 5 - General Call Access Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn gacc(&mut self) -> GACC_W {\n\n GACC_W { w: self }\n\n }\n\n #[doc = \"Bit 6 - Overrun Error Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn ovre(&mut self) -> OVRE_W {\n\n OVRE_W { w: self }\n\n }\n\n #[doc = \"Bit 7 - Underrun Error Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn unre(&mut self) -> UNRE_W {\n\n UNRE_W { w: self }\n\n }\n\n #[doc = \"Bit 8 - Not Acknowledge Interrupt Enable\"]\n\n #[inline(always)]\n", "file_path": "src/atsame70q21b/twihs0/twihs_ier.rs", "rank": 28, "score": 57.49576584292294 }, { "content": "#[doc = \"Reader of register UART_SR\"]\n\npub type R = crate::R<u32, super::UART_SR>;\n\n#[doc = \"Reader of field `RXRDY`\"]\n\npub type RXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXRDY`\"]\n\npub type TXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRE`\"]\n\npub type OVRE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `FRAME`\"]\n\npub type FRAME_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `PARE`\"]\n\npub type PARE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXEMPTY`\"]\n\npub type TXEMPTY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `CMP`\"]\n\npub type CMP_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Receiver Ready\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n", "file_path": "src/atsame70q21b/uart0/uart_sr.rs", "rank": 29, "score": 57.40368723915381 }, { "content": " }\n\n #[doc = \"Bit 30 - Overrun (if FERRCTRL = 1, cleared by writing in HSMCI_CMDR or cleared on read if FERRCTRL = 0)\"]\n\n #[inline(always)]\n\n pub fn ovre(&self) -> OVRE_R {\n\n OVRE_R::new(((self.bits >> 30) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 31 - Underrun (if FERRCTRL = 1, cleared by writing 
in HSMCI_CMDR or cleared on read if FERRCTRL = 0)\"]\n\n #[inline(always)]\n\n pub fn unre(&self) -> UNRE_R {\n\n UNRE_R::new(((self.bits >> 31) & 0x01) != 0)\n\n }\n\n}\n", "file_path": "src/atsame70q21b/hsmci/hsmci_sr.rs", "rank": 30, "score": 57.288091470676335 }, { "content": " #[doc = \"Bit 6 - Overrun Error Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn ovre(&self) -> OVRE_R {\n\n OVRE_R::new(((self.bits >> 6) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 7 - Underrun Error Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn unre(&self) -> UNRE_R {\n\n UNRE_R::new(((self.bits >> 7) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 8 - Not Acknowledge Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn nack(&self) -> NACK_R {\n\n NACK_R::new(((self.bits >> 8) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 9 - Arbitration Lost Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn arblst(&self) -> ARBLST_R {\n\n ARBLST_R::new(((self.bits >> 9) & 0x01) != 0)\n\n }\n", "file_path": "src/atsame70q21b/twihs0/twihs_imr.rs", "rank": 32, "score": 55.86226702554169 }, { "content": " #[inline(always)]\n\n pub fn rdrf(&self) -> RDRF_R {\n\n RDRF_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Transmit Data Register Empty (cleared by writing SPI_TDR)\"]\n\n #[inline(always)]\n\n pub fn tdre(&self) -> TDRE_R {\n\n TDRE_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 2 - Mode Fault Error (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn modf(&self) -> MODF_R {\n\n MODF_R::new(((self.bits >> 2) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 3 - Overrun Error Status (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn ovres(&self) -> OVRES_R {\n\n OVRES_R::new(((self.bits >> 3) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 8 - NSS Rising (cleared on read)\"]\n", "file_path": "src/atsame70q21b/spi0/spi_sr.rs", "rank": 33, "score": 55.36732315707981 }, { "content": " #[doc = \"Bit 1 - Transmitter Ready (cleared by writing US_THR)\"]\n\n #[inline(always)]\n\n pub fn txrdy(&self) -> TXRDY_R 
{\n\n TXRDY_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 2 - Break Received/End of Break (cleared by writing a one to bit US_CR.RSTSTA)\"]\n\n #[inline(always)]\n\n pub fn rxbrk(&self) -> RXBRK_R {\n\n RXBRK_R::new(((self.bits >> 2) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 5 - Overrun Error (cleared by writing a one to bit US_CR.RSTSTA)\"]\n\n #[inline(always)]\n\n pub fn ovre(&self) -> OVRE_R {\n\n OVRE_R::new(((self.bits >> 5) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 9 - Transmitter Empty (cleared by writing US_THR)\"]\n\n #[inline(always)]\n\n pub fn txempty(&self) -> TXEMPTY_R {\n\n TXEMPTY_R::new(((self.bits >> 9) & 0x01) != 0)\n\n }\n", "file_path": "src/atsame70q21b/usart0/us_csr_usart_mode.rs", "rank": 34, "score": 54.71795193909519 }, { "content": "#[doc = \"Reader of register PIO_PCISR\"]\n\npub type R = crate::R<u32, super::PIO_PCISR>;\n\n#[doc = \"Reader of field `DRDY`\"]\n\npub type DRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRE`\"]\n\npub type OVRE_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Parallel Capture Mode Data Ready\"]\n\n #[inline(always)]\n\n pub fn drdy(&self) -> DRDY_R {\n\n DRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Parallel Capture Mode Overrun Error\"]\n\n #[inline(always)]\n\n pub fn ovre(&self) -> OVRE_R {\n\n OVRE_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n}\n", "file_path": "src/atsame70q21b/pioa/pio_pcisr.rs", "rank": 35, "score": 54.48957447343924 }, { "content": "#[doc = \"Reader of register US_IMR_LON_SPI_MODE\"]\n\npub type R = crate::R<u32, super::US_IMR_LON_SPI_MODE>;\n\n#[doc = \"Reader of field `RXRDY`\"]\n\npub type RXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXRDY`\"]\n\npub type TXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRE`\"]\n\npub type OVRE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXEMPTY`\"]\n\npub type TXEMPTY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RIIC`\"]\n\npub type 
RIIC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `DSRIC`\"]\n\npub type DSRIC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `DCDIC`\"]\n\npub type DCDIC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `UNRE`\"]\n\npub type UNRE_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - RXRDY Interrupt Mask\"]\n", "file_path": "src/atsame70q21b/usart0/us_imr_lon_spi_mode.rs", "rank": 36, "score": 53.69641590173603 }, { "content": " #[doc = \"Bit 10 - Clock Wait State Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn scl_ws(&self) -> SCL_WS_R {\n\n SCL_WS_R::new(((self.bits >> 10) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 11 - End Of Slave Access Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn eosacc(&self) -> EOSACC_R {\n\n EOSACC_R::new(((self.bits >> 11) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 16 - Master Code Acknowledge Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn mcack(&self) -> MCACK_R {\n\n MCACK_R::new(((self.bits >> 16) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 18 - Timeout Error Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn tout(&self) -> TOUT_R {\n\n TOUT_R::new(((self.bits >> 18) & 0x01) != 0)\n\n }\n", "file_path": "src/atsame70q21b/twihs0/twihs_imr.rs", "rank": 37, "score": 52.88430506679989 }, { "content": "#[doc = \"Reader of register PIO_PCIMR\"]\n\npub type R = crate::R<u32, super::PIO_PCIMR>;\n\n#[doc = \"Reader of field `DRDY`\"]\n\npub type DRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRE`\"]\n\npub type OVRE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `ENDRX`\"]\n\npub type ENDRX_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RXBUFF`\"]\n\npub type RXBUFF_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Parallel Capture Mode Data Ready Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn drdy(&self) -> DRDY_R {\n\n DRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Parallel Capture Mode Overrun Error Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn 
ovre(&self) -> OVRE_R {\n\n OVRE_R::new(((self.bits >> 1) & 0x01) != 0)\n", "file_path": "src/atsame70q21b/pioa/pio_pcimr.rs", "rank": 38, "score": 52.384881568088566 }, { "content": "#[doc = \"Alignment Errors Register\"]\n\npub mod gmac_ae;\n\n#[doc = \"Receive Resource Errors Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [gmac_rre](gmac_rre) module\"]\n\npub type GMAC_RRE = crate::Reg<u32, _GMAC_RRE>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _GMAC_RRE;\n\n#[doc = \"`read()` method returns [gmac_rre::R](gmac_rre::R) reader structure\"]\n\nimpl crate::Readable for GMAC_RRE {}\n\n#[doc = \"Receive Resource Errors Register\"]\n\npub mod gmac_rre;\n\n#[doc = \"Receive Overrun Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [gmac_roe](gmac_roe) module\"]\n\npub type GMAC_ROE = crate::Reg<u32, _GMAC_ROE>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _GMAC_ROE;\n\n#[doc = \"`read()` method returns [gmac_roe::R](gmac_roe::R) reader structure\"]\n\nimpl crate::Readable for GMAC_ROE {}\n\n#[doc = \"Receive Overrun Register\"]\n\npub mod gmac_roe;\n", "file_path": "src/atsame70q21b/gmac.rs", "rank": 39, "score": 51.760175923474 }, { "content": "#[doc = \"Reader of field `DCDIC`\"]\n\npub type DCDIC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `CTSIC`\"]\n\npub type CTSIC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RI`\"]\n\npub type RI_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `DSR`\"]\n\npub type DSR_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `DCD`\"]\n\npub type DCD_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `CTS`\"]\n\npub type CTS_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `MANERR`\"]\n\npub type 
MANERR_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Receiver Ready (cleared by reading US_RHR)\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n\n RXRDY_R::new((self.bits & 0x01) != 0)\n\n }\n", "file_path": "src/atsame70q21b/usart0/us_csr_usart_mode.rs", "rank": 40, "score": 51.64008700666409 }, { "content": "impl W {\n\n #[doc = \"Bit 0 - RXRDY Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&mut self) -> RXRDY_W {\n\n RXRDY_W { w: self }\n\n }\n\n #[doc = \"Bit 1 - TXRDY Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn txrdy(&mut self) -> TXRDY_W {\n\n TXRDY_W { w: self }\n\n }\n\n #[doc = \"Bit 5 - Overrun Error Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn ovre(&mut self) -> OVRE_W {\n\n OVRE_W { w: self }\n\n }\n\n #[doc = \"Bit 9 - TXEMPTY Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn txempty(&mut self) -> TXEMPTY_W {\n\n TXEMPTY_W { w: self }\n\n }\n\n #[doc = \"Bit 10 - Underrun Error Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn unre(&mut self) -> UNRE_W {\n\n UNRE_W { w: self }\n\n }\n\n}\n", "file_path": "src/atsame70q21b/usart0/us_ier_lon_spi_mode.rs", "rank": 41, "score": 51.37651702076515 }, { "content": "#[doc = \"Reader of register UART_IMR\"]\n\npub type R = crate::R<u32, super::UART_IMR>;\n\n#[doc = \"Reader of field `RXRDY`\"]\n\npub type RXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXRDY`\"]\n\npub type TXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRE`\"]\n\npub type OVRE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `FRAME`\"]\n\npub type FRAME_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `PARE`\"]\n\npub type PARE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXEMPTY`\"]\n\npub type TXEMPTY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `CMP`\"]\n\npub type CMP_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Mask RXRDY Interrupt\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n", 
"file_path": "src/atsame70q21b/uart0/uart_imr.rs", "rank": 42, "score": 51.091444551723086 }, { "content": "#[doc = \"Reader of register WDT_SR\"]\n\npub type R = crate::R<u32, super::WDT_SR>;\n\n#[doc = \"Reader of field `WDUNF`\"]\n\npub type WDUNF_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `WDERR`\"]\n\npub type WDERR_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Watchdog Underflow (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn wdunf(&self) -> WDUNF_R {\n\n WDUNF_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Watchdog Error (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn wderr(&self) -> WDERR_R {\n\n WDERR_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n}\n", "file_path": "src/atsame70q21b/wdt/wdt_sr.rs", "rank": 43, "score": 50.46944898081974 }, { "content": "#[doc = \"Reader of register SSC_IMR\"]\n\npub type R = crate::R<u32, super::SSC_IMR>;\n\n#[doc = \"Reader of field `TXRDY`\"]\n\npub type TXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXEMPTY`\"]\n\npub type TXEMPTY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RXRDY`\"]\n\npub type RXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRUN`\"]\n\npub type OVRUN_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `CP0`\"]\n\npub type CP0_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `CP1`\"]\n\npub type CP1_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXSYN`\"]\n\npub type TXSYN_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RXSYN`\"]\n\npub type RXSYN_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Transmit Ready Interrupt Mask\"]\n", "file_path": "src/atsame70q21b/ssc/ssc_imr.rs", "rank": 44, "score": 50.43415673752621 }, { "content": "#[doc = \"`read()` method returns [gmac_lffe::R](gmac_lffe::R) reader structure\"]\n\nimpl crate::Readable for GMAC_LFFE {}\n\n#[doc = \"Length Field Frame Errors Register\"]\n\npub mod gmac_lffe;\n\n#[doc = \"Receive Symbol Errors 
Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [gmac_rse](gmac_rse) module\"]\n\npub type GMAC_RSE = crate::Reg<u32, _GMAC_RSE>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _GMAC_RSE;\n\n#[doc = \"`read()` method returns [gmac_rse::R](gmac_rse::R) reader structure\"]\n\nimpl crate::Readable for GMAC_RSE {}\n\n#[doc = \"Receive Symbol Errors Register\"]\n\npub mod gmac_rse;\n\n#[doc = \"Alignment Errors Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [gmac_ae](gmac_ae) module\"]\n\npub type GMAC_AE = crate::Reg<u32, _GMAC_AE>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _GMAC_AE;\n\n#[doc = \"`read()` method returns [gmac_ae::R](gmac_ae::R) reader structure\"]\n\nimpl crate::Readable for GMAC_AE {}\n", "file_path": "src/atsame70q21b/gmac.rs", "rank": 45, "score": 50.37969841629152 }, { "content": "pub struct _SSC_TFMR;\n\n#[doc = \"`read()` method returns [ssc_tfmr::R](ssc_tfmr::R) reader structure\"]\n\nimpl crate::Readable for SSC_TFMR {}\n\n#[doc = \"`write(|w| ..)` method takes [ssc_tfmr::W](ssc_tfmr::W) writer structure\"]\n\nimpl crate::Writable for SSC_TFMR {}\n\n#[doc = \"Transmit Frame Mode Register\"]\n\npub mod ssc_tfmr;\n\n#[doc = \"Receive Holding Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [ssc_rhr](ssc_rhr) module\"]\n\npub type SSC_RHR = crate::Reg<u32, _SSC_RHR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _SSC_RHR;\n\n#[doc = \"`read()` method returns [ssc_rhr::R](ssc_rhr::R) reader structure\"]\n\nimpl crate::Readable for SSC_RHR {}\n\n#[doc = \"Receive Holding Register\"]\n\npub mod ssc_rhr;\n\n#[doc = \"Transmit Holding Register\\n\\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [ssc_thr](ssc_thr) module\"]\n\npub type SSC_THR = crate::Reg<u32, _SSC_THR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n", "file_path": "src/atsame70q21b/ssc.rs", "rank": 46, "score": 49.90797393310682 }, { "content": "#[doc = \"Reader of register SDRAMC_ISR\"]\n\npub type R = crate::R<u32, super::SDRAMC_ISR>;\n\n#[doc = \"Reader of field `RES`\"]\n\npub type RES_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Refresh Error Status (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn res(&self) -> RES_R {\n\n RES_R::new((self.bits & 0x01) != 0)\n\n }\n\n}\n", "file_path": "src/atsame70q21b/sdramc/sdramc_isr.rs", "rank": 47, "score": 49.831416163580876 }, { "content": "#[doc = \"Reader of field `DCDIC`\"]\n\npub type DCDIC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `CTSIC`\"]\n\npub type CTSIC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `MANE`\"]\n\npub type MANE_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - RXRDY Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n\n RXRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - TXRDY Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn txrdy(&self) -> TXRDY_R {\n\n TXRDY_R::new(((self.bits >> 1) & 
0x01) != 0)\n\n }\n\n #[doc = \"Bit 2 - Receiver Break Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn rxbrk(&self) -> RXBRK_R {\n", "file_path": "src/atsame70q21b/usart0/us_imr_usart_mode.rs", "rank": 48, "score": 49.7475881011832 }, { "content": "#[doc = \"Reader of register SPI_SR\"]\n\npub type R = crate::R<u32, super::SPI_SR>;\n\n#[doc = \"Reader of field `RDRF`\"]\n\npub type RDRF_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TDRE`\"]\n\npub type TDRE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `MODF`\"]\n\npub type MODF_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRES`\"]\n\npub type OVRES_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `NSSR`\"]\n\npub type NSSR_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXEMPTY`\"]\n\npub type TXEMPTY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `UNDES`\"]\n\npub type UNDES_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `SPIENS`\"]\n\npub type SPIENS_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Receive Data Register Full (cleared by reading SPI_RDR)\"]\n", "file_path": "src/atsame70q21b/spi0/spi_sr.rs", "rank": 49, "score": 49.596852968235126 }, { "content": "#[doc = \"Reader of register QSPI_SR\"]\n\npub type R = crate::R<u32, super::QSPI_SR>;\n\n#[doc = \"Reader of field `RDRF`\"]\n\npub type RDRF_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TDRE`\"]\n\npub type TDRE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXEMPTY`\"]\n\npub type TXEMPTY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRES`\"]\n\npub type OVRES_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `CSR`\"]\n\npub type CSR_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `CSS`\"]\n\npub type CSS_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `INSTRE`\"]\n\npub type INSTRE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `QSPIENS`\"]\n\npub type QSPIENS_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - 
Receive Data Register Full (cleared by reading SPI_RDR)\"]\n", "file_path": "src/atsame70q21b/qspi/qspi_sr.rs", "rank": 50, "score": 49.596852968235126 }, { "content": "#[doc = \"Reader of register US_IMR_SPI_MODE\"]\n\npub type R = crate::R<u32, super::US_IMR_SPI_MODE>;\n\n#[doc = \"Reader of field `RXRDY`\"]\n\npub type RXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXRDY`\"]\n\npub type TXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRE`\"]\n\npub type OVRE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXEMPTY`\"]\n\npub type TXEMPTY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RIIC`\"]\n\npub type RIIC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `DSRIC`\"]\n\npub type DSRIC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `DCDIC`\"]\n\npub type DCDIC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `NSSE`\"]\n\npub type NSSE_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - RXRDY Interrupt Mask\"]\n", "file_path": "src/atsame70q21b/usart0/us_imr_spi_mode.rs", "rank": 51, "score": 49.30938160852448 }, { "content": "pub struct _SSC_THR;\n\n#[doc = \"`write(|w| ..)` method takes [ssc_thr::W](ssc_thr::W) writer structure\"]\n\nimpl crate::Writable for SSC_THR {}\n\n#[doc = \"Transmit Holding Register\"]\n\npub mod ssc_thr;\n\n#[doc = \"Receive Sync. Holding Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [ssc_rshr](ssc_rshr) module\"]\n\npub type SSC_RSHR = crate::Reg<u32, _SSC_RSHR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _SSC_RSHR;\n\n#[doc = \"`read()` method returns [ssc_rshr::R](ssc_rshr::R) reader structure\"]\n\nimpl crate::Readable for SSC_RSHR {}\n\n#[doc = \"Receive Sync. Holding Register\"]\n\npub mod ssc_rshr;\n\n#[doc = \"Transmit Sync. 
Holding Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [ssc_tshr](ssc_tshr) module\"]\n\npub type SSC_TSHR = crate::Reg<u32, _SSC_TSHR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _SSC_TSHR;\n\n#[doc = \"`read()` method returns [ssc_tshr::R](ssc_tshr::R) reader structure\"]\n", "file_path": "src/atsame70q21b/ssc.rs", "rank": 52, "score": 49.251897841059474 }, { "content": " }\n\n #[doc = \"Bit 5 - Overrun Error (cleared by writing a one to bit US_CR.RSTSTA)\"]\n\n #[inline(always)]\n\n pub fn ovre(&self) -> OVRE_R {\n\n OVRE_R::new(((self.bits >> 5) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 9 - Transmitter Empty (cleared by writing US_THR)\"]\n\n #[inline(always)]\n\n pub fn txempty(&self) -> TXEMPTY_R {\n\n TXEMPTY_R::new(((self.bits >> 9) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 16 - Ring Indicator Input Change Flag (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn riic(&self) -> RIIC_R {\n\n RIIC_R::new(((self.bits >> 16) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 17 - Data Set Ready Input Change Flag (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn dsric(&self) -> DSRIC_R {\n\n DSRIC_R::new(((self.bits >> 17) & 0x01) != 0)\n", "file_path": "src/atsame70q21b/usart0/us_csr_lon_mode.rs", "rank": 53, "score": 49.22285323794779 }, { "content": " RXRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Transmitter Ready\"]\n\n #[inline(always)]\n\n pub fn txrdy(&self) -> TXRDY_R {\n\n TXRDY_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 5 - Overrun Error\"]\n\n #[inline(always)]\n\n pub fn ovre(&self) -> OVRE_R {\n\n OVRE_R::new(((self.bits >> 5) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 6 - Framing Error\"]\n\n 
#[inline(always)]\n\n pub fn frame(&self) -> FRAME_R {\n\n FRAME_R::new(((self.bits >> 6) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 7 - Parity Error\"]\n\n #[inline(always)]\n\n pub fn pare(&self) -> PARE_R {\n", "file_path": "src/atsame70q21b/uart0/uart_sr.rs", "rank": 54, "score": 49.22264741200479 }, { "content": " #[inline(always)]\n\n pub fn nssr(&self) -> NSSR_R {\n\n NSSR_R::new(((self.bits >> 8) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 9 - Transmission Registers Empty (cleared by writing SPI_TDR)\"]\n\n #[inline(always)]\n\n pub fn txempty(&self) -> TXEMPTY_R {\n\n TXEMPTY_R::new(((self.bits >> 9) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 10 - Underrun Error Status (Slave mode only) (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn undes(&self) -> UNDES_R {\n\n UNDES_R::new(((self.bits >> 10) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 16 - SPI Enable Status\"]\n\n #[inline(always)]\n\n pub fn spiens(&self) -> SPIENS_R {\n\n SPIENS_R::new(((self.bits >> 16) & 0x01) != 0)\n\n }\n\n}\n", "file_path": "src/atsame70q21b/spi0/spi_sr.rs", "rank": 55, "score": 48.93989920896734 }, { "content": "impl crate::Readable for UART_SR {}\n\n#[doc = \"Status Register\"]\n\npub mod uart_sr;\n\n#[doc = \"Receive Holding Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [uart_rhr](uart_rhr) module\"]\n\npub type UART_RHR = crate::Reg<u32, _UART_RHR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _UART_RHR;\n\n#[doc = \"`read()` method returns [uart_rhr::R](uart_rhr::R) reader structure\"]\n\nimpl crate::Readable for UART_RHR {}\n\n#[doc = \"Receive Holding Register\"]\n\npub mod uart_rhr;\n\n#[doc = \"Transmit Holding Register\\n\\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [uart_thr](uart_thr) module\"]\n\npub type UART_THR = crate::Reg<u32, _UART_THR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _UART_THR;\n\n#[doc = \"`write(|w| ..)` method takes [uart_thr::W](uart_thr::W) writer structure\"]\n\nimpl crate::Writable for UART_THR {}\n\n#[doc = \"Transmit Holding Register\"]\n", "file_path": "src/atsame70q21b/uart0.rs", "rank": 56, "score": 48.74188380791521 }, { "content": "#[doc = \"Reader of register SMC_MODE\"]\n\npub type R = crate::R<u32, super::SMC_MODE>;\n\n#[doc = \"Writer for register SMC_MODE\"]\n\npub type W = crate::W<u32, super::SMC_MODE>;\n\n#[doc = \"Register SMC_MODE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::SMC_MODE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `READ_MODE`\"]\n\npub type READ_MODE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `READ_MODE`\"]\n\npub struct READ_MODE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> READ_MODE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/atsame70q21b/smc/smc_cs_number/smc_mode.rs", "rank": 57, "score": 48.41742998946284 }, { "content": "#[doc = \"Reader of register US_IMR_USART_MODE\"]\n\npub type R = crate::R<u32, super::US_IMR_USART_MODE>;\n\n#[doc = \"Reader of field `RXRDY`\"]\n\npub type RXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXRDY`\"]\n\npub type TXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RXBRK`\"]\n\npub type RXBRK_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRE`\"]\n\npub type OVRE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXEMPTY`\"]\n\npub type TXEMPTY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `ITER`\"]\n\npub type ITER_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `NACK`\"]\n\npub type NACK_R = crate::R<bool, 
bool>;\n\n#[doc = \"Reader of field `RIIC`\"]\n\npub type RIIC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `DSRIC`\"]\n\npub type DSRIC_R = crate::R<bool, bool>;\n", "file_path": "src/atsame70q21b/usart0/us_imr_usart_mode.rs", "rank": 58, "score": 47.691560279758036 }, { "content": "#[doc = \"Reader of register US_CSR_USART_MODE\"]\n\npub type R = crate::R<u32, super::US_CSR_USART_MODE>;\n\n#[doc = \"Reader of field `RXRDY`\"]\n\npub type RXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXRDY`\"]\n\npub type TXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RXBRK`\"]\n\npub type RXBRK_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRE`\"]\n\npub type OVRE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXEMPTY`\"]\n\npub type TXEMPTY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `ITER`\"]\n\npub type ITER_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `NACK`\"]\n\npub type NACK_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RIIC`\"]\n\npub type RIIC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `DSRIC`\"]\n\npub type DSRIC_R = crate::R<bool, bool>;\n", "file_path": "src/atsame70q21b/usart0/us_csr_usart_mode.rs", "rank": 59, "score": 47.69156027975804 }, { "content": "#[doc = \"Reader of field `C_OVR`\"]\n\npub type C_OVR_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `CRC_ERR`\"]\n\npub type CRC_ERR_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `FR_OVR`\"]\n\npub type FR_OVR_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Module Enable\"]\n\n #[inline(always)]\n\n pub fn enable(&self) -> ENABLE_R {\n\n ENABLE_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Module Disable Request has Terminated (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn dis_done(&self) -> DIS_DONE_R {\n\n DIS_DONE_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 2 - Module Software Reset Request has Terminated (cleared on read)\"]\n\n 
#[inline(always)]\n\n pub fn srst(&self) -> SRST_R {\n", "file_path": "src/atsame70q21b/isi/isi_sr.rs", "rank": 60, "score": 47.505443124723115 }, { "content": " #[inline(always)]\n\n pub fn txrdy(&self) -> TXRDY_R {\n\n TXRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Transmit Empty Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn txempty(&self) -> TXEMPTY_R {\n\n TXEMPTY_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 4 - Receive Ready Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n\n RXRDY_R::new(((self.bits >> 4) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 5 - Receive Overrun Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn ovrun(&self) -> OVRUN_R {\n\n OVRUN_R::new(((self.bits >> 5) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 8 - Compare 0 Interrupt Mask\"]\n", "file_path": "src/atsame70q21b/ssc/ssc_imr.rs", "rank": 61, "score": 47.021705906036765 }, { "content": "#[doc = \"Interrupt Mask Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [twihs_imr](twihs_imr) module\"]\n\npub type TWIHS_IMR = crate::Reg<u32, _TWIHS_IMR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _TWIHS_IMR;\n\n#[doc = \"`read()` method returns [twihs_imr::R](twihs_imr::R) reader structure\"]\n\nimpl crate::Readable for TWIHS_IMR {}\n\n#[doc = \"Interrupt Mask Register\"]\n\npub mod twihs_imr;\n\n#[doc = \"Receive Holding Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [twihs_rhr](twihs_rhr) module\"]\n\npub type TWIHS_RHR = crate::Reg<u32, _TWIHS_RHR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _TWIHS_RHR;\n\n#[doc = \"`read()` method returns [twihs_rhr::R](twihs_rhr::R) reader structure\"]\n\nimpl crate::Readable for TWIHS_RHR {}\n\n#[doc = \"Receive Holding Register\"]\n\npub mod twihs_rhr;\n\n#[doc = \"Transmit Holding Register\\n\\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [twihs_thr](twihs_thr) module\"]\n\npub type TWIHS_THR = crate::Reg<u32, _TWIHS_THR>;\n", "file_path": "src/atsame70q21b/twihs0.rs", "rank": 62, "score": 47.0073117331023 }, { "content": "#[doc = \"Reader of register I2SC_SR\"]\n\npub type R = crate::R<u32, super::I2SC_SR>;\n\n#[doc = \"Reader of field `RXEN`\"]\n\npub type RXEN_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RXRDY`\"]\n\npub type RXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RXOR`\"]\n\npub type RXOR_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXEN`\"]\n\npub type TXEN_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXRDY`\"]\n\npub type TXRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXUR`\"]\n\npub type TXUR_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RXORCH`\"]\n\npub type RXORCH_R = crate::R<u8, u8>;\n\n#[doc = \"Reader of field `TXURCH`\"]\n\npub type TXURCH_R = crate::R<u8, u8>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Receiver Enabled\"]\n", "file_path": "src/atsame70q21b/i2sc0/i2sc_sr.rs", "rank": 63, "score": 46.98474608592735 }, { "content": "#[doc(hidden)]\n\npub struct _GMAC_JR;\n\n#[doc = \"`read()` method returns [gmac_jr::R](gmac_jr::R) reader structure\"]\n\nimpl 
crate::Readable for GMAC_JR {}\n\n#[doc = \"Jabbers Received Register\"]\n\npub mod gmac_jr;\n\n#[doc = \"Frame Check Sequence Errors Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [gmac_fcse](gmac_fcse) module\"]\n\npub type GMAC_FCSE = crate::Reg<u32, _GMAC_FCSE>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _GMAC_FCSE;\n\n#[doc = \"`read()` method returns [gmac_fcse::R](gmac_fcse::R) reader structure\"]\n\nimpl crate::Readable for GMAC_FCSE {}\n\n#[doc = \"Frame Check Sequence Errors Register\"]\n\npub mod gmac_fcse;\n\n#[doc = \"Length Field Frame Errors Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [gmac_lffe](gmac_lffe) module\"]\n\npub type GMAC_LFFE = crate::Reg<u32, _GMAC_LFFE>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _GMAC_LFFE;\n", "file_path": "src/atsame70q21b/gmac.rs", "rank": 64, "score": 46.88688299528103 }, { "content": "#[doc = \"Reader of field `MTIOA`\"]\n\npub type MTIOA_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `MTIOB`\"]\n\npub type MTIOB_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Counter Overflow Status (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn covfs(&self) -> COVFS_R {\n\n COVFS_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Load Overrun Status (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn lovrs(&self) -> LOVRS_R {\n\n LOVRS_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 2 - RA Compare Status (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn cpas(&self) -> CPAS_R {\n\n CPAS_R::new(((self.bits >> 2) & 0x01) != 0)\n\n }\n", "file_path": "src/atsame70q21b/tc0/tc_channel/tc_sr.rs", "rank": 65, "score": 46.80877030983345 }, { "content": " #[doc = r\"Clears the field 
bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `TXCOMP`\"]\n\npub type TXCOMP_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `TXCOMP`\"]\n\npub struct TXCOMP_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TXCOMP_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "src/atsame70q21b/gmac/gmac_tsr.rs", "rank": 66, "score": 46.727081241925745 }, { "content": "#[doc = \"Reader of field `LTXD`\"]\n\npub type LTXD_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `LCOL`\"]\n\npub type LCOL_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `LFET`\"]\n\npub type LFET_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `LRXD`\"]\n\npub type LRXD_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `LBLOVFE`\"]\n\npub type LBLOVFE_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - RXRDY Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n\n RXRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - TXRDY Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn txrdy(&self) -> TXRDY_R {\n\n TXRDY_R::new(((self.bits >> 1) & 0x01) != 0)\n", "file_path": "src/atsame70q21b/usart0/us_imr_lon_mode.rs", "rank": 67, "score": 46.44962361305238 }, { "content": "impl crate::Readable for MCAN_TXBCR {}\n\n#[doc = \"`write(|w| ..)` method takes [mcan_txbcr::W](mcan_txbcr::W) writer structure\"]\n\nimpl crate::Writable for MCAN_TXBCR {}\n\n#[doc = \"Transmit Buffer Cancellation Request Register\"]\n\npub mod mcan_txbcr;\n\n#[doc = \"Transmit Buffer Transmission Occurred Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [mcan_txbto](mcan_txbto) module\"]\n\npub type MCAN_TXBTO = crate::Reg<u32, _MCAN_TXBTO>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _MCAN_TXBTO;\n\n#[doc = \"`read()` method returns [mcan_txbto::R](mcan_txbto::R) reader structure\"]\n\nimpl crate::Readable for MCAN_TXBTO {}\n\n#[doc = \"Transmit Buffer Transmission Occurred Register\"]\n\npub mod mcan_txbto;\n\n#[doc = \"Transmit Buffer Cancellation Finished Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [mcan_txbcf](mcan_txbcf) module\"]\n\npub type MCAN_TXBCF = crate::Reg<u32, _MCAN_TXBCF>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _MCAN_TXBCF;\n\n#[doc = \"`read()` method returns [mcan_txbcf::R](mcan_txbcf::R) reader structure\"]\n", "file_path": "src/atsame70q21b/mcan0.rs", "rank": 68, "score": 46.25416850975469 }, { "content": "#[doc = \"Reader of register RTT_SR\"]\n\npub type R = crate::R<u32, super::RTT_SR>;\n\n#[doc = \"Reader of field `ALMS`\"]\n\npub type ALMS_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `RTTINC`\"]\n\npub type RTTINC_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Real-time Alarm Status (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn alms(&self) -> ALMS_R {\n\n ALMS_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Prescaler Roll-over Status (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn rttinc(&self) -> RTTINC_R {\n\n RTTINC_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n}\n", "file_path": "src/atsame70q21b/rtt/rtt_sr.rs", "rank": 69, "score": 46.05740415550771 }, { "content": "#[doc = \"Reader of register DACC_IMR\"]\n\npub type R = crate::R<u32, super::DACC_IMR>;\n\n#[doc = \"Reader of field `TXRDY0`\"]\n\npub type TXRDY0_R = crate::R<bool, bool>;\n\n#[doc = 
\"Reader of field `TXRDY1`\"]\n\npub type TXRDY1_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `EOC0`\"]\n\npub type EOC0_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `EOC1`\"]\n\npub type EOC1_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Transmit Ready Interrupt Mask of channel 0\"]\n\n #[inline(always)]\n\n pub fn txrdy0(&self) -> TXRDY0_R {\n\n TXRDY0_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Transmit Ready Interrupt Mask of channel 1\"]\n\n #[inline(always)]\n\n pub fn txrdy1(&self) -> TXRDY1_R {\n\n TXRDY1_R::new(((self.bits >> 1) & 0x01) != 0)\n", "file_path": "src/atsame70q21b/dacc/dacc_imr.rs", "rank": 71, "score": 45.91971281161721 }, { "content": "#[doc = \"Reader of register DACC_ISR\"]\n\npub type R = crate::R<u32, super::DACC_ISR>;\n\n#[doc = \"Reader of field `TXRDY0`\"]\n\npub type TXRDY0_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXRDY1`\"]\n\npub type TXRDY1_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `EOC0`\"]\n\npub type EOC0_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `EOC1`\"]\n\npub type EOC1_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Transmit Ready Interrupt Flag of channel 0\"]\n\n #[inline(always)]\n\n pub fn txrdy0(&self) -> TXRDY0_R {\n\n TXRDY0_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Transmit Ready Interrupt Flag of channel 1\"]\n\n #[inline(always)]\n\n pub fn txrdy1(&self) -> TXRDY1_R {\n\n TXRDY1_R::new(((self.bits >> 1) & 0x01) != 0)\n", "file_path": "src/atsame70q21b/dacc/dacc_isr.rs", "rank": 72, "score": 45.91971281161721 }, { "content": "#[doc = \"Carrier Sense Errors Register\"]\n\npub mod gmac_cse;\n\n#[doc = \"Octets Received Low Received Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [gmac_orlo](gmac_orlo) module\"]\n\npub type GMAC_ORLO = crate::Reg<u32, _GMAC_ORLO>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _GMAC_ORLO;\n\n#[doc = \"`read()` method returns [gmac_orlo::R](gmac_orlo::R) reader structure\"]\n\nimpl crate::Readable for GMAC_ORLO {}\n\n#[doc = \"Octets Received Low Received Register\"]\n\npub mod gmac_orlo;\n\n#[doc = \"Octets Received High Received Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [gmac_orhi](gmac_orhi) module\"]\n\npub type GMAC_ORHI = crate::Reg<u32, _GMAC_ORHI>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _GMAC_ORHI;\n\n#[doc = \"`read()` method returns [gmac_orhi::R](gmac_orhi::R) reader structure\"]\n\nimpl crate::Readable for GMAC_ORHI {}\n\n#[doc = \"Octets Received High Received Register\"]\n\npub mod gmac_orhi;\n", "file_path": "src/atsame70q21b/gmac.rs", "rank": 73, "score": 45.86501540518072 }, { "content": "#[doc = \"Reader of register MCAN_ECR\"]\n\npub type R = crate::R<u32, super::MCAN_ECR>;\n\n#[doc = \"Reader of field `TEC`\"]\n\npub type TEC_R = crate::R<u8, u8>;\n\n#[doc = \"Reader of field `REC`\"]\n\npub type REC_R = crate::R<u8, u8>;\n\n#[doc = \"Reader of field `RP`\"]\n\npub type RP_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `CEL`\"]\n\npub type CEL_R = crate::R<u8, u8>;\n\nimpl R {\n\n #[doc = \"Bits 0:7 - Transmit Error Counter\"]\n\n #[inline(always)]\n\n pub fn tec(&self) -> TEC_R {\n\n TEC_R::new((self.bits & 0xff) as u8)\n\n }\n\n #[doc = \"Bits 8:14 - Receive Error Counter\"]\n\n #[inline(always)]\n\n pub fn rec(&self) -> REC_R {\n\n REC_R::new(((self.bits >> 8) & 0x7f) as u8)\n", "file_path": "src/atsame70q21b/mcan0/mcan_ecr.rs", "rank": 74, "score": 45.67944506173436 }, { "content": "#[doc = \"Reader 
of field `OVRE9`\"]\n\npub type OVRE9_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRE10`\"]\n\npub type OVRE10_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRE11`\"]\n\npub type OVRE11_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Overrun Error 0\"]\n\n #[inline(always)]\n\n pub fn ovre0(&self) -> OVRE0_R {\n\n OVRE0_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Overrun Error 1\"]\n\n #[inline(always)]\n\n pub fn ovre1(&self) -> OVRE1_R {\n\n OVRE1_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 2 - Overrun Error 2\"]\n\n #[inline(always)]\n\n pub fn ovre2(&self) -> OVRE2_R {\n", "file_path": "src/atsame70q21b/afec0/afec_over.rs", "rank": 75, "score": 45.448669638887694 }, { "content": " }\n\n #[doc = \"Bit 4 - Transmit Frame Corruption Due to AHB Error\"]\n\n #[inline(always)]\n\n pub fn tfc(&self) -> TFC_R {\n\n TFC_R::new(((self.bits >> 4) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 5 - Transmit Complete\"]\n\n #[inline(always)]\n\n pub fn txcomp(&self) -> TXCOMP_R {\n\n TXCOMP_R::new(((self.bits >> 5) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 8 - HRESP Not OK\"]\n\n #[inline(always)]\n\n pub fn hresp(&self) -> HRESP_R {\n\n HRESP_R::new(((self.bits >> 8) & 0x01) != 0)\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 0 - Used Bit Read\"]\n\n #[inline(always)]\n", "file_path": "src/atsame70q21b/gmac/gmac_tsr.rs", "rank": 76, "score": 45.39031652235856 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);\n\n self.w\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 0 - RXRDY Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&mut self) -> RXRDY_W {\n\n RXRDY_W { w: self }\n\n }\n\n #[doc = \"Bit 1 - TXRDY Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn txrdy(&mut self) -> TXRDY_W {\n\n TXRDY_W { w: self }\n\n }\n\n #[doc = \"Bit 5 - Overrun Error Interrupt Enable\"]\n\n #[inline(always)]\n\n pub 
fn ovre(&mut self) -> OVRE_W {\n", "file_path": "src/atsame70q21b/usart0/us_idr_lon_spi_mode.rs", "rank": 77, "score": 45.38465532766264 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);\n\n self.w\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 0 - RXRDY Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&mut self) -> RXRDY_W {\n\n RXRDY_W { w: self }\n\n }\n\n #[doc = \"Bit 1 - TXRDY Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn txrdy(&mut self) -> TXRDY_W {\n\n TXRDY_W { w: self }\n\n }\n\n #[doc = \"Bit 5 - Overrun Error Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn ovre(&mut self) -> OVRE_W {\n", "file_path": "src/atsame70q21b/usart0/us_idr_spi_mode.rs", "rank": 78, "score": 45.38465532766264 }, { "content": " #[inline(always)]\n\n pub fn riic(&self) -> RIIC_R {\n\n RIIC_R::new(((self.bits >> 16) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 17 - Data Set Ready Input Change Flag (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn dsric(&self) -> DSRIC_R {\n\n DSRIC_R::new(((self.bits >> 17) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 18 - Data Carrier Detect Input Change Flag (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn dcdic(&self) -> DCDIC_R {\n\n DCDIC_R::new(((self.bits >> 18) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 10 - SPI Underrun Error\"]\n\n #[inline(always)]\n\n pub fn unre(&self) -> UNRE_R {\n\n UNRE_R::new(((self.bits >> 10) & 0x01) != 0)\n\n }\n\n}\n", "file_path": "src/atsame70q21b/usart0/us_csr_lon_spi_mode.rs", "rank": 79, "score": 45.13325134320132 }, { "content": " pub fn mcack(&mut self) -> MCACK_W {\n\n MCACK_W { w: self }\n\n }\n\n #[doc = \"Bit 18 - Timeout Error Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn tout(&mut self) -> TOUT_W {\n\n TOUT_W { w: self }\n\n }\n\n #[doc = \"Bit 19 - PEC Error Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn pecerr(&mut self) -> PECERR_W {\n\n PECERR_W { w: self 
}\n\n }\n\n #[doc = \"Bit 20 - SMBus Default Address Match Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn smbdam(&mut self) -> SMBDAM_W {\n\n SMBDAM_W { w: self }\n\n }\n\n #[doc = \"Bit 21 - SMBus Host Header Address Match Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn smbhhm(&mut self) -> SMBHHM_W {\n\n SMBHHM_W { w: self }\n\n }\n\n}\n", "file_path": "src/atsame70q21b/twihs0/twihs_idr.rs", "rank": 80, "score": 45.09833989226381 }, { "content": " pub fn mcack(&mut self) -> MCACK_W {\n\n MCACK_W { w: self }\n\n }\n\n #[doc = \"Bit 18 - Timeout Error Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn tout(&mut self) -> TOUT_W {\n\n TOUT_W { w: self }\n\n }\n\n #[doc = \"Bit 19 - PEC Error Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn pecerr(&mut self) -> PECERR_W {\n\n PECERR_W { w: self }\n\n }\n\n #[doc = \"Bit 20 - SMBus Default Address Match Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn smbdam(&mut self) -> SMBDAM_W {\n\n SMBDAM_W { w: self }\n\n }\n\n #[doc = \"Bit 21 - SMBus Host Header Address Match Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn smbhhm(&mut self) -> SMBHHM_W {\n\n SMBHHM_W { w: self }\n\n }\n\n}\n", "file_path": "src/atsame70q21b/twihs0/twihs_ier.rs", "rank": 81, "score": 45.09833989226381 }, { "content": "#[doc = \"`read()` method returns [gmac_lc::R](gmac_lc::R) reader structure\"]\n\nimpl crate::Readable for GMAC_LC {}\n\n#[doc = \"Late Collisions Register\"]\n\npub mod gmac_lc;\n\n#[doc = \"Deferred Transmission Frames Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [gmac_dtf](gmac_dtf) module\"]\n\npub type GMAC_DTF = crate::Reg<u32, _GMAC_DTF>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _GMAC_DTF;\n\n#[doc = \"`read()` method returns [gmac_dtf::R](gmac_dtf::R) reader structure\"]\n\nimpl crate::Readable for GMAC_DTF {}\n\n#[doc = \"Deferred Transmission Frames Register\"]\n\npub mod gmac_dtf;\n\n#[doc = \"Carrier Sense Errors Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [gmac_cse](gmac_cse) module\"]\n\npub type GMAC_CSE = crate::Reg<u32, _GMAC_CSE>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _GMAC_CSE;\n\n#[doc = \"`read()` method returns [gmac_cse::R](gmac_cse::R) reader structure\"]\n\nimpl crate::Readable for GMAC_CSE {}\n", "file_path": "src/atsame70q21b/gmac.rs", "rank": 82, "score": 45.01005150338879 }, { "content": " #[doc = \"Bit 0 - Data Ready (cleared by setting bit START or bit SWRST in AES_CR or by reading AES_ODATARx)\"]\n\n #[inline(always)]\n\n pub fn datrdy(&self) -> DATRDY_R {\n\n DATRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 8 - Unspecified Register Access Detection Status (cleared by writing SWRST in AES_CR)\"]\n\n #[inline(always)]\n\n pub fn urad(&self) -> URAD_R {\n\n URAD_R::new(((self.bits >> 8) & 0x01) != 0)\n\n }\n\n #[doc = \"Bits 12:15 - Unspecified Register Access (cleared by writing SWRST in AES_CR)\"]\n\n #[inline(always)]\n\n pub fn urat(&self) -> URAT_R {\n\n URAT_R::new(((self.bits >> 12) & 0x0f) as u8)\n\n }\n\n #[doc = \"Bit 16 - GCM Tag Ready\"]\n\n #[inline(always)]\n\n pub fn tagrdy(&self) -> TAGRDY_R {\n\n TAGRDY_R::new(((self.bits >> 16) & 0x01) != 0)\n\n }\n\n}\n", "file_path": "src/atsame70q21b/aes/aes_isr.rs", "rank": 83, "score": 44.85315348395315 }, { "content": 
"#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _US_RHR;\n\n#[doc = \"`read()` method returns [us_rhr::R](us_rhr::R) reader structure\"]\n\nimpl crate::Readable for US_RHR {}\n\n#[doc = \"Receive Holding Register\"]\n\npub mod us_rhr;\n\n#[doc = \"Transmit Holding Register\\n\\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [us_thr](us_thr) module\"]\n\npub type US_THR = crate::Reg<u32, _US_THR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _US_THR;\n\n#[doc = \"`write(|w| ..)` method takes [us_thr::W](us_thr::W) writer structure\"]\n\nimpl crate::Writable for US_THR {}\n\n#[doc = \"Transmit Holding Register\"]\n\npub mod us_thr;\n\n#[doc = \"Baud Rate Generator Register\\n\\nThis register you can [`read`](crate::generic::Reg::read), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`modify`](crate::generic::Reg::modify). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [us_brgr](us_brgr) module\"]\n\npub type US_BRGR = crate::Reg<u32, _US_BRGR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n", "file_path": "src/atsame70q21b/usart0.rs", "rank": 84, "score": 44.845353654879766 }, { "content": "#[doc = \"Reader of register AES_IMR\"]\n\npub type R = crate::R<u32, super::AES_IMR>;\n\n#[doc = \"Reader of field `DATRDY`\"]\n\npub type DATRDY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `URAD`\"]\n\npub type URAD_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TAGRDY`\"]\n\npub type TAGRDY_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Data Ready Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn datrdy(&self) -> DATRDY_R {\n\n DATRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 8 - Unspecified Register Access Detection Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn urad(&self) -> URAD_R {\n\n URAD_R::new(((self.bits >> 8) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 16 - GCM Tag Ready Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn tagrdy(&self) -> TAGRDY_R {\n\n TAGRDY_R::new(((self.bits >> 16) & 0x01) != 0)\n\n }\n\n}\n", "file_path": "src/atsame70q21b/aes/aes_imr.rs", "rank": 85, "score": 44.76162668585585 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 0 - Transmit Ready Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn txrdy(&mut self) -> TXRDY_W {\n\n TXRDY_W { w: self }\n\n }\n\n #[doc = \"Bit 1 - Transmit Empty Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn txempty(&mut self) -> TXEMPTY_W {\n\n TXEMPTY_W { w: self }\n\n }\n\n #[doc = \"Bit 4 - Receive Ready Interrupt Disable\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&mut self) -> RXRDY_W {\n", "file_path": "src/atsame70q21b/ssc/ssc_idr.rs", "rank": 
86, "score": 44.683186576127085 }, { "content": " #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);\n\n self.w\n\n }\n\n}\n\nimpl W {\n\n #[doc = \"Bit 0 - Transmit Ready Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn txrdy(&mut self) -> TXRDY_W {\n\n TXRDY_W { w: self }\n\n }\n\n #[doc = \"Bit 1 - Transmit Empty Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn txempty(&mut self) -> TXEMPTY_W {\n\n TXEMPTY_W { w: self }\n\n }\n\n #[doc = \"Bit 4 - Receive Ready Interrupt Enable\"]\n\n #[inline(always)]\n\n pub fn rxrdy(&mut self) -> RXRDY_W {\n", "file_path": "src/atsame70q21b/ssc/ssc_ier.rs", "rank": 87, "score": 44.683186576127085 }, { "content": "#[doc = \"`read()` method returns [gmac_efrsh::R](gmac_efrsh::R) reader structure\"]\n\nimpl crate::Readable for GMAC_EFRSH {}\n\n#[doc = \"PTP Event Frame Received Seconds High Register\"]\n\npub mod gmac_efrsh;\n\n#[doc = \"PTP Peer Event Frame Transmitted Seconds High Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [gmac_peftsh](gmac_peftsh) module\"]\n\npub type GMAC_PEFTSH = crate::Reg<u32, _GMAC_PEFTSH>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _GMAC_PEFTSH;\n\n#[doc = \"`read()` method returns [gmac_peftsh::R](gmac_peftsh::R) reader structure\"]\n\nimpl crate::Readable for GMAC_PEFTSH {}\n\n#[doc = \"PTP Peer Event Frame Transmitted Seconds High Register\"]\n\npub mod gmac_peftsh;\n\n#[doc = \"PTP Peer Event Frame Received Seconds High Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [gmac_pefrsh](gmac_pefrsh) module\"]\n\npub type GMAC_PEFRSH = crate::Reg<u32, _GMAC_PEFRSH>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _GMAC_PEFRSH;\n\n#[doc = \"`read()` method returns [gmac_pefrsh::R](gmac_pefrsh::R) reader structure\"]\n\nimpl crate::Readable for GMAC_PEFRSH {}\n", "file_path": "src/atsame70q21b/gmac.rs", "rank": 88, "score": 44.662047634332296 }, { "content": " #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bit 0 - Slave Receiver Data Phase NACK enable\"]\n\n #[inline(always)]\n\n pub fn nacken(&self) -> NACKEN_R {\n\n NACKEN_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 2 - SMBus Default Address\"]\n\n #[inline(always)]\n", "file_path": "src/atsame70q21b/twihs0/twihs_smr.rs", "rank": 89, "score": 44.53311856386096 }, { "content": "#[doc = \"PTP Peer Event Frame Received Seconds High Register\"]\n\npub mod gmac_pefrsh;\n\n#[doc = \"Octets Transmitted Low Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [gmac_otlo](gmac_otlo) module\"]\n\npub type GMAC_OTLO = crate::Reg<u32, _GMAC_OTLO>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _GMAC_OTLO;\n\n#[doc = \"`read()` method returns [gmac_otlo::R](gmac_otlo::R) reader structure\"]\n\nimpl crate::Readable for GMAC_OTLO {}\n\n#[doc = \"Octets Transmitted Low Register\"]\n\npub mod gmac_otlo;\n\n#[doc = \"Octets Transmitted High Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [gmac_othi](gmac_othi) module\"]\n\npub type GMAC_OTHI = crate::Reg<u32, _GMAC_OTHI>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _GMAC_OTHI;\n\n#[doc = \"`read()` method returns [gmac_othi::R](gmac_othi::R) reader structure\"]\n\nimpl crate::Readable for GMAC_OTHI {}\n\n#[doc = \"Octets Transmitted High Register\"]\n\npub mod gmac_othi;\n", "file_path": "src/atsame70q21b/gmac.rs", "rank": 90, "score": 44.50670895523692 }, { "content": " #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n\n RXRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - TXRDY Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn txrdy(&self) -> TXRDY_R {\n\n TXRDY_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 5 - Overrun Error Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn ovre(&self) -> OVRE_R {\n\n OVRE_R::new(((self.bits >> 5) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 9 - TXEMPTY Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn txempty(&self) -> TXEMPTY_R {\n\n TXEMPTY_R::new(((self.bits >> 9) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 16 - Ring Indicator Input Change Mask\"]\n", "file_path": "src/atsame70q21b/usart0/us_imr_spi_mode.rs", "rank": 91, "score": 44.41884300824445 }, { "content": " #[inline(always)]\n\n pub fn rxrdy(&self) -> RXRDY_R {\n\n RXRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - TXRDY Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn txrdy(&self) -> TXRDY_R {\n\n TXRDY_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 5 - Overrun Error Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn ovre(&self) -> OVRE_R {\n\n OVRE_R::new(((self.bits >> 5) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 9 - TXEMPTY Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn txempty(&self) -> TXEMPTY_R {\n\n TXEMPTY_R::new(((self.bits >> 9) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 16 - Ring Indicator Input Change Mask\"]\n", 
"file_path": "src/atsame70q21b/usart0/us_imr_lon_spi_mode.rs", "rank": 92, "score": 44.41884300824446 }, { "content": " RXRDY_R::new((self.bits & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 1 - Disable TXRDY Interrupt\"]\n\n #[inline(always)]\n\n pub fn txrdy(&self) -> TXRDY_R {\n\n TXRDY_R::new(((self.bits >> 1) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 5 - Mask Overrun Error Interrupt\"]\n\n #[inline(always)]\n\n pub fn ovre(&self) -> OVRE_R {\n\n OVRE_R::new(((self.bits >> 5) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 6 - Mask Framing Error Interrupt\"]\n\n #[inline(always)]\n\n pub fn frame(&self) -> FRAME_R {\n\n FRAME_R::new(((self.bits >> 6) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 7 - Mask Parity Error Interrupt\"]\n\n #[inline(always)]\n\n pub fn pare(&self) -> PARE_R {\n", "file_path": "src/atsame70q21b/uart0/uart_imr.rs", "rank": 93, "score": 44.413412993691004 }, { "content": "pub mod gmac_tbft1023;\n\n#[doc = \"1024 to 1518 Byte Frames Transmitted Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [gmac_tbft1518](gmac_tbft1518) module\"]\n\npub type GMAC_TBFT1518 = crate::Reg<u32, _GMAC_TBFT1518>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _GMAC_TBFT1518;\n\n#[doc = \"`read()` method returns [gmac_tbft1518::R](gmac_tbft1518::R) reader structure\"]\n\nimpl crate::Readable for GMAC_TBFT1518 {}\n\n#[doc = \"1024 to 1518 Byte Frames Transmitted Register\"]\n\npub mod gmac_tbft1518;\n\n#[doc = \"Greater Than 1518 Byte Frames Transmitted Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [gmac_gtbft1518](gmac_gtbft1518) module\"]\n\npub type GMAC_GTBFT1518 = crate::Reg<u32, _GMAC_GTBFT1518>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _GMAC_GTBFT1518;\n\n#[doc = \"`read()` method returns [gmac_gtbft1518::R](gmac_gtbft1518::R) reader structure\"]\n\nimpl crate::Readable for GMAC_GTBFT1518 {}\n\n#[doc = \"Greater Than 1518 Byte Frames Transmitted Register\"]\n\npub mod gmac_gtbft1518;\n\n#[doc = \"Transmit Underruns Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [gmac_tur](gmac_tur) module\"]\n", "file_path": "src/atsame70q21b/gmac.rs", "rank": 94, "score": 44.23804131772707 }, { "content": "#[doc = \"Reader of register TWIHS_RHR\"]\n\npub type R = crate::R<u32, super::TWIHS_RHR>;\n\n#[doc = \"Reader of field `RXDATA`\"]\n\npub type RXDATA_R = crate::R<u8, u8>;\n\nimpl R {\n\n #[doc = \"Bits 0:7 - Master or Slave Receive Holding Data\"]\n\n #[inline(always)]\n\n pub fn rxdata(&self) -> RXDATA_R {\n\n RXDATA_R::new((self.bits & 0xff) as u8)\n\n }\n\n}\n", "file_path": "src/atsame70q21b/twihs0/twihs_rhr.rs", "rank": 95, "score": 44.19631453987939 }, { "content": "#[doc = \"Reader of field `LINTC`\"]\n\npub type LINTC_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `LINBLS`\"]\n\npub type LINBLS_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `LINBE`\"]\n\npub type LINBE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `LINISFE`\"]\n\npub type LINISFE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `LINIPE`\"]\n\npub type LINIPE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `LINCE`\"]\n\npub type LINCE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `LINSNRE`\"]\n\npub type LINSNRE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `LINSTE`\"]\n\npub 
type LINSTE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `LINHTE`\"]\n\npub type LINHTE_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Receiver Ready (cleared by reading US_RHR)\"]\n", "file_path": "src/atsame70q21b/usart0/us_csr_lin_mode.rs", "rank": 96, "score": 44.07143501565635 }, { "content": "#[doc(hidden)]\n\npub struct _SPI_MR;\n\n#[doc = \"`read()` method returns [spi_mr::R](spi_mr::R) reader structure\"]\n\nimpl crate::Readable for SPI_MR {}\n\n#[doc = \"`write(|w| ..)` method takes [spi_mr::W](spi_mr::W) writer structure\"]\n\nimpl crate::Writable for SPI_MR {}\n\n#[doc = \"Mode Register\"]\n\npub mod spi_mr;\n\n#[doc = \"Receive Data Register\\n\\nThis register you can [`read`](crate::generic::Reg::read). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [spi_rdr](spi_rdr) module\"]\n\npub type SPI_RDR = crate::Reg<u32, _SPI_RDR>;\n\n#[allow(missing_docs)]\n\n#[doc(hidden)]\n\npub struct _SPI_RDR;\n\n#[doc = \"`read()` method returns [spi_rdr::R](spi_rdr::R) reader structure\"]\n\nimpl crate::Readable for SPI_RDR {}\n\n#[doc = \"Receive Data Register\"]\n\npub mod spi_rdr;\n\n#[doc = \"Transmit Data Register\\n\\nThis register you can [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`write_with_zero`](crate::generic::Reg::write_with_zero). 
See [API](https://docs.rs/svd2rust/#read--modify--write-api).\\n\\nFor information about available fields see [spi_tdr](spi_tdr) module\"]\n\npub type SPI_TDR = crate::Reg<u32, _SPI_TDR>;\n\n#[allow(missing_docs)]\n", "file_path": "src/atsame70q21b/spi0.rs", "rank": 97, "score": 43.99758224868123 }, { "content": " }\n\n #[doc = \"Bit 21 - Data CRC Error (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn dcrce(&self) -> DCRCE_R {\n\n DCRCE_R::new(((self.bits >> 21) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 22 - Data Time-out Error (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn dtoe(&self) -> DTOE_R {\n\n DTOE_R::new(((self.bits >> 22) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 23 - Completion Signal Time-out Error (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn cstoe(&self) -> CSTOE_R {\n\n CSTOE_R::new(((self.bits >> 23) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 24 - DMA Block Overrun Error (cleared on read)\"]\n\n #[inline(always)]\n\n pub fn blkovre(&self) -> BLKOVRE_R {\n\n BLKOVRE_R::new(((self.bits >> 24) & 0x01) != 0)\n", "file_path": "src/atsame70q21b/hsmci/hsmci_sr.rs", "rank": 98, "score": 43.96738949440614 }, { "content": "#[doc = \"Reader of register SPI_IMR\"]\n\npub type R = crate::R<u32, super::SPI_IMR>;\n\n#[doc = \"Reader of field `RDRF`\"]\n\npub type RDRF_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TDRE`\"]\n\npub type TDRE_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `MODF`\"]\n\npub type MODF_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `OVRES`\"]\n\npub type OVRES_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `NSSR`\"]\n\npub type NSSR_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `TXEMPTY`\"]\n\npub type TXEMPTY_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `UNDES`\"]\n\npub type UNDES_R = crate::R<bool, bool>;\n\nimpl R {\n\n #[doc = \"Bit 0 - Receive Data Register Full Interrupt Mask\"]\n\n #[inline(always)]\n\n pub fn rdrf(&self) -> RDRF_R {\n", "file_path": 
"src/atsame70q21b/spi0/spi_imr.rs", "rank": 99, "score": 43.94636841848375 } ]
Rust
src/combinators/sequential.rs
codgician/parsic
1b2a32ab568ebf93ceb90db8b21ee931851cfd96
use crate::combinators::FunctorExt; use crate::core::{return_none, Parsable, Parser}; pub fn and<'f, A: 'f, B: 'f, S: Clone>( p1: impl Parsable<Stream = S, Result = A> + 'f, p2: impl Parsable<Stream = S, Result = B> + 'f, ) -> Parser<'f, (A, B), S> { Parser::new(move |stream: &mut S, logger| { let st = stream.clone(); p1.parse(stream, logger) .and_then(|x| p2.parse(stream, logger).map(|y| (x, y))) .or_else(|| return_none(stream, &st)) }) } pub fn left<'f, A: 'f, B: 'f, S: Clone + 'f>( p1: impl Parsable<Stream = S, Result = A> + 'f, p2: impl Parsable<Stream = S, Result = B> + 'f, ) -> Parser<'f, A, S> { p1.and(p2).map(|(l, _)| l) } pub fn right<'f, A: 'f, B: 'f, S: Clone + 'f>( p1: impl Parsable<Stream = S, Result = A> + 'f, p2: impl Parsable<Stream = S, Result = B> + 'f, ) -> Parser<'f, B, S> { p1.and(p2).map(|(_, r)| r) } pub fn mid<'f, A: 'f, B: 'f, C: 'f, S: Clone + 'f>( p1: impl Parsable<Stream = S, Result = A> + 'f, p2: impl Parsable<Stream = S, Result = B> + 'f, p3: impl Parsable<Stream = S, Result = C> + 'f, ) -> Parser<'f, B, S> { p1.and(p2).and(p3).map(|((_, m), _)| m) } pub trait SequentialExt<'f, A: 'f, S>: Parsable<Stream = S, Result = A> { fn and<B: 'f>(self, p: impl Parsable<Stream = S, Result = B> + 'f) -> Parser<'f, (A, B), S> where S: Clone, Self: Sized + 'f, { and(self, p) } fn left<B: 'f>(self, p: impl Parsable<Stream = S, Result = B> + 'f) -> Parser<'f, A, S> where S: Clone + 'f, Self: Sized + 'f, { left(self, p) } fn right<B: 'f>(self, p: impl Parsable<Stream = S, Result = B> + 'f) -> Parser<'f, B, S> where S: Clone + 'f, Self: Sized + 'f, { right(self, p) } fn mid<B: 'f, C: 'f>( self, p1: impl Parsable<Stream = S, Result = B> + 'f, p2: impl Parsable<Stream = S, Result = C> + 'f, ) -> Parser<'f, B, S> where S: Clone + 'f, Self: Sized + 'f, { mid(self, p1, p2) } } impl<'f, A: 'f, S, P: Parsable<Stream = S, Result = A>> SequentialExt<'f, A, S> for P {} #[cfg(test)] mod test_sequential { use crate::combinators::*; use crate::core::Parsable; use 
crate::primitives::{char, satisfy, CharStream}; #[test] fn different_type_ok() { let parser = satisfy(|&ch| ch.is_digit(10)) .map_option(|ch| ch.to_digit(10)) .and(char('A')); let mut st = CharStream::new("1A+"); let (res, logs) = parser.exec(&mut st); assert_eq!(Some((1, 'A')), res); assert_eq!("+", st.as_str()); assert_eq!(0, logs.len()); } #[test] fn left_fail() { let parser = char('A').and(char('B')); let mut st = CharStream::new("BBC"); let (res, logs) = parser.exec(&mut st); assert_eq!(None, res); assert_eq!("BBC", st.as_str()); assert_eq!(1, logs.len()); } #[test] fn right_fail() { let parser = char('A').and(char('B')); let mut st = CharStream::new("ACC"); let (res, logs) = parser.exec(&mut st); assert_eq!(None, res); assert_eq!("ACC", st.as_str()); assert_eq!(1, logs.len()); } #[test] fn both_fail() { let parser = char('A').and(char('B')); let mut st = CharStream::new("CCC"); let (res, logs) = parser.exec(&mut st); assert_eq!(None, res); assert_eq!("CCC", st.as_str()); assert_eq!(1, logs.len()); } }
use crate::combinators::FunctorExt; use crate::core::{return_none, Parsable, Parser};
pub fn left<'f, A: 'f, B: 'f, S: Clone + 'f>( p1: impl Parsable<Stream = S, Result = A> + 'f, p2: impl Parsable<Stream = S, Result = B> + 'f, ) -> Parser<'f, A, S> { p1.and(p2).map(|(l, _)| l) } pub fn right<'f, A: 'f, B: 'f, S: Clone + 'f>( p1: impl Parsable<Stream = S, Result = A> + 'f, p2: impl Parsable<Stream = S, Result = B> + 'f, ) -> Parser<'f, B, S> { p1.and(p2).map(|(_, r)| r) } pub fn mid<'f, A: 'f, B: 'f, C: 'f, S: Clone + 'f>( p1: impl Parsable<Stream = S, Result = A> + 'f, p2: impl Parsable<Stream = S, Result = B> + 'f, p3: impl Parsable<Stream = S, Result = C> + 'f, ) -> Parser<'f, B, S> { p1.and(p2).and(p3).map(|((_, m), _)| m) } pub trait SequentialExt<'f, A: 'f, S>: Parsable<Stream = S, Result = A> { fn and<B: 'f>(self, p: impl Parsable<Stream = S, Result = B> + 'f) -> Parser<'f, (A, B), S> where S: Clone, Self: Sized + 'f, { and(self, p) } fn left<B: 'f>(self, p: impl Parsable<Stream = S, Result = B> + 'f) -> Parser<'f, A, S> where S: Clone + 'f, Self: Sized + 'f, { left(self, p) } fn right<B: 'f>(self, p: impl Parsable<Stream = S, Result = B> + 'f) -> Parser<'f, B, S> where S: Clone + 'f, Self: Sized + 'f, { right(self, p) } fn mid<B: 'f, C: 'f>( self, p1: impl Parsable<Stream = S, Result = B> + 'f, p2: impl Parsable<Stream = S, Result = C> + 'f, ) -> Parser<'f, B, S> where S: Clone + 'f, Self: Sized + 'f, { mid(self, p1, p2) } } impl<'f, A: 'f, S, P: Parsable<Stream = S, Result = A>> SequentialExt<'f, A, S> for P {} #[cfg(test)] mod test_sequential { use crate::combinators::*; use crate::core::Parsable; use crate::primitives::{char, satisfy, CharStream}; #[test] fn different_type_ok() { let parser = satisfy(|&ch| ch.is_digit(10)) .map_option(|ch| ch.to_digit(10)) .and(char('A')); let mut st = CharStream::new("1A+"); let (res, logs) = parser.exec(&mut st); assert_eq!(Some((1, 'A')), res); assert_eq!("+", st.as_str()); assert_eq!(0, logs.len()); } #[test] fn left_fail() { let parser = char('A').and(char('B')); let mut st = CharStream::new("BBC"); 
let (res, logs) = parser.exec(&mut st); assert_eq!(None, res); assert_eq!("BBC", st.as_str()); assert_eq!(1, logs.len()); } #[test] fn right_fail() { let parser = char('A').and(char('B')); let mut st = CharStream::new("ACC"); let (res, logs) = parser.exec(&mut st); assert_eq!(None, res); assert_eq!("ACC", st.as_str()); assert_eq!(1, logs.len()); } #[test] fn both_fail() { let parser = char('A').and(char('B')); let mut st = CharStream::new("CCC"); let (res, logs) = parser.exec(&mut st); assert_eq!(None, res); assert_eq!("CCC", st.as_str()); assert_eq!(1, logs.len()); } }
/// Sequentially apply `p1` then `p2`, returning both results as a pair.
///
/// If either parser fails, the stream is restored to the state it had
/// before `p1` ran (backtracking via `return_none`) and `None` is returned.
pub fn and<'f, A: 'f, B: 'f, S: Clone>(
    p1: impl Parsable<Stream = S, Result = A> + 'f,
    p2: impl Parsable<Stream = S, Result = B> + 'f,
) -> Parser<'f, (A, B), S> {
    Parser::new(move |stream: &mut S, logger| {
        // Snapshot taken up front so any failure can roll the stream back.
        let backup = stream.clone();
        match p1.parse(stream, logger) {
            Some(first) => match p2.parse(stream, logger) {
                Some(second) => Some((first, second)),
                None => return_none(stream, &backup),
            },
            None => return_none(stream, &backup),
        }
    })
}
function_block-full_function
[ { "content": "/// # `Parsable` trait\n\n/// Anything that is parsable should implement `Parsable` trait,\n\n/// The return types of all the combinators and combinators in this library\n\n/// Implement `Parsable` trait, meaning you can treat them as parsers\n\n/// and call `parse()` or `exec()` from them to parse given input.\n\npub trait Parsable {\n\n type Stream;\n\n type Result;\n\n\n\n /// Parse function\n\n fn parse(&self, stream: &mut Self::Stream, logger: &mut ParseLogger) -> Option<Self::Result>;\n\n\n\n /// Wrapper for parse function\n\n fn exec(&self, stream: &mut Self::Stream) -> (Option<Self::Result>, ParseLogger) {\n\n let mut logger = ParseLogger::default();\n\n (self.parse(stream, &mut logger), logger)\n\n }\n\n\n\n /// Convert into a Parser\n\n fn into_parser<'f>(self) -> Parser<'f, Self::Result, Self::Stream>\n\n where\n\n Self: Sized + 'f,\n\n {\n\n Parser::new(move |stream: &mut Self::Stream, logger| self.parse(stream, logger))\n\n }\n", "file_path": "src/core/parser.rs", "rank": 0, "score": 49528.57333004632 }, { "content": "type FixFn<'f, A, S> = dyn Fn(Parser<'f, A, S>) -> Parser<'f, A, S> + 'f;\n\npub struct Fix<'f, A, S>(Rc<FixFn<'f, A, S>>);\n\n\n\nimpl<'f, A, S> Clone for Fix<'f, A, S> {\n\n fn clone(&self) -> Self {\n\n Fix(self.0.clone())\n\n }\n\n}\n\n\n\nimpl<'f, A: 'f, S: 'f> Parsable for Fix<'f, A, S> {\n\n type Stream = S;\n\n type Result = A;\n\n fn parse(&self, stream: &mut S, logger: &mut ParseLogger) -> Option<A> {\n\n //! fix f = f (fix f)\n\n (self.0)(self.clone().into_parser()).parse(stream, logger)\n\n }\n\n}\n\n\n", "file_path": "src/combinators/fix.rs", "rank": 1, "score": 44755.101335474996 }, { "content": "/// uint := digit { digit }\n\nfn uint<'f>() -> Parser<'f, String, CharStream<'f>> {\n\n digit.some().map(|v| v.iter().collect::<String>())\n\n}\n\n\n", "file_path": "tests/calculator.rs", "rank": 2, "score": 39293.48089895709 }, { "content": "/// digit := '0' | '1' | ... 
| '9'\n\nfn digit<'f>() -> Parser<'f, char, CharStream<'f>> {\n\n satisfy(|&ch| ch.is_digit(10))\n\n}\n\n\n", "file_path": "tests/calculator.rs", "rank": 3, "score": 39293.48089895709 }, { "content": "/// expr := term {('+'|'-') term}\n\nfn expr<'f>() -> Parser<'f, f64, CharStream<'f>> {\n\n term.and(char('+').or(char('-')).and(term).many())\n\n .trim()\n\n .map(|(v0, r)| {\n\n r.iter().fold(v0, |acc, (op, v)| match op {\n\n '+' => acc + v,\n\n _ => acc - v,\n\n })\n\n })\n\n}\n\n\n", "file_path": "tests/calculator.rs", "rank": 4, "score": 39293.48089895709 }, { "content": "/// term := factor {('*'|'/') factor}\n\nfn term<'f>() -> Parser<'f, f64, CharStream<'f>> {\n\n factor\n\n .and(char('*').or(char('/')).and(factor).many())\n\n .trim()\n\n .map(|(v0, r)| {\n\n r.iter().fold(v0, |acc, (op, v)| match op {\n\n '*' => acc * v,\n\n _ => acc / v,\n\n })\n\n })\n\n}\n\n\n", "file_path": "tests/calculator.rs", "rank": 5, "score": 39293.48089895709 }, { "content": "/// float := uint ['.' uint]\n\nfn float<'f>() -> Parser<'f, f64, CharStream<'f>> {\n\n uint.and(char('.').and(uint).optional())\n\n .map_result(|(s, r)| {\n\n let mut res = s;\n\n if let Some((dot, frac)) = r {\n\n res.push(dot);\n\n res.push_str(&frac[..])\n\n }\n\n res.parse::<f64>()\n\n })\n\n}\n\n\n", "file_path": "tests/calculator.rs", "rank": 6, "score": 39293.48089895709 }, { "content": "/// factor := '(' expr ')' | float\n\nfn factor<'f>() -> Parser<'f, f64, CharStream<'f>> {\n\n mid(char('('), expr, char(')')).or(float).trim()\n\n}\n\n\n", "file_path": "tests/calculator.rs", "rank": 7, "score": 39293.48089895709 }, { "content": "/// # Combinator: `space`\n\n///\n\n/// Consume a single whitespace character (` `, `\\n`, `\\r` or `\\t`).\n\n/// Equivalant to `char(' ').or(char('\\n')).or(char('\\r')).or(char('\\t'))`.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::core::Parsable;\n\n/// use parsic::primitives::{CharStream, space};\n\n///\n\n/// // Consume a whitespace character\n\n/// let parser 
= space();\n\n///\n\n/// let mut st = CharStream::new(\" Hello\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some(' '), res);\n\n/// assert_eq!(\"Hello\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\npub fn space<'f>() -> Parser<'f, char, CharStream<'f>> {\n\n char(' ').or(char('\\n')).or(char('\\r')).or(char('\\t'))\n\n}\n\n\n", "file_path": "src/primitives/combinators.rs", "rank": 8, "score": 36703.846419548005 }, { "content": "/// # Combinator: `empty`\n\n///\n\n/// A parser that consumes no item and always fails.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::*;\n\n/// use parsic::primitives::CharStream;\n\n///\n\n/// let parser = empty::<char, CharStream>();\n\n///\n\n/// let mut st = CharStream::new(\"Hello\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(None, res);\n\n/// assert_eq!(\"Hello\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\npub fn empty<'f, A: 'f, S: 'f>() -> Parser<'f, A, S> {\n\n Parser::new(move |_: &mut S, _| None)\n\n}\n\n\n", "file_path": "src/combinators/alternative.rs", "rank": 9, "score": 35587.6490378414 }, { "content": "/// # Combinator: `literal`\n\n///\n\n/// Consume given literal string from the parse stream.test\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::core::Parsable;\n\n/// use parsic::primitives::{CharStream, literal};\n\n///\n\n/// // Consume literal string \"Hello\"\n\n/// let parser = literal(\"Hello\");\n\n///\n\n/// let mut st = CharStream::new(\"Hello!\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some(\"Hello\"), res);\n\n/// assert_eq!(\"!\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\npub fn literal<'f>(s: &'f str) -> Parser<'f, &'f str, CharStream> {\n\n Parser::new(move |stream: &mut CharStream<'f>, logger| {\n\n if stream.as_str().starts_with(s) {\n\n let ret = &stream.as_str()[0..s.len()];\n\n 
stream.take(s.len()).for_each(|_| {});\n\n Some(ret)\n\n } else {\n\n logger.with(Msg::Error(MsgBody::new(\n\n &format!(\"expecting \\\"{}\\\".\", s)[..],\n\n Some(stream.pos()),\n\n )));\n\n None\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/primitives/combinators.rs", "rank": 10, "score": 33925.36160817742 }, { "content": "/// # Combinator: `char`\n\n///\n\n/// Consume the given char from the parse stream.\n\n/// `char(x)` is equivalent to `satisfy(|x: &char| *x == ch)`\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::core::Parsable;\n\n/// use parsic::primitives::{char, CharStream};\n\n///\n\n/// // Consume a single character 'H'\n\n/// let parser = char('H');\n\n///\n\n/// let mut st = CharStream::new(\"Hello\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some('H'), res);\n\n/// assert_eq!(\"ello\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\npub fn char<'f>(ch: char) -> Parser<'f, char, CharStream<'f>> {\n\n satisfy(move |x| *x == ch)\n\n}\n\n\n", "file_path": "src/primitives/combinators.rs", "rank": 11, "score": 33790.06604527538 }, { "content": "/// # Combinator: `regex`\n\n///\n\n/// Consume a literal string that matches given regular expression.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::core::Parsable;\n\n/// use parsic::primitives::{CharStream, regex};\n\n///\n\n/// // Consume a date string\n\n/// let parser = regex(r\"^\\d{2}/\\d{2}/\\d{4}\");\n\n///\n\n/// let mut st = CharStream::new(\"10/30/2020!\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some(\"10/30/2020\"), res);\n\n/// assert_eq!(\"!\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\npub fn regex<'f>(re: &'f str) -> Parser<'f, &'f str, CharStream> {\n\n Parser::new(move |stream: &mut CharStream<'f>, logger| {\n\n let regex = regex::Regex::new(re).unwrap();\n\n let s = stream.as_str();\n\n match regex.find(s) {\n\n Some(m) if m.start() == 0 => {\n\n stream.take(m.end()).for_each(|_| {});\n\n 
Some(&s[0..m.end()])\n\n }\n\n _ => {\n\n logger.with(Msg::Error(MsgBody::new(\n\n &format!(\"expecting \\\"{}\\\".\", regex.as_str())[..],\n\n Some(stream.pos()),\n\n )));\n\n None\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/primitives/combinators.rs", "rank": 12, "score": 33186.90378480544 }, { "content": "/// # Combinator: `pure`\n\n///\n\n/// Injects a value into an identity parser.\n\n///\n\n/// # Examples\n\n/// ## Injects a value\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::*;\n\n/// use parsic::primitives::CharStream;\n\n/// let parser = pure(true);\n\n///\n\n/// let mut st = CharStream::new(\"Hello\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some(true), res);\n\n/// assert_eq!(\"Hello\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n///\n\n/// ```\n\n/// ## Injects a function\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::*;\n\n/// use parsic::primitives::CharStream;\n\n///\n\n/// let parser = pure(|_| true);\n\n/// let mut st = CharStream::new(\"Hello\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(true, res.unwrap()(1));\n\n/// assert_eq!(\"Hello\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\npub fn pure<'f, A: Clone + 'f, S: 'f>(x: A) -> Parser<'f, A, S> {\n\n Parser::new(move |_, _| Some(x.clone()))\n\n}\n\n\n", "file_path": "src/combinators/applicative.rs", "rank": 13, "score": 33177.36644675242 }, { "content": "/// # Combinator: `fix`\n\n///\n\n/// In Rust, closures are anonymous functions, so there is no name for us to call\n\n/// when we want to make it recursive. 
Therefore, a Y-Combinator, or\n\n/// [fixed-point combinator](https://en.wikipedia.org/wiki/Fixed-point_combinator) `fix`\n\n/// is introduced to address this issue, making it possible to write parsers that\n\n/// support recursive syntax using closures.\n\n///\n\n/// # Property\n\n/// ```plain\n\n/// fix f = f (fix f)\n\n/// ```\n\n/// # Example\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::Parsable;\n\n/// use parsic::primitives::{char, CharStream};\n\n///\n\n/// // expr := '1' expr | '0'\n\n/// let parser = fix(|parser| char('1').right(parser).or(char('0')));\n\n///\n\n/// let mut st = CharStream::new(\"1110\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some('0'), res);\n\n/// assert_eq!(\"\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\npub fn fix<'f, A: 'f, F, S: 'f>(fix: F) -> Parser<'f, A, S>\n\nwhere\n\n F: Fn(Parser<'f, A, S>) -> Parser<'f, A, S> + 'f,\n\n{\n\n Fix(Rc::new(fix)).into_parser()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::combinators::*;\n\n use crate::core::Parsable;\n\n use crate::primitives::{char, satisfy, CharStream};\n\n\n\n #[test]\n\n fn mutual_recursive_grammar() {\n\n // expr := term '+' expr | term\n\n // term := factor '*' term | factor\n\n // factor := '(' expr ')' | uint\n\n // uint := digit { digit }\n\n // digit := '0' | '1' | ... 
| '9'\n", "file_path": "src/combinators/fix.rs", "rank": 14, "score": 31641.514131655724 }, { "content": "/// # Combinator: `satisfy`\n\n///\n\n/// Consume a single character if given function applied\n\n/// to the next character from the parse stream yields `true`.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::core::Parsable;\n\n/// use parsic::primitives::{CharStream, satisfy};\n\n///\n\n/// // Consume a uppercase letter\n\n/// let parser = satisfy(|&ch| ch.is_uppercase());\n\n///\n\n/// let mut st = CharStream::new(\"Hello\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some('H'), res);\n\n/// assert_eq!(\"ello\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\npub fn satisfy<'f>(f: impl Fn(&char) -> bool + 'f) -> Parser<'f, char, CharStream<'f>> {\n\n Parser::new(move |stream: &mut CharStream<'f>, logger| {\n\n let st = stream.clone();\n\n match stream.next() {\n\n Some(ch) if f(&ch) => Some(ch),\n\n Some(ch) => {\n\n *stream = st;\n\n logger.with(Msg::Error(MsgBody::new(\n\n &format!(\"'{}' does not satisfy required conditions.\", ch)[..],\n\n Some(stream.pos()),\n\n )));\n\n None\n\n }\n\n None => {\n\n logger.with(Msg::Error(MsgBody::new(\n\n \"unexpected end of input.\",\n\n Some(stream.pos()),\n\n )));\n\n None\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/primitives/combinators.rs", "rank": 15, "score": 30317.155412000935 }, { "content": "use crate::core::logger::*;\n\nuse std::rc::Rc;\n\n\n\n/// # `Parser` struct\n\n/// Wraps the parser function.\n\n#[derive(Clone)]\n\npub struct Parser<'f, A, S>(Rc<dyn Fn(&mut S, &mut ParseLogger) -> Option<A> + 'f>);\n\n\n\nimpl<'f, A: 'f, S> Parser<'f, A, S> {\n\n pub fn new<F>(f: F) -> Self\n\n where\n\n F: Fn(&mut S, &mut ParseLogger) -> Option<A> + 'f,\n\n {\n\n Self(Rc::new(f))\n\n }\n\n}\n\n\n\n/// # `Parsable` trait\n\n/// Anything that is parsable should implement `Parsable` trait,\n\n/// The return types of all the combinators and combinators in this 
library\n\n/// Implement `Parsable` trait, meaning you can treat them as parsers\n\n/// and call `parse()` or `exec()` from them to parse given input.\n", "file_path": "src/core/parser.rs", "rank": 16, "score": 27215.54608513376 }, { "content": "}\n\n\n\nimpl<'f, A: 'f, S> Parsable for Parser<'f, A, S> {\n\n type Stream = S;\n\n type Result = A;\n\n fn parse(&self, stream: &mut Self::Stream, logger: &mut ParseLogger) -> Option<Self::Result> {\n\n (*self).0(stream, logger)\n\n }\n\n}\n\n\n\nimpl<P: Parsable> Parsable for Rc<P> {\n\n type Stream = P::Stream;\n\n type Result = P::Result;\n\n fn parse(&self, stream: &mut Self::Stream, logger: &mut ParseLogger) -> Option<Self::Result> {\n\n (**self).parse(stream, logger)\n\n }\n\n}\n\n\n\nimpl<F, P: Parsable> Parsable for F\n\nwhere\n\n F: Fn() -> P,\n\n{\n\n type Stream = P::Stream;\n\n type Result = P::Result;\n\n fn parse(&self, stream: &mut Self::Stream, logger: &mut ParseLogger) -> Option<Self::Result> {\n\n (*self)().parse(stream, logger)\n\n }\n\n}\n", "file_path": "src/core/parser.rs", "rank": 17, "score": 27211.981262411413 }, { "content": "/// Another equivlent implementation using closures\n\nfn expr_<'s>() -> impl Parsable<Stream = CharStream<'s>, Result = f64> {\n\n fix(|expr| {\n\n // digit := '0' | '1' | ... | '9'\n\n let digit = satisfy(|&ch| ch.is_digit(10));\n\n // uint := digit { digit }\n\n let uint = digit.some().map(|v| v.iter().collect::<String>());\n\n // float := uint ['.' 
uint]\n\n let float = uint\n\n .clone()\n\n .and(char('.').and(uint).optional())\n\n .map_result(|(s, r)| {\n\n let mut res = s;\n\n if let Some((dot, frac)) = r {\n\n res.push(dot);\n\n res.push_str(&frac[..])\n\n }\n\n res.parse::<f64>()\n\n });\n\n // factor := '(' expr ')' | float\n\n let factor = mid(char('('), expr.clone(), char(')')).or(float).trim();\n", "file_path": "tests/calculator.rs", "rank": 18, "score": 20108.316271760283 }, { "content": "/// Implement `trim` method for `Parsable<CharStream>`:\n\npub trait PrimitiveExt<'f, A: 'f>: Parsable<Stream = CharStream<'f>, Result = A> {\n\n /// # Combinator: `trim`\n\n ///\n\n /// Consume as many whitespace characters (` `, `\\n`, `\\r` or `\\t`)\n\n /// as possible surrounding given parser. `trim(p)` is equivalant to\n\n /// `mid(space().many(), p, space().many())`.\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use parsic::core::Parsable;\n\n /// use parsic::primitives::{CharStream, literal, PrimitiveExt, trim};\n\n ///\n\n /// // Consume a whitespace character\n\n /// let parser = literal(\"Hello\").trim();\n\n ///\n\n /// let mut st = CharStream::new(\" Hello \");\n\n /// let (res, logs) = parser.exec(&mut st);\n\n ///\n\n /// assert_eq!(Some(\"Hello\"), res);\n\n /// assert_eq!(\"\", st.as_str());\n", "file_path": "src/primitives/combinators.rs", "rank": 19, "score": 19359.935275430027 }, { "content": "/// Implement error related combinators for `Parsable<S>`.\n\npub trait LogExt<'f, A: 'f, S>: Parsable<Stream = S, Result = A> {\n\n /// # Combinator: `info`\n\n fn info(self, msg: &'f str) -> Parser<'f, A, S>\n\n where\n\n Self: Sized + 'f,\n\n {\n\n info(self, msg)\n\n }\n\n\n\n /// # Combinator: `warn`\n\n fn warn(self, msg: &'f str) -> Parser<'f, A, S>\n\n where\n\n Self: Sized + 'f,\n\n {\n\n warn(self, msg)\n\n }\n\n\n\n /// # Combinator: `error`\n\n fn error(self, msg: &'f str) -> Parser<'f, A, S>\n\n where\n", "file_path": "src/combinators/error.rs", "rank": 20, "score": 19359.935275430027 }, { 
"content": "pub trait FunctorExt<'f, A: 'f, S>: Parsable<Stream = S, Result = A> {\n\n /// # Combinator: `map`\n\n ///\n\n /// Maps the result of current parser to another value.\n\n ///\n\n /// # Properties\n\n ///\n\n /// Should satisfy [Functor laws](https://wiki.haskell.org/Typeclassopedia#Laws):\n\n ///\n\n /// - **Identity**: `p.map(|x| x) ~ p`\n\n /// - **Composition**: `p.map(|x| f(g(x))) ~ p.map(f).map(g)`\n\n ///\n\n /// Check out `test_functor` module for naive examples of above laws.\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use parsic::combinators::*;\n\n /// use parsic::core::Parsable;\n\n /// use parsic::primitives::*;\n\n ///\n", "file_path": "src/combinators/functor.rs", "rank": 21, "score": 19357.06505738883 }, { "content": "pub trait MonadExt<'f, A: 'f, S>: Parsable<Stream = S, Result = A> {\n\n /// # Combinator: `bind`\n\n ///\n\n /// Monadic bind operator `(>>=)` for context sensitive parsing.\n\n ///\n\n /// # Properties\n\n ///\n\n /// Should satisfy [Monad laws](https://wiki.haskell.org/Typeclassopedia#Laws_3):\n\n ///\n\n /// - **Left-identity**: `pure(x).bind(f) ~ f(x)`\n\n /// - **Right-identity**: `p.bind(|x| pure(x)) ~ p`\n\n /// - **Associativity**: `p.bind(f).bind(g) ~ p.bind(|x| f(x).bind(g))`\n\n ///\n\n /// Check out `test_monad` module for naive examples of above laws.\n\n ///\n\n /// # Example\n\n ///\n\n /// The code example below parses `expr` with the following grammar:\n\n ///\n\n /// ```\n", "file_path": "src/combinators/monad.rs", "rank": 22, "score": 19357.06505738883 }, { "content": "pub trait AlternativeExt<'f, A: 'f, S>: Parsable<Stream = S, Result = A> {\n\n /// # Combinator: `or`\n\n ///\n\n /// Alternative combinator. 
Accepts two parsers as arguments,\n\n /// if the first parser succeeds then its result is returned,\n\n /// otherwise the result of the second parser is returned.\n\n ///\n\n /// # Properties\n\n ///\n\n /// Should satisfy [Alternative laws](https://wiki.haskell.org/Typeclassopedia#Laws_6).\n\n ///\n\n /// Instances of `Parser` and `or` forms a monoid:\n\n /// \n\n /// - **Left identity**: `empty().or(p) ~ p`\n\n /// - **Right identity**: `p.or(empty()) ~ p`\n\n /// - **Associative**: `px.or(py).or(pz) ~ px.or(py.or(pz))`\n\n ///\n\n /// Following properties exist when `empty` and `or` interacts with `pure` and `compose`:\n\n ///\n\n /// - **Left zero**: `empty().compose(x) ~ empty()`\n", "file_path": "src/combinators/alternative.rs", "rank": 23, "score": 19357.06505738883 }, { "content": "pub trait ReplicativeExt<'f, A: 'f, S>: Parsable<Stream = S, Result = A> {\n\n /// # Combinator: `many`\n\n ///\n\n /// Apply given parser as many times as possible (zero or more times),\n\n /// and returns a vector `Vec<T>` containg all the parse results. 
The\n\n /// combinator always succeeds.\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use parsic::combinators::*;\n\n /// use parsic::core::Parsable;\n\n /// use parsic::primitives::{char, CharStream};\n\n ///\n\n /// // Consume character 't' zero or more times\n\n /// let parser = char('t').many();\n\n ///\n\n /// let mut st1 = CharStream::new(\"tttql\");\n\n /// let mut st2 = CharStream::new(\"ql\");\n\n /// let (res1, logs1) = parser.exec(&mut st1);\n\n /// let (res2, logs2) = parser.exec(&mut st2);\n", "file_path": "src/combinators/replicative.rs", "rank": 25, "score": 19357.06505738883 }, { "content": "pub trait ApplicativeExt<'f, F: 'f, S>: Parsable<Stream = S, Result = F> {\n\n /// # Combinator: `compose`\n\n ///\n\n /// Functional composition between parsers.\n\n ///\n\n /// # Properties\n\n ///\n\n /// Should satisfy [Applicative functor laws](https://wiki.haskell.org/Typeclassopedia#Laws_2):\n\n ///\n\n /// - **Identity**: `pure(id).compose(p) ~ p`\n\n /// - **Homomorphism**: `pure(f).compose(pure(g)) ~ pure(|x| f(g(x)))`\n\n /// - **Interchange**: `pf.compose(pure(x)) ~ pure(|f| f(x)).compose(pf)`\n\n /// - **Composition**: `pf.compose(pg.compose(px)) ~ pure(|f| |g| |x| f(g(x))).compose(px)`\n\n ///\n\n /// Check out `test_applicative` module for naive examples of above laws.\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use parsic::combinators::*;\n\n /// use parsic::core::Parsable;\n", "file_path": "src/combinators/applicative.rs", "rank": 26, "score": 18015.92891022563 }, { "content": "use crate::core::{Parsable, ParseLogger, Parser};\n\nuse std::rc::Rc;\n\n\n", "file_path": "src/combinators/fix.rs", "rank": 27, "score": 8.376871193604854 }, { "content": "use crate::combinators::*;\n\nuse crate::core::{Parsable, Parser};\n\nuse std::ops::{BitAnd, BitOr, Mul, Shl, Shr};\n\n\n\n/// # Overload Shl `<<` to `left` combinator\n\n///\n\n/// `p1 << p2` ~ `p1.left(p2)`\n\n///\n\n/// ## Example\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use 
parsic::core::{Parsable, Parser};\n\n/// use parsic::primitives::{char, CharStream};\n\n///\n\n/// let parser = char('A') >> char('B');\n\n///\n\n/// let mut st = CharStream::new(\"ABC\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some('B'), res);\n", "file_path": "src/core/ops.rs", "rank": 28, "score": 8.083360501175855 }, { "content": "use crate::core::{Msg, MsgBody, Parsable, Parser};\n\n\n\n/// # Combinator: `info` (function ver.)\n", "file_path": "src/combinators/error.rs", "rank": 29, "score": 7.659316860443403 }, { "content": " Self: Sized + 'f,\n\n{\n\n type Output = Parser<'f, (A, B), S>;\n\n\n\n fn bitand(self, rhs: P) -> Self::Output {\n\n and(self, rhs)\n\n }\n\n}\n\n\n\n/// # Overload operator `*` to `compose` combinator\n\n///\n\n/// `p1 * p2` ~ `p1.compose(p2)`\n\n///\n\n/// ## Example\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::{Parsable, Parser};\n\n/// use parsic::primitives::{char, CharStream};\n\n///\n\n/// let parser = pure(|x| x == 'A') * char('A');\n", "file_path": "src/core/ops.rs", "rank": 31, "score": 7.491871387151319 }, { "content": "///\n\n/// ## Example\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::{Parsable, Parser};\n\n/// use parsic::primitives::{char, CharStream};\n\n///\n\n/// let parser = char('A') & char('B');\n\n///\n\n/// let mut st = CharStream::new(\"ABC\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some(('A', 'B')), res);\n\n/// assert_eq!(\"C\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\nimpl<'f, A: 'f, B: 'f, S, P> BitAnd<P> for Parser<'f, A, S>\n\nwhere\n\n P: Parsable<Stream = S, Result = B> + 'f,\n\n S: Clone + 'f,\n", "file_path": "src/core/ops.rs", "rank": 32, "score": 7.477816943378551 }, { "content": "/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::{Parsable, Parser};\n\n/// use parsic::primitives::{char, CharStream};\n\n///\n\n/// let parser = char('A') << 
char('B');\n\n///\n\n/// let mut st = CharStream::new(\"ABC\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some('A'), res);\n\n/// assert_eq!(\"C\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\nimpl<'f, A: 'f, B: 'f, S, P> Shr<P> for Parser<'f, A, S>\n\nwhere\n\n P: Parsable<Stream = S, Result = B> + 'f,\n\n S: Clone + 'f,\n\n Self: Sized + 'f,\n\n{\n", "file_path": "src/core/ops.rs", "rank": 33, "score": 7.477816943378551 }, { "content": "use crate::core::{return_none, Msg, MsgBody, Parsable, Parser};\n\n\n\n/// # Combinator: `map` (function ver.)\n\n///\n\n/// Maps the result of a parser to another value using the given function.\n\n///\n\n/// # Properties\n\n///\n\n/// Should satisfy [Functor laws](https://wiki.haskell.org/Typeclassopedia#Laws):\n\n///\n\n/// - **Identity**: `map(p, |x| x) ~ p`\n\n/// - **Composition**: `map(p, |x| f(g(x))) ~ map(map(p, f), g)`\n\n///\n\n/// Check out `test_functor` module for naive examples of above laws.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::Parsable;\n\n/// use parsic::primitives::{char, CharStream};\n", "file_path": "src/combinators/functor.rs", "rank": 34, "score": 7.4073068994692495 }, { "content": " /// assert_eq!(0, logs.len());\n\n /// ```\n\n fn trim(self) -> Parser<'f, A, CharStream<'f>>\n\n where\n\n Self: Sized + 'f,\n\n {\n\n trim(self)\n\n }\n\n}\n\n\n\nimpl<'f, A: 'f, P: Parsable<Stream = CharStream<'f>, Result = A>> PrimitiveExt<'f, A> for P {}\n\n\n\n#[cfg(test)]\n\nmod test_char {\n\n use crate::core::Parsable;\n\n use crate::primitives::{char, CharStream};\n\n\n\n #[test]\n\n fn fail_with_grace() {\n\n let parser = char('h');\n", "file_path": "src/primitives/combinators.rs", "rank": 35, "score": 7.345385488194791 }, { "content": "use crate::core::{return_none, Parsable, Parser};\n\n\n\n/// # Combinator: `pure`\n\n///\n\n/// Injects a value into an identity parser.\n\n///\n\n/// # Examples\n\n/// ## Injects a 
value\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::*;\n\n/// use parsic::primitives::CharStream;\n\n/// let parser = pure(true);\n\n///\n\n/// let mut st = CharStream::new(\"Hello\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some(true), res);\n\n/// assert_eq!(\"Hello\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n", "file_path": "src/combinators/applicative.rs", "rank": 36, "score": 7.280238225052766 }, { "content": "use crate::combinators::{map, pure};\n\nuse crate::core::{return_none, Parsable, Parser};\n\n\n\n/// # Combinator: `empty`\n\n///\n\n/// A parser that consumes no item and always fails.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::*;\n\n/// use parsic::primitives::CharStream;\n\n///\n\n/// let parser = empty::<char, CharStream>();\n\n///\n\n/// let mut st = CharStream::new(\"Hello\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(None, res);\n\n/// assert_eq!(\"Hello\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n", "file_path": "src/combinators/alternative.rs", "rank": 37, "score": 7.236376466550562 }, { "content": " type Output = Parser<'f, B, S>;\n\n fn shr(self, rhs: P) -> Self::Output {\n\n right(self, rhs)\n\n }\n\n}\n\n\n\n/// # Overload operator `|` to `or` combinator\n\n///\n\n/// `p1 ^ p2` ~ `p1.or(p2)`\n\n///\n\n/// ## Example\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::{Parsable, Parser};\n\n/// use parsic::primitives::{char, CharStream};\n\n///\n\n/// let parser = char('A') | char('B');\n\n///\n\n/// let mut st = CharStream::new(\"ABC\");\n\n/// let (res, logs) = parser.exec(&mut st);\n", "file_path": "src/core/ops.rs", "rank": 38, "score": 7.234799637269704 }, { "content": "use crate::combinators::*;\n\nuse crate::core::{Msg, MsgBody, Parsable, Parser};\n\nuse crate::primitives::CharStream;\n\n\n\n/// # Combinator: `satisfy`\n\n///\n\n/// Consume a single 
character if given function applied\n\n/// to the next character from the parse stream yields `true`.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::core::Parsable;\n\n/// use parsic::primitives::{CharStream, satisfy};\n\n///\n\n/// // Consume a uppercase letter\n\n/// let parser = satisfy(|&ch| ch.is_uppercase());\n\n///\n\n/// let mut st = CharStream::new(\"Hello\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some('H'), res);\n\n/// assert_eq!(\"ello\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n", "file_path": "src/primitives/combinators.rs", "rank": 40, "score": 7.171536576534107 }, { "content": " }\n\n}\n\n\n\nimpl<'f, A: 'f, S, P: Parsable<Stream = S, Result = A>> FunctorExt<'f, A, S> for P {}\n\n\n\n#[cfg(test)]\n\nmod test_functor {\n\n use crate::combinators::*;\n\n use crate::core::Parsable;\n\n use crate::primitives::*;\n\n\n\n #[test]\n\n fn map_fail_with_grace() {\n\n let parser = char('-').and(char('1')).map(|(_, x)| x);\n\n\n\n let mut st = CharStream::new(\"+1\");\n\n let (res, logs) = parser.exec(&mut st);\n\n\n\n assert_eq!(None, res);\n\n assert_eq!(\"+1\", st.as_str());\n", "file_path": "src/combinators/functor.rs", "rank": 41, "score": 7.131227167088473 }, { "content": " compose(self, px)\n\n }\n\n}\n\n\n\nimpl<'f, A: 'f, S, P: Parsable<Stream = S, Result = A>> ApplicativeExt<'f, A, S> for P {}\n\n\n\n#[cfg(test)]\n\nmod test_applicative {\n\n use crate::combinators::*;\n\n use crate::core::Parsable;\n\n use crate::primitives::{char, CharStream};\n\n\n\n #[test]\n\n fn fail_with_grace() {\n\n let parser = pure(|x| x == 'H').compose(char('h'));\n\n\n\n let mut st = CharStream::new(\"Hello\");\n\n let (res, logs) = parser.exec(&mut st);\n\n\n\n assert_eq!(None, res);\n", "file_path": "src/combinators/applicative.rs", "rank": 42, "score": 7.040293772676201 }, { "content": " P: Parsable<Stream = S, Result = B>,\n\n S: Clone,\n\n Self: Sized + 'f,\n\n {\n\n bind(self, f)\n\n }\n\n}\n\n\n\nimpl<'f, 
A: 'f, S, P> MonadExt<'f, A, S> for P where P: Parsable<Stream = S, Result = A> {}\n\n\n\n#[cfg(test)]\n\nmod test_monad {\n\n use crate::combinators::*;\n\n use crate::core::Parsable;\n\n use crate::primitives::{char, satisfy, CharStream};\n\n\n\n #[test]\n\n fn fail_with_grace() {\n\n let parser = satisfy(|_| true).bind(|ch| {\n\n if ch.is_uppercase() {\n", "file_path": "src/combinators/monad.rs", "rank": 44, "score": 6.902016732033392 }, { "content": "use crate::core::{Parsable, Parser};\n\n\n\n/// # Combinator: `many` (function ver.)\n\n///\n\n/// Apply given parser as many times as possible (**zero** or more times),\n\n/// and returns a vector `Vec<T>` containg all the parse results. The\n\n/// combinator always succeeds.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::Parsable;\n\n/// use parsic::primitives::{char, CharStream};\n\n///\n\n/// // Consume character 't' zero or more times\n\n/// let parser = many(char('t'));\n\n///\n\n/// let mut st1 = CharStream::new(\"tttql\");\n\n/// let mut st2 = CharStream::new(\"ql\");\n\n/// let (res1, logs1) = parser.exec(&mut st1);\n\n/// let (res2, logs2) = parser.exec(&mut st2);\n\n///\n\n/// assert_eq!(Some(vec!['t', 't', 't']), res1);\n\n/// assert_eq!(Some(vec![]), res2);\n\n/// assert_eq!((\"ql\", \"ql\"), (st1.as_str(), st2.as_str()));\n\n/// assert_eq!((0, 0), (logs1.len(), logs2.len()));\n\n/// ```\n", "file_path": "src/combinators/replicative.rs", "rank": 46, "score": 6.8567633996923885 }, { "content": " /// use parsic::core::Parsable;\n\n /// use parsic::combinators::*;\n\n /// use parsic::primitives::{ CharStream, char, satisfy };\n\n ///\n\n /// // <expr> := <uppercase_letter> '+'\n\n /// // <expr> := <lowercase_letter> '-'\n\n /// let parser = satisfy(|_| true)\n\n /// .bind(|ch| if ch.is_uppercase() {\n\n /// char('+')\n\n /// } else {\n\n /// char('-')\n\n /// });\n\n ///\n\n /// let (res1, _) = parser.exec(&mut CharStream::new(\"A+\"));\n\n /// 
assert_eq!(Some('+'), res1);\n\n /// let (res2, _) = parser.exec(&mut CharStream::new(\"a-\"));\n\n /// assert_eq!(Some('-'), res2);\n\n /// ```\n\n fn bind<B: 'f, P>(self, f: impl Fn(A) -> P + 'f) -> Parser<'f, B, S>\n\n where\n", "file_path": "src/combinators/monad.rs", "rank": 47, "score": 6.852671952501041 }, { "content": "/// # Test: Calculator\n\n///\n\n/// A simple arithmetic expression evaluator that supports\n\n/// `+`, `-`, `*`, `/` and `()`. It accepts input with\n\n/// whitespaces.\n\n///\n\n/// This test contains two implementations: one is implemented\n\n/// with functions that returns a `Parser`, the other is implemented\n\n/// with closures.\n\n///\n\n/// ```plain\n\n/// expr := term {('+'|'-') term}\n\n/// term := factor {('*'|'/') factor}\n\n/// factor := '(' expr ')' | float\n\n/// float := uint ['.' uint]\n\n/// uint := digit { digit }\n\n/// digit := '0' | '1' | ... | '9'\n\n/// ```\n\nuse parsic::combinators::*;\n\nuse parsic::core::{Parsable, Parser};\n\nuse parsic::primitives::*;\n\n\n\n/// digit := '0' | '1' | ... | '9'\n", "file_path": "tests/calculator.rs", "rank": 50, "score": 6.605888351710631 }, { "content": " /// assert_eq!(\"hhh\", st.as_str());\n\n /// assert_eq!(0, logs.len());\n\n /// ```\n\n fn or(self, p: impl Parsable<Stream = S, Result = A> + 'f) -> Parser<'f, A, S>\n\n where\n\n S: Clone,\n\n Self: Sized + 'f,\n\n {\n\n or(self, p)\n\n }\n\n\n\n /// # Combinator: `optional`\n\n ///\n\n /// Apply given parser **at most one time**. 
Denote the result\n\n /// of the given parser `p` as `x`, then the result of `p.optional()`\n\n /// would be `Some(x)`.\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use parsic::combinators::*;\n", "file_path": "src/combinators/alternative.rs", "rank": 53, "score": 6.514426069492369 }, { "content": " Self: Sized + 'f,\n\n {\n\n optional(self)\n\n }\n\n}\n\n\n\nimpl<'f, A: 'f, S, P: Parsable<Stream = S, Result = A>> AlternativeExt<'f, A, S> for P {}\n\n\n\n#[cfg(test)]\n\nmod test_alternative {\n\n use crate::combinators::*;\n\n use crate::core::*;\n\n use crate::primitives::{char, CharStream};\n\n\n\n #[test]\n\n fn fail_with_grace() {\n\n let parser = char('B').or(char('C'));\n\n\n\n let mut st = CharStream::new(\"Ahhh\");\n\n let (res, logs) = parser.exec(&mut st);\n", "file_path": "src/combinators/alternative.rs", "rank": 55, "score": 6.511326256574652 }, { "content": " /// Maps the result of a parser to another value using the given function that\n\n /// produces an `Option<T>`. The only difference with `map` is that `map_option`\n\n /// will automatically try to unwrap the `Option<T>` and will fail if the result\n\n /// is `None`.\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use parsic::combinators::*;\n\n /// use parsic::core::Parsable;\n\n /// use parsic::primitives::{CharStream, satisfy};\n\n ///\n\n /// let parser = satisfy(|_| true).map_option(|ch: char| ch.to_digit(10));\n\n ///\n\n /// let mut st = CharStream::new(\"817\");\n\n /// let (res, logs) = parser.exec(&mut st);\n\n ///\n\n /// assert_eq!(Some(8), res);\n\n /// assert_eq!(\"17\", st.as_str());\n\n /// assert_eq!(0, logs.len());\n\n /// ```\n", "file_path": "src/combinators/functor.rs", "rank": 57, "score": 6.4529794851782345 }, { "content": "use crate::core::{return_none, Parsable, Parser};\n\n\n\n/// # Combinator: `bind` (function ver.)\n\n///\n\n/// Monadic bind operator `(>>=)` for context sensitive parsing.\n\n///\n\n/// # Properties\n\n///\n\n/// Should satisfy [Monad 
laws](https://wiki.haskell.org/Typeclassopedia#Laws_3):\n\n///\n\n/// - **Left-identity**: `bind(pure(x), f) ~ f(x)`\n\n/// - **Right-identity**: `bind(p, |x| pure(x)) ~ p`\n\n/// - **Associativity**: `bind(bind(p, f), g) ~ bind(p, |x| bind(f(x), g))`\n\n///\n\n/// Check out `test_monad` module for naive examples of above laws.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::core::Parsable;\n\n/// use parsic::combinators::*;\n", "file_path": "src/combinators/monad.rs", "rank": 58, "score": 6.451181846462178 }, { "content": " let (res, logs) = parser.exec(&mut st);\n\n\n\n assert_eq!(None, res);\n\n assert_eq!(\"hello\", st.as_str());\n\n assert_eq!(1, logs.len());\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_literal {\n\n use crate::core::Parsable;\n\n use crate::primitives::{literal, CharStream};\n\n\n\n #[test]\n\n fn fail_with_grace() {\n\n let parser = literal(\"Hello\");\n\n\n\n let mut st = CharStream::new(\"Hell\");\n\n let (res, logs) = parser.exec(&mut st);\n\n\n", "file_path": "src/primitives/combinators.rs", "rank": 59, "score": 6.448614278761314 }, { "content": " /// # Example\n\n /// ```\n\n /// use parsic::combinators::*;\n\n /// use parsic::core::Parsable;\n\n /// use parsic::primitives::{char, CharStream};\n\n ///\n\n /// // Consume character 't' one or more time\n\n /// let parser = char('t').some();\n\n ///\n\n /// let mut st1 = CharStream::new(\"tttql\");\n\n /// let mut st2 = CharStream::new(\"ql\");\n\n /// let (res1, logs1) = parser.exec(&mut st1);\n\n /// let (res2, logs2) = parser.exec(&mut st2);\n\n ///\n\n /// assert_eq!(Some(vec!['t', 't', 't']), res1);\n\n /// assert_eq!(None, res2);\n\n /// assert_eq!((\"ql\", \"ql\"), (st1.as_str(), st2.as_str()));\n\n /// assert_eq!((0, 1), (logs1.len(), logs2.len()));\n\n /// ```\n\n fn some(self) -> Parser<'f, Vec<A>, S>\n", "file_path": "src/combinators/replicative.rs", "rank": 60, "score": 6.44154390217397 }, { "content": "/// # Combinator: `map_option` (function ver.)\n\n///\n\n/// Maps the 
result of a parser to another value using the given function that\n\n/// produces an `Option<T>`. The only difference with `map` is that `map_option`\n\n/// will automatically try to unwrap the `Option<T>` and will fail if the result\n\n/// is `None`.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::Parsable;\n\n/// use parsic::primitives::{CharStream, satisfy};\n\n///\n\n/// let parser = map_option(satisfy(|_| true), |ch: char| ch.to_digit(10));\n\n///\n\n/// let mut st = CharStream::new(\"817\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some(8), res);\n\n/// assert_eq!(\"17\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\npub fn map_option<'f, A: 'f, B: 'f, S: Clone>(\n\n p: impl Parsable<Stream = S, Result = A> + 'f,\n\n f: impl Fn(A) -> Option<B> + 'f,\n\n) -> Parser<'f, B, S> {\n\n Parser::new(move |stream: &mut S, logger| {\n\n let st = stream.clone();\n\n p.parse(stream, logger).and_then(|x| f(x)).or_else(|| {\n\n logger.add(Msg::Error(MsgBody::new(\n\n \"map_option recieved a function that yielded None.\",\n\n None,\n\n )));\n\n return_none(stream, &st)\n\n })\n\n })\n\n}\n\n\n", "file_path": "src/combinators/functor.rs", "rank": 62, "score": 6.154486312508423 }, { "content": "/// # Combinator: `optional` (function ver.)\n\n///\n\n/// Apply given parser **at most one time**. 
Denote the result\n\n/// of the given parser `p` as `x`, then the result of `optional(p)`\n\n/// would be `Some(x)`.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::Parsable;\n\n/// use parsic::primitives::{char, CharStream};\n\n///\n\n/// // Consume character 't' at most one time\n\n/// let parser = char('t').optional();\n\n///\n\n/// let mut st1 = CharStream::new(\"tttql\");\n\n/// let mut st2 = CharStream::new(\"ql\");\n\n/// let (res1, logs1) = parser.exec(&mut st1);\n\n/// let (res2, logs2) = parser.exec(&mut st2);\n\n///\n\n/// assert_eq!(Some(Some('t')), res1);\n\n/// assert_eq!(Some(None), res2);\n\n/// assert_eq!((\"ttql\", \"ql\") ,(st1.as_str(), st2.as_str()));\n\n/// assert_eq!((0, 0), (logs1.len(), logs2.len()));\n\n/// ```\n\npub fn optional<'f, A: Clone + 'f, S: Clone + 'f>(\n\n p: impl Parsable<Stream = S, Result = A> + 'f,\n\n) -> Parser<'f, Option<A>, S> {\n\n or(map(p, Some), pure(None))\n\n}\n\n\n", "file_path": "src/combinators/alternative.rs", "rank": 63, "score": 6.105926300572122 }, { "content": " /// use parsic::core::Parsable;\n\n /// use parsic::primitives::{char, CharStream};\n\n ///\n\n /// // Consume character 't' at most one time\n\n /// let parser = char('t').optional();\n\n ///\n\n /// let mut st1 = CharStream::new(\"tttql\");\n\n /// let mut st2 = CharStream::new(\"ql\");\n\n /// let (res1, logs1) = parser.exec(&mut st1);\n\n /// let (res2, logs2) = parser.exec(&mut st2);\n\n ///\n\n /// assert_eq!(Some(Some('t')), res1);\n\n /// assert_eq!(Some(None), res2);\n\n /// assert_eq!((\"ttql\", \"ql\") ,(st1.as_str(), st2.as_str()));\n\n /// assert_eq!((0, 0), (logs1.len(), logs2.len()));\n\n /// ```\n\n fn optional(self) -> Parser<'f, Option<A>, S>\n\n where\n\n A: Clone,\n\n S: Clone + 'f,\n", "file_path": "src/combinators/alternative.rs", "rank": 64, "score": 6.024235986029266 }, { "content": "\n\n let mut st = CharStream::new(\"Hello\");\n\n let (res, logs) = parser.exec(&mut 
st);\n\n\n\n assert_eq!(None, res);\n\n assert_eq!(\"Hello\", st.as_str());\n\n assert_eq!(1, logs.len());\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_satisfy {\n\n use crate::core::Parsable;\n\n use crate::primitives::{satisfy, CharStream};\n\n\n\n #[test]\n\n fn fail_with_grace() {\n\n let parser = satisfy(|&ch| ch.is_uppercase());\n\n\n\n let mut st = CharStream::new(\"hello\");\n", "file_path": "src/primitives/combinators.rs", "rank": 65, "score": 5.968584234434731 }, { "content": "/// # Combinator: `trim` (function ver.)\n\n///\n\n/// Consume as many whitespace characters (` `, `\\n`, `\\r` or `\\t`)\n\n/// as possible surrounding given parser. `trim(p)` is equivalant to\n\n/// `mid(space().many(), p, space().many())`.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::core::Parsable;\n\n/// use parsic::primitives::{CharStream, literal, trim};\n\n///\n\n/// // Consume a whitespace character\n\n/// let parser = trim(literal(\"Hello\"));\n\n///\n\n/// let mut st = CharStream::new(\" Hello \");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some(\"Hello\"), res);\n\n/// assert_eq!(\"\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\npub fn trim<'f, A: 'f>(\n\n p: impl Parsable<Stream = CharStream<'f>, Result = A> + 'f,\n\n) -> Parser<'f, A, CharStream<'f>> {\n\n mid(space().many(), p, space().many())\n\n}\n\n\n", "file_path": "src/primitives/combinators.rs", "rank": 66, "score": 5.904418452111072 }, { "content": "/// # Combinator: `map` (function ver.)\n\n///\n\n/// Maps the result of a parser to another value using the given function.\n\n///\n\n/// # Properties\n\n///\n\n/// Should satisfy [Functor laws](https://wiki.haskell.org/Typeclassopedia#Laws):\n\n///\n\n/// - **Identity**: `map(p, |x| x) ~ p`\n\n/// - **Composition**: `map(p, |x| f(g(x))) ~ map(map(p, f), g)`\n\n///\n\n/// Check out `test_functor` module for naive examples of above laws.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// 
use parsic::core::Parsable;\n\n/// use parsic::primitives::{char, CharStream};\n\n///\n\n/// let parser = map(or(char('H'), char('W')), |ch: char| ch == 'H');\n\n///\n\n/// let mut st = CharStream::new(\"Hello\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some(true), res);\n\n/// assert_eq!(\"ello\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\npub fn map<'f, A: 'f, B: 'f, S: Clone>(\n\n p: impl Parsable<Stream = S, Result = A> + 'f,\n\n f: impl Fn(A) -> B + 'f,\n\n) -> Parser<'f, B, S> {\n\n Parser::new(move |stream: &mut S, logger| {\n\n let st = stream.clone();\n\n p.parse(stream, logger)\n\n .map(|x| f(x))\n\n .or_else(|| return_none(stream, &st))\n\n })\n\n}\n\n\n", "file_path": "src/combinators/functor.rs", "rank": 67, "score": 5.884163633004337 }, { "content": "/// # Combinator: `some` (function ver.)\n\n///\n\n/// Apply given parser as many times as possible (**one** or more times),\n\n/// and returns a vector `Vec<T>` containg all the parse results. 
The\n\n/// combinator fails if the parser fails at the first attempt.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::Parsable;\n\n/// use parsic::primitives::{char, CharStream};\n\n///\n\n/// // Consume character 't' one or more times\n\n/// let parser = some(char('t'));\n\n///\n\n/// let mut st1 = CharStream::new(\"tttql\");\n\n/// let mut st2 = CharStream::new(\"ql\");\n\n/// let (res1, logs1) = parser.exec(&mut st1);\n\n/// let (res2, logs2) = parser.exec(&mut st2);\n\n///\n\n/// assert_eq!(Some(vec!['t', 't', 't']), res1);\n\n/// assert_eq!(None, res2);\n\n/// assert_eq!((\"ql\", \"ql\"), (st1.as_str(), st2.as_str()));\n\n/// assert_eq!((0, 1), (logs1.len(), logs2.len()));\n\n/// ```\n\npub fn some<'f, A: 'f, S: Clone>(\n\n p: impl Parsable<Stream = S, Result = A> + 'f,\n\n) -> Parser<'f, Vec<A>, S> {\n\n Parser::new(move |stream: &mut S, logger| {\n\n let (mut st, mut lg) = (stream.clone(), logger.clone());\n\n let mut res = vec![];\n\n while let Some(x) = p.parse(stream, logger) {\n\n res.push(x);\n\n st = stream.clone();\n\n lg = logger.clone();\n\n }\n\n *stream = st;\n\n if res.is_empty() {\n\n None\n\n } else {\n\n *logger = lg;\n\n Some(res)\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/combinators/replicative.rs", "rank": 68, "score": 5.861800111844534 }, { "content": " fn map_option<B: 'f>(self, f: impl Fn(A) -> Option<B> + 'f) -> Parser<'f, B, S>\n\n where\n\n S: Clone,\n\n Self: Sized + 'f,\n\n {\n\n map_option(self, f)\n\n }\n\n\n\n /// # Combinator: `map_result`\n\n ///\n\n /// Maps the result of a parser to another value using the given function that\n\n /// produces an `Result<T, E>`. The only difference with `map` is that `map_result`\n\n /// will automatically try to unwrap the `Result<T, E>`. If an `Err` is yeilded,\n\n /// `map_result` will log down the error message. 
Therefore, it requires `E` from\n\n /// `Result<T, E>` to implement `ToString` trait.\n\n ///\n\n /// # Example\n\n /// ```\n\n /// use parsic::combinators::*;\n\n /// use parsic::core::Parsable;\n", "file_path": "src/combinators/functor.rs", "rank": 69, "score": 5.846095831323818 }, { "content": "/// # Combinator: `map_result` (function ver.)\n\n///\n\n/// Maps the result of a parser to another value using the given function that\n\n/// produces an `Result<T, E>`. The only difference with `map` is that `map_result`\n\n/// will automatically try to unwrap the `Result<T, E>`. If an `Err` is yeilded,\n\n/// `map_result` will log down the error message. Therefore, it requires `E` from\n\n/// `Result<T, E>` to implement `ToString` trait.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::Parsable;\n\n/// use parsic::primitives::{CharStream, satisfy};\n\n///\n\n/// // A parser that consumes a natural number\n\n/// let parser = map_result(\n\n/// some(satisfy(|&ch| ch.is_digit(10))),\n\n/// |v| v.into_iter().collect::<String>().parse::<i64>()\n\n/// );\n\n///\n\n/// let mut st = CharStream::new(\"12345\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some(12345), res);\n\n/// assert_eq!(\"\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\npub fn map_result<'f, A: 'f, B: 'f, E: ToString, S: Clone>(\n\n p: impl Parsable<Stream = S, Result = A> + 'f,\n\n f: impl Fn(A) -> Result<B, E> + 'f,\n\n) -> Parser<'f, B, S> {\n\n Parser::new(move |stream: &mut S, logger| {\n\n let st = stream.clone();\n\n p.parse(stream, logger).and_then(|x| match f(x) {\n\n Ok(r) => Some(r),\n\n Err(e) => {\n\n logger.add(Msg::Error(MsgBody::new(&e.to_string()[..], None)));\n\n return_none(stream, &st)\n\n }\n\n })\n\n })\n\n}\n\n\n", "file_path": "src/combinators/functor.rs", "rank": 70, "score": 5.819314555958644 }, { "content": " assert_eq!(None, res);\n\n assert_eq!(\"Hell\", st.as_str());\n\n assert_eq!(1, 
logs.len());\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_regex {\n\n use crate::core::Parsable;\n\n use crate::primitives::{regex, CharStream};\n\n\n\n #[test]\n\n fn fail_with_grace() {\n\n let parser = regex(r\"^\\d{2}/\\d{2}/\\d{4}\");\n\n\n\n let mut st = CharStream::new(\"Hello\");\n\n let (res, logs) = parser.exec(&mut st);\n\n\n\n assert_eq!(None, res);\n\n assert_eq!(\"Hello\", st.as_str());\n\n assert_eq!(1, logs.len());\n\n }\n\n}\n", "file_path": "src/primitives/combinators.rs", "rank": 71, "score": 5.791494703593779 }, { "content": "/// # Combinator: `many` (function ver.)\n\n///\n\n/// Apply given parser as many times as possible (**zero** or more times),\n\n/// and returns a vector `Vec<T>` containg all the parse results. The\n\n/// combinator always succeeds.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::Parsable;\n\n/// use parsic::primitives::{char, CharStream};\n\n///\n\n/// // Consume character 't' zero or more times\n\n/// let parser = many(char('t'));\n\n///\n\n/// let mut st1 = CharStream::new(\"tttql\");\n\n/// let mut st2 = CharStream::new(\"ql\");\n\n/// let (res1, logs1) = parser.exec(&mut st1);\n\n/// let (res2, logs2) = parser.exec(&mut st2);\n\n///\n\n/// assert_eq!(Some(vec!['t', 't', 't']), res1);\n\n/// assert_eq!(Some(vec![]), res2);\n\n/// assert_eq!((\"ql\", \"ql\"), (st1.as_str(), st2.as_str()));\n\n/// assert_eq!((0, 0), (logs1.len(), logs2.len()));\n\n/// ```\n\npub fn many<'f, A: 'f, S: Clone>(\n\n p: impl Parsable<Stream = S, Result = A> + 'f,\n\n) -> Parser<'f, Vec<A>, S> {\n\n Parser::new(move |stream: &mut S, logger| {\n\n let (mut st, mut lg) = (stream.clone(), logger.clone());\n\n let mut res = vec![];\n\n while let Some(x) = p.parse(stream, logger) {\n\n res.push(x);\n\n st = stream.clone();\n\n lg = logger.clone();\n\n }\n\n *stream = st;\n\n *logger = lg;\n\n Some(res)\n\n })\n\n}\n\n\n", "file_path": "src/combinators/replicative.rs", "rank": 72, "score": 
5.678737836359074 }, { "content": " /// use parsic::primitives::{char, CharStream};\n\n ///\n\n /// let parser = pure(|x| x == 'H').compose(char('H'));\n\n ///\n\n /// let mut st = CharStream::new(\"Hello\");\n\n /// let (res, logs) = parser.exec(&mut st);\n\n ///\n\n /// assert_eq!(Some(true), res);\n\n /// assert_eq!(\"ello\", st.as_str());\n\n /// assert_eq!(0, logs.len());\n\n /// ```\n\n fn compose<A: 'f, B: 'f>(\n\n self,\n\n px: impl Parsable<Stream = S, Result = A> + 'f,\n\n ) -> Parser<'f, B, S>\n\n where\n\n F: Fn(A) -> B,\n\n S: Clone,\n\n Self: Sized + 'f,\n\n {\n", "file_path": "src/combinators/applicative.rs", "rank": 73, "score": 5.6067214082234145 }, { "content": " /// - **Right zero**: `pf.compose(empty()) ~ empty()`\n\n /// - **Left distribution**: `pf.or(pg).compose(px) ~ pf.compose(px).or(pg.compose(px))`\n\n /// - **Right distribution**: `pf.compose(px.or(py)) ~ pf.compose(px).or(pf.compose(py))`\n\n /// - **Left catch**: `pure(a).or(x) ~ pure(a)`\n\n ///\n\n /// Check out `test_alternative` module for naive examples of above laws.\n\n ///\n\n /// # Examples\n\n /// ```\n\n /// use parsic::combinators::*;\n\n /// use parsic::core::Parsable;\n\n /// use parsic::primitives::{char, CharStream};\n\n ///\n\n /// // Comsumes a character 'A' or a character 'B'\n\n /// let parser = char('B').or(char('A'));\n\n ///\n\n /// let mut st = CharStream::new(\"Ahhh\");\n\n /// let (res, logs) = parser.exec(&mut st);\n\n ///\n\n /// assert_eq!(Some('A'), res);\n", "file_path": "src/combinators/alternative.rs", "rank": 74, "score": 5.53056868150798 }, { "content": "/// # Combinator: `bind` (function ver.)\n\n///\n\n/// Monadic bind operator `(>>=)` for context sensitive parsing.\n\n///\n\n/// # Properties\n\n///\n\n/// Should satisfy [Monad laws](https://wiki.haskell.org/Typeclassopedia#Laws_3):\n\n///\n\n/// - **Left-identity**: `bind(pure(x), f) ~ f(x)`\n\n/// - **Right-identity**: `bind(p, |x| pure(x)) ~ p`\n\n/// - **Associativity**: `bind(bind(p, f), g) ~ 
bind(p, |x| bind(f(x), g))`\n\n///\n\n/// Check out `test_monad` module for naive examples of above laws.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::core::Parsable;\n\n/// use parsic::combinators::*;\n\n/// use parsic::primitives::{ CharStream, char, satisfy };\n\n///\n\n/// // <expr> := <uppercase_letter> '+'\n\n/// // <expr> := <lowercase_letter> '-'\n\n/// let parser = bind(\n\n/// satisfy(|_| true),\n\n/// |ch| if ch.is_uppercase() {\n\n/// char('+')\n\n/// } else {\n\n/// char('-')\n\n/// }\n\n/// );\n\n///\n\n/// let (res1, _) = parser.exec(&mut CharStream::new(\"A+\"));\n\n/// assert_eq!(Some('+'), res1);\n\n/// let (res2, _) = parser.exec(&mut CharStream::new(\"a-\"));\n\n/// assert_eq!(Some('-'), res2);\n\n/// ```\n\npub fn bind<'f, A: 'f, B: 'f, S, P>(\n\n p: impl Parsable<Stream = S, Result = A> + 'f,\n\n f: impl Fn(A) -> P + 'f,\n\n) -> Parser<'f, B, S>\n\nwhere\n\n P: Parsable<Stream = S, Result = B>,\n\n S: Clone,\n\n{\n\n Parser::new(move |stream: &mut S, logger| {\n\n let st = stream.clone();\n\n p.parse(stream, logger)\n\n .and_then(|x| f(x).parse(stream, logger))\n\n .or_else(|| return_none(stream, &st))\n\n })\n\n}\n\n\n", "file_path": "src/combinators/monad.rs", "rank": 75, "score": 5.396468608356839 }, { "content": "/// # Combinator: `compose` (function ver.)\n\n///\n\n/// Functional composition between parsers.\n\n///\n\n/// # Properties\n\n///\n\n/// Should satisfy [Applicative functor laws](https://wiki.haskell.org/Typeclassopedia#Laws_2).\n\n///\n\n/// - **Identity**: `compose(pure(id), p) ~ p`\n\n/// - **Homomorphism**: `compose(pure(f), pure(g)) ~ pure(|x| f(g(x)))`\n\n/// - **Interchange**: `compose(pf, pure(x)) ~ compose(pure(|f| f(x)), pf)`\n\n/// - **Composition**: `compose(pf, pg.compose(px)) ~ compose(pure(|f| |g| |x| f(g(x))), px)`\n\n///\n\n/// Check out `test_applicative` module for naive examples of above laws.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use 
parsic::core::Parsable;\n\n/// use parsic::primitives::{char, CharStream};\n\n///\n\n/// let parser = compose(pure(|x| x == 'H'), char('H'));\n\n///\n\n/// let mut st = CharStream::new(\"Hello\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some(true), res);\n\n/// assert_eq!(\"ello\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\npub fn compose<'f, A: 'f, B: 'f, F, S: Clone>(\n\n pf: impl Parsable<Stream = S, Result = F> + 'f,\n\n px: impl Parsable<Stream = S, Result = A> + 'f,\n\n) -> Parser<'f, B, S>\n\nwhere\n\n F: Fn(A) -> B + 'f,\n\n{\n\n Parser::new(move |stream: &mut S, logger| {\n\n let st = stream.clone();\n\n pf.parse(stream, logger)\n\n .and_then(|f| px.parse(stream, logger).map(|x| f(x)))\n\n .or_else(|| return_none(stream, &st))\n\n })\n\n}\n\n\n", "file_path": "src/combinators/applicative.rs", "rank": 76, "score": 5.290665326150203 }, { "content": "///\n\n/// ```\n\n/// ## Injects a function\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::*;\n\n/// use parsic::primitives::CharStream;\n\n///\n\n/// let parser = pure(|_| true);\n\n/// let mut st = CharStream::new(\"Hello\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(true, res.unwrap()(1));\n\n/// assert_eq!(\"Hello\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n", "file_path": "src/combinators/applicative.rs", "rank": 77, "score": 5.207787172757177 }, { "content": "/// # Combinator: `or` (function ver.)\n\n///\n\n/// Alternative combinator. 
Accepts two parsers as arguments,\n\n/// if the first parser succeeds then its result is returned,\n\n/// otherwise the result of the second parser is returned.\n\n///\n\n/// # Properties\n\n///\n\n/// Should satisfy [Alternative laws](https://wiki.haskell.org/Typeclassopedia#Laws_6).\n\n///\n\n/// Instances of `Parser` and `or` forms a monoid:\n\n/// \n\n/// - **Left identity**: `or(empty(), p) ~ p`\n\n/// - **Right identity**: `or(p, empty()) ~ p`\n\n/// - **Associative**: `or(or(px, py), pz) ~ or(px, or(py, pz))`\n\n///\n\n/// Following properties exist when `empty` and `or` interacts with `pure` and `compose`:\n\n///\n\n/// - **Left zero**: `compose(empty(), x) ~ empty()`\n\n/// - **Right zero**: `compose(pf, empty()) ~ empty()`\n\n/// - **Left distribution**: `compose(or(pf, pg), px) ~ or(compose(pf, px), pg.compose(px))`\n\n/// - **Right distribution**: `compose(pf, or(px, py)) ~ or(compose(pf, px), pf.compose(py))`\n\n/// - **Left catch**: `or(pure(a), x) ~ pure(a)`\n\n///\n\n/// Check out `test_alternative` module for naive examples of above laws.\n\n///\n\n/// # Examples\n\n/// ```\n\n/// use parsic::combinators::*;\n\n/// use parsic::core::Parsable;\n\n/// use parsic::primitives::{char, CharStream};\n\n///\n\n/// // Comsumes a character 'A' or a character 'B'\n\n/// let parser = or(char('B'), char('A'));\n\n///\n\n/// let mut st = CharStream::new(\"Ahhh\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some('A'), res);\n\n/// assert_eq!(\"hhh\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\npub fn or<'f, A: 'f, S: Clone>(\n\n p1: impl Parsable<Stream = S, Result = A> + 'f,\n\n p2: impl Parsable<Stream = S, Result = A> + 'f,\n\n) -> Parser<'f, A, S> {\n\n Parser::new(move |stream: &mut S, logger| {\n\n let (st, lg) = (stream.clone(), logger.clone());\n\n p1.parse(stream, logger).or_else(|| {\n\n *stream = st.clone();\n\n *logger = lg;\n\n p2.parse(stream, logger)\n\n .or_else(|| return_none(stream, &st))\n\n })\n\n 
})\n\n}\n\n\n", "file_path": "src/combinators/alternative.rs", "rank": 78, "score": 4.939843844839925 }, { "content": "/// use parsic::primitives::{ CharStream, char, satisfy };\n\n///\n\n/// // <expr> := <uppercase_letter> '+'\n\n/// // <expr> := <lowercase_letter> '-'\n\n/// let parser = bind(\n\n/// satisfy(|_| true),\n\n/// |ch| if ch.is_uppercase() {\n\n/// char('+')\n\n/// } else {\n\n/// char('-')\n\n/// }\n\n/// );\n\n///\n\n/// let (res1, _) = parser.exec(&mut CharStream::new(\"A+\"));\n\n/// assert_eq!(Some('+'), res1);\n\n/// let (res2, _) = parser.exec(&mut CharStream::new(\"a-\"));\n\n/// assert_eq!(Some('-'), res2);\n\n/// ```\n", "file_path": "src/combinators/monad.rs", "rank": 79, "score": 4.478475056573231 }, { "content": "mod logger;\n\nmod ops;\n\nmod parser;\n\n\n\npub use crate::core::{self, logger::*, ops::*, parser::*};\n\n\n\n/// Helper function that undo changes to stream\n\npub(crate) fn return_none<S: Clone, T>(cur: &mut S, bak: &S) -> Option<T> {\n\n *cur = bak.to_owned();\n\n None\n\n}\n", "file_path": "src/core/mod.rs", "rank": 80, "score": 4.382370883578888 }, { "content": " Self: Sized + 'f,\n\n {\n\n error(self, msg)\n\n }\n\n\n\n /// # Combinator: `inspect`\n\n fn inspect(self) -> Parser<'f, (Option<A>, S), S>\n\n where\n\n S: Clone + 'f,\n\n Self: Sized + 'f,\n\n {\n\n inspect(self)\n\n }\n\n\n\n /// # Combinator: `recover`\n\n fn recover(self, x: A) -> Parser<'f, A, S>\n\n where\n\n A: Clone,\n\n S: Clone,\n\n Self: Sized + 'f,\n\n {\n\n recover(self, x)\n\n }\n\n}\n\n\n\nimpl<'f, A: 'f, S, P: Parsable<Stream = S, Result = A>> LogExt<'f, A, S> for P {}\n", "file_path": "src/combinators/error.rs", "rank": 81, "score": 4.266415113159013 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod test_ops {\n\n use crate::combinators::*;\n\n use crate::core::Parsable;\n\n use crate::primitives::{char, satisfy, CharStream};\n\n\n\n #[test]\n\n fn mutual_recursive_grammar() {\n\n // expr := term '+' expr | term\n\n // term := factor '*' term | 
factor\n\n // factor := '(' expr ')' | uint\n\n // uint := digit { digit }\n\n // digit := '0' | '1' | ... | '9'\n\n let expr = fix(move |expr| {\n\n let digit = satisfy(|&ch| ch.is_digit(10));\n\n let uint = digit\n\n .some()\n\n .map(|v| v.iter().collect::<String>().parse::<u64>().unwrap());\n", "file_path": "src/core/ops.rs", "rank": 82, "score": 4.183233836035107 }, { "content": " /// use parsic::primitives::{CharStream, satisfy};\n\n ///\n\n /// // A parser that consumes a natural number\n\n /// let parser = satisfy(|&ch| ch.is_digit(10)).some()\n\n /// .map_result(|v| v.into_iter().collect::<String>().parse::<i64>());\n\n ///\n\n /// let mut st = CharStream::new(\"12345\");\n\n /// let (res, logs) = parser.exec(&mut st);\n\n ///\n\n /// assert_eq!(Some(12345), res);\n\n /// assert_eq!(\"\", st.as_str());\n\n /// assert_eq!(0, logs.len());\n\n /// ```\n\n fn map_result<B: 'f, E>(self, f: impl Fn(A) -> Result<B, E> + 'f) -> Parser<'f, B, S>\n\n where\n\n E: ToString,\n\n S: Clone,\n\n Self: Sized + 'f,\n\n {\n\n map_result(self, f)\n", "file_path": "src/combinators/functor.rs", "rank": 83, "score": 4.094243137043085 }, { "content": "///\n\n/// assert_eq!(Some('A'), res);\n\n/// assert_eq!(\"BC\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\nimpl<'f, A: 'f, S, P> BitOr<P> for Parser<'f, A, S>\n\nwhere\n\n P: Parsable<Stream = S, Result = A> + 'f,\n\n S: Clone + 'f,\n\n Self: Sized + 'f,\n\n{\n\n type Output = Parser<'f, A, S>;\n\n fn bitor(self, rhs: P) -> Self::Output {\n\n or(self, rhs)\n\n }\n\n}\n\n\n\n/// # Overload operator `&` to `and` combinator\n\n///\n\n/// `p1 & p2` ~ `p1.and(p2)`\n", "file_path": "src/core/ops.rs", "rank": 84, "score": 4.040582007811631 }, { "content": "///\n\n/// let mut st = CharStream::new(\"ABC\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some(true), res);\n\n/// assert_eq!(\"BC\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\nimpl<'f, A: 'f, B: 'f, F, S, P> Mul<P> 
for Parser<'f, F, S>\n\nwhere\n\n F: Fn(A) -> B + 'f,\n\n P: Parsable<Stream = S, Result = A> + 'f,\n\n S: Clone + 'f,\n\n Self: Sized + 'f,\n\n{\n\n type Output = Parser<'f, B, S>;\n\n\n\n fn mul(self, rhs: P) -> Self::Output {\n\n compose(self, rhs)\n\n }\n", "file_path": "src/core/ops.rs", "rank": 85, "score": 3.991761785555983 }, { "content": "/// assert_eq!(\"C\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n\nimpl<'f, A: 'f, B: 'f, S, P> Shl<P> for Parser<'f, A, S>\n\nwhere\n\n P: Parsable<Stream = S, Result = B> + 'f,\n\n S: Clone + 'f,\n\n Self: Sized + 'f,\n\n{\n\n type Output = Parser<'f, A, S>;\n\n fn shl(self, rhs: P) -> Self::Output {\n\n left(self, rhs)\n\n }\n\n}\n\n\n\n/// # Overload Shr `>>` to `right` combinator\n\n///\n\n/// `p1 >> p2` ~ `p1.right(p2)`\n\n///\n\n/// ## Example\n", "file_path": "src/core/ops.rs", "rank": 86, "score": 3.936647453690368 }, { "content": "# Parsic\n\n\n\n![MIT licensed](https://img.shields.io/badge/license-MIT-blue.svg)\n\n![Build Status](https://github.com/codgician/parsic/actions/workflows/build.yml/badge.svg)\n\n![Doc status](https://github.com/codgician/parsic/actions/workflows/doc.yml/badge.svg)\n\n\n\n🪄 A naive parser combinator written while learning Rust. 
**Pars**ing mag**ic** ~~tragic~~.\n\n\n\n## To-do\n\n\n\n- 💖 **Core**\n\n - [x] `Parser`: wrapper for parser function\n\n - [x] `ParseLogger`: logger for parser\n\n - [x] `Parsable`: anything that could be parsed\n\n- 🐣 **Primitives**\n\n - [x] `CharStream`: parse state for `&str`\n\n - [x] `char`: consumes one char at a time from parse stream\n\n - [x] `satisfy`: consumes one char if given condition satisifies\n\n - [x] `literal`: consumes given literal string\n\n - [x] `regex`: consumes literal string that matches given regular expression\n\n - [x] `trim`: constructs a parser that consumes whitespaces at both ends\n\n- 🍡 **Combinators**\n\n - [x] `map`: Maps the result of current parser to another value\n\n - [x] `map_option`: `map`, but automatically unwraps `Option<T>`\n\n - [x] `map_result`: `map`, but automatically unwraps `Result<T, E>`\n\n - [x] `pure`: injects value into an identity parser\n\n - [x] `compose`: compose one parser with another if applicable\n\n - [x] `empty`: a parser that always fails\n\n - [x] `fix`: fixed-point combinator for recursive syntax\n\n - [x] `and`: sequential combinator (pair)\n\n - [x] `bind`: monadic bind operator for context sensitive parsing\n\n - [x] `left`, `right`, `mid`: sequencial combinators (select left / right / middle)\n\n - [x] `or`: alternative combinators\n\n - [x] `many`, `some`, `optional`: replicative combinators\n\n - [x] `info`, `warn`, `error`: log combinators\n\n - [x] `inspect`: returns parser result alongwith current parsing state\n\n - [x] `recover`: returns a fallback value is given parser fails\n\n- ✨ **Enhancements**\n\n - [x] Overload operators: `>>`, `<<`, `/`, `&`, `*`\n\n - [ ] ~~Support returning multiple results~~\n\n - [ ] Advanced error handling **(Planning)**\n\n- 🩺 **Tests**\n\n - [x] Tests for laws of Functor, Applicative and Monad.\n\n - [x] Arthimetic calculator\n\n - [ ] Some more real-world tests **(Planning)**\n\n- 📄 **Docs**\n\n - [x] Core\n\n - [x] Primitives\n\n - [x] 
Combinators\n", "file_path": "README.md", "rank": 87, "score": 3.2113703273020113 }, { "content": "/*!\n\n * A naive parser combinator written while learning Rust.\n\n */\n\n\n\n/// Generic parser combinators.\n\npub mod combinators;\n\n/// Definitions of a parser and its friends.\n\npub mod core;\n\n/// `CharStream` and its primitive parser combinators.\n\npub mod primitives;\n", "file_path": "src/lib.rs", "rank": 88, "score": 3.211093998476955 }, { "content": " /// let parser = char('H').or(char('W'))\n\n /// .map(|ch: char| ch == 'H');\n\n ///\n\n /// let mut st = CharStream::new(\"Hello\");\n\n /// let (res, logs) = parser.exec(&mut st);\n\n ///\n\n /// assert_eq!(Some(true), res);\n\n /// assert_eq!(\"ello\", st.as_str());\n\n /// assert_eq!(0, logs.len());\n\n /// ```\n\n fn map<B: 'f>(self, f: impl Fn(A) -> B + 'f) -> Parser<'f, B, S>\n\n where\n\n S: Clone,\n\n Self: Sized + 'f,\n\n {\n\n map(self, f)\n\n }\n\n\n\n /// # Combinator: `map_option`\n\n ///\n", "file_path": "src/combinators/functor.rs", "rank": 90, "score": 2.594504228282762 }, { "content": "pub mod combinators;\n\npub mod stream;\n\n\n\npub use crate::primitives::{self, combinators::*, stream::*};\n", "file_path": "src/primitives/mod.rs", "rank": 91, "score": 2.5850936955269317 }, { "content": " ///\n\n /// assert_eq!(Some(vec!['t', 't', 't']), res1);\n\n /// assert_eq!(Some(vec![]), res2);\n\n /// assert_eq!((\"ql\", \"ql\"), (st1.as_str(), st2.as_str()));\n\n /// assert_eq!((0, 0), (logs1.len(), logs2.len()));\n\n /// ```\n\n fn many(self) -> Parser<'f, Vec<A>, S>\n\n where\n\n S: Clone,\n\n Self: Sized + 'f,\n\n {\n\n many(self)\n\n }\n\n\n\n /// # Combinator: `some` (function ver.)\n\n ///\n\n /// Apply given parser as many times as possible (**one** or more times),\n\n /// and returns a vector `Vec<T>` containg all the parse results. 
The\n\n /// combinator fails if the parser fails at the first attempt.\n\n ///\n", "file_path": "src/combinators/replicative.rs", "rank": 92, "score": 2.551790025231413 }, { "content": "///\n\n/// let parser = map(or(char('H'), char('W')), |ch: char| ch == 'H');\n\n///\n\n/// let mut st = CharStream::new(\"Hello\");\n\n/// let (res, logs) = parser.exec(&mut st);\n\n///\n\n/// assert_eq!(Some(true), res);\n\n/// assert_eq!(\"ello\", st.as_str());\n\n/// assert_eq!(0, logs.len());\n\n/// ```\n", "file_path": "src/combinators/functor.rs", "rank": 94, "score": 2.4925839987707277 }, { "content": " where\n\n S: Clone,\n\n Self: Sized + 'f,\n\n {\n\n some(self)\n\n }\n\n}\n\n\n\nimpl<'f, A: 'f, S, P: Parsable<Stream = S, Result = A>> ReplicativeExt<'f, A, S> for P {}\n", "file_path": "src/combinators/replicative.rs", "rank": 95, "score": 2.475909859939102 }, { "content": " assert_eq!(1, logs.len());\n\n }\n\n\n\n #[test]\n\n fn map_option_fail_with_grace() {\n\n let parser = satisfy(|_| true).map_option(|ch: char| ch.to_digit(10));\n\n\n\n let mut st = CharStream::new(\"naive\");\n\n let (res, logs) = parser.exec(&mut st);\n\n\n\n assert_eq!(None, res);\n\n assert_eq!(\"naive\", st.as_str());\n\n assert_eq!(1, logs.len());\n\n }\n\n\n\n #[test]\n\n fn map_result_fail_with_grace() {\n\n let parser = satisfy(|&ch| ch.is_digit(10))\n\n .some()\n\n .map_result(|v| v.into_iter().collect::<String>().parse::<i64>());\n", "file_path": "src/combinators/functor.rs", "rank": 96, "score": 2.470446430333822 }, { "content": "mod alternative;\n\nmod applicative;\n\nmod error;\n\nmod fix;\n\nmod functor;\n\nmod monad;\n\nmod replicative;\n\nmod sequential;\n\n\n\npub use crate::combinators::{\n\n self, alternative::*, applicative::*, error::*, fix::*, functor::*, monad::*, replicative::*,\n\n sequential::*,\n\n};\n", "file_path": "src/combinators/mod.rs", "rank": 97, "score": 2.1120795540229698 }, { "content": " char('+')\n\n } else {\n\n char('-')\n\n }\n\n });\n\n\n\n let mut st = 
CharStream::new(\"Awesome\");\n\n let (res, logs) = parser.exec(&mut st);\n\n assert_eq!(None, res);\n\n assert_eq!(1, logs.len());\n\n }\n\n\n\n #[test]\n\n fn left_identity() {\n\n //! `pure(x).bind(f) ~ f(x)`\n\n //! Left identity law\n\n let f = |b| if b { char('1') } else { char('0') };\n\n let parser1 = pure(true).bind(f);\n\n let parser2 = f(true);\n\n\n", "file_path": "src/combinators/monad.rs", "rank": 98, "score": 1.7511538321692548 }, { "content": "use std::fmt::{Display, Formatter, Result};\n\n\n\n/// # Struct `Pos`\n\n/// Data structure for parsing position.\n\n#[derive(Clone, Copy, Default, Debug, Eq, PartialEq)]\n\npub struct Pos(usize, usize);\n\n\n\nimpl Pos {\n\n pub fn new(row: usize, col: usize) -> Self {\n\n Self(row, col)\n\n }\n\n pub fn add(&self, d_row: usize, d_col: usize) -> Self {\n\n Self(self.0 + d_row, self.1 + d_col)\n\n }\n\n pub fn row(&self) -> usize {\n\n self.0\n\n }\n\n pub fn col(&self) -> usize {\n\n self.1\n\n }\n", "file_path": "src/core/logger.rs", "rank": 99, "score": 1.5763958531721394 } ]
Rust
src/core/context/default_context.rs
another-s347/rusty-p4
b2c395e1b14e105762d7eb0879315ed8a7dca7d2
use crate::app::P4app; use crate::entity::UpdateType; use crate::error::{ContextError, ContextErrorKind}; use crate::event::{CommonEvents, CoreEvent, CoreRequest, Event, PacketReceived}; use crate::p4rt::bmv2::Bmv2SwitchConnection; use crate::p4rt::pipeconf::{Pipeconf, PipeconfID}; use crate::p4rt::pure::{ new_packet_out_request, new_set_entity_request, new_write_table_entry, table_entry_to_entity, }; use crate::proto::p4runtime::{ stream_message_response, Entity, Index, MeterEntry, PacketIn, StreamMessageRequest, StreamMessageResponse, Uint128, Update, WriteRequest, WriteResponse, }; use rusty_p4_proto::proto::v1::MasterArbitrationUpdate; use crate::representation::{ConnectPoint, Device, DeviceID, DeviceType}; use crate::util::flow::Flow; use byteorder::BigEndian; use byteorder::ByteOrder; use bytes::Bytes; use failure::ResultExt; use futures::channel::mpsc::{UnboundedReceiver, UnboundedSender, Sender}; use futures::future::FutureExt; use futures::sink::SinkExt; use futures::stream::StreamExt; use log::{debug, error, info, trace, warn}; use std::collections::{HashMap, HashSet}; use std::fmt::Debug; use std::path::Path; use std::sync::{Arc, Mutex, RwLock}; use crate::core::connection::bmv2::Bmv2Connection; use crate::core::connection::ConnectionBox; use async_trait::async_trait; use nom::lib::std::collections::hash_map::RandomState; use crate::core::context::Context; #[derive(Clone)] pub struct DefaultContext<E> { pub core_request_sender: futures::channel::mpsc::Sender<CoreRequest>, pub event_sender: futures::channel::mpsc::Sender<CoreEvent<E>>, pub connections: HashMap<DeviceID, ConnectionBox>, pub pipeconf: Arc<HashMap<PipeconfID, Pipeconf>>, } impl<E> DefaultContext<E> where E: Debug + Event, { pub fn new( core_request_sender: futures::channel::mpsc::Sender<CoreRequest>, event_sender: futures::channel::mpsc::Sender<CoreEvent<E>>, connections: HashMap<DeviceID, ConnectionBox>, pipeconf: Arc<HashMap<PipeconfID, Pipeconf>>, ) -> DefaultContext<E> { DefaultContext { 
core_request_sender, event_sender, connections, pipeconf, } } pub fn update_pipeconf(&mut self, device: DeviceID, pipeconf: PipeconfID) { self.core_request_sender.try_send(CoreRequest::UpdatePipeconf { device, pipeconf, }).unwrap(); } pub async fn set_flow( &mut self, mut flow: Flow, device: DeviceID, update: UpdateType, ) -> Result<Flow, ContextError> { let hash = crate::util::hash(&flow); let connection = self.connections.get_mut(&device).ok_or(ContextError::from( ContextErrorKind::DeviceNotConnected { device }, ))?; let table_entry = flow.to_table_entry(&connection.pipeconf, hash); let request = new_set_entity_request(1, table_entry_to_entity(table_entry), update.into()); match connection.p4runtime_client.write(tonic::Request::new(request)).await { Ok(response) => { debug!(target: "core", "set entity response: {:?}", response); } Err(e) => { error!(target: "core", "grpc send error: {:?}", e); } } flow.metadata = hash; Ok(flow) } pub fn send_event(&mut self, event: E) { self.event_sender .try_send(CoreEvent::Event(event)) .unwrap(); } pub fn send_request(&mut self, request: CoreRequest) { self.core_request_sender.try_send(request).unwrap(); } pub async fn set_entity<T: crate::entity::ToEntity>( &mut self, device: DeviceID, update_type: UpdateType, entity: &T, ) -> Result<(), ContextError> { let connection = self.connections.get_mut(&device).ok_or(ContextError::from( ContextErrorKind::DeviceNotConnected { device }, ))?; if let Some(entity) = entity.to_proto_entity(&connection.pipeconf) { let request = new_set_entity_request(1, entity, update_type.into()); match connection.p4runtime_client.write(tonic::Request::new(request)).await { Ok(response) => { debug!(target: "core", "set entity response: {:?}", response); } Err(e) => { error!(target: "core", "grpc send error: {:?}", e); } } Ok(()) } else { Err(ContextError::from(ContextErrorKind::EntityIsNone)) } } pub fn remove_device(&mut self, device: DeviceID) { self.core_request_sender 
.try_send(CoreRequest::RemoveDevice { device }) .unwrap(); } } #[async_trait] impl<E> Context<E> for DefaultContext<E> where E: Event { type ContextState = (); fn new( core_request_sender: Sender<CoreRequest>, event_sender: Sender<CoreEvent<E>>, connections: HashMap<DeviceID, ConnectionBox, RandomState>, pipeconf: Arc<HashMap<PipeconfID, Pipeconf, RandomState>>, state: () ) -> Self { DefaultContext { core_request_sender, event_sender, connections, pipeconf, } } fn send_event(&mut self, event: E) { self.event_sender .try_send(CoreEvent::Event(event)) .unwrap(); } fn get_conn(&self) -> &HashMap<DeviceID, ConnectionBox, RandomState> { &self.connections } fn get_mut_conn(&mut self) -> &mut HashMap<DeviceID, ConnectionBox, RandomState> { &mut self.connections } fn get_connectpoint(&self, packet: &PacketReceived) -> Option<ConnectPoint> { self.connections.get(&packet.from) .map(|conn| &conn.pipeconf) .and_then(|pipeconf| { packet.metadata.iter() .find(|x| x.metadata_id == pipeconf.packetin_ingress_id) .map(|x| BigEndian::read_u16(x.value.as_ref())) }) .map(|port| ConnectPoint { device: packet.from, port: port as u32, }) } async fn insert_flow(&mut self, mut flow: Flow, device: DeviceID) -> Result<Flow, ContextError> { self.set_flow(flow, device, UpdateType::Insert).await } async fn send_packet(&mut self, to: ConnectPoint, packet: Bytes) { if let Some(c) = self.connections.get_mut(&to.device) { let request = new_packet_out_request(&c.pipeconf, to.port, packet); if let Err(err) = c.send_stream_request(request).await { error!(target: "core", "packet out err {:?}", err); } } else { error!(target: "core", "PacketOut error: connection not found for device {:?}.", to.device); } } fn add_device(&mut self, device: Device) -> bool { if self.connections.contains_key(&device.id) { return false; } self.core_request_sender .try_send(CoreRequest::AddDevice { device, }) .unwrap(); return true; } }
use crate::app::P4app; use crate::entity::UpdateType; use crate::error::{ContextError, ContextErrorKind}; use crate::event::{CommonEvents, CoreEvent, CoreRequest, Event, PacketReceived}; use crate::p4rt::bmv2::Bmv2SwitchConnection; use crate::p4rt::pipeconf::{Pipeconf, PipeconfID}; use crate::p4rt::pure::{ new_packet_out_request, new_set_entity_request, new_write_table_entry, table_entry_to_entity, }; use crate::proto::p4runtime::{ stream_message_response, Entity, Index, MeterEntry, PacketIn, StreamMessageRequest, StreamMessageResponse, Uint128, Update, WriteRequest, WriteResponse, }; use rusty_p4_proto::proto::v1::MasterArbitrationUpdate; use crate::representation::{ConnectPoint, Device, DeviceID, DeviceType}; use crate::util::flow::Flow; use byteorder::BigEndian; use byteorder::ByteOrder; use bytes::Bytes; use failure::ResultExt; use futures::channel::mpsc::{UnboundedReceiver, UnboundedSender, Sender}; use futures::future::FutureExt; use futures::sink::SinkExt; use futures::stream::StreamExt; use log::{debug, error, info, trace, warn}; use std::collections::{HashMap, HashSet}; use std::fmt::Debug; use std::path::Path; use std::sync::{Arc, Mutex, RwLock}; use crate::core::connection::bmv2::Bmv2Connection; use crate::core::connection::ConnectionBox; use async_trait::async_trait; use nom::lib::std::collections::hash_map::RandomState; use crate::core::context::Context; #[derive(Clone)] pub struct DefaultContext<E> { pub core_request_sender: futures::channel::mpsc::Sender<CoreRequest>, pub event_sender: futures::channel::mpsc::Sender<CoreEvent<E>>, pub connections: HashMap<DeviceID, ConnectionBox>, pub pipeconf: Arc<HashMap<PipeconfID, Pipeconf>>, } impl<E> DefaultContext<E> where E: Debug + Event, { pub fn new( core_request_sender: futures::channel::mpsc::Sender<CoreRequest>, event_sender: futures::channel::mpsc::Sender<CoreEvent<E>>, connections: HashMap<DeviceID, ConnectionBox>, pipeconf: Arc<HashMap<PipeconfID, Pipeconf>>, ) -> DefaultContext<E> { DefaultContext { 
core_request_sender, event_sender, connections, pipeconf, } } pub fn update_pipeconf(&mut self, device: DeviceID, pipeconf: PipeconfID) { self.core_request_sender.try_send(CoreRequest::UpdatePipeconf { device, pipeconf, }).unwrap(); } pub async fn set_flow( &mut self, mut flow: Flow, device: DeviceID, update: UpdateType, ) -> Result<Flow, ContextError> { let hash = crate::util::hash(&flow); let connection = self.connections.get_mut(&device).ok_or(ContextError::from( ContextErrorKind::DeviceNotConnected { device }, ))?; let table_entry = flow.to_table_entry(&connection.pipeconf, hash); let request = new_set_entity_request(1, table_entry_to_entity(table_entry), update.into()); match connection.p4runtime_client.write(tonic::Request::new(request)).await { Ok(response) => { debug!(target: "core", "set entity response: {:?}", response); } Err(e) => { error!(target: "core", "grpc send error: {:?}", e); } } flow.metadata = hash; Ok(flow) } pub fn send_event(&mut self, event: E) { self.event_sender .try_send(CoreEvent::Event(event)) .unwrap(); } pub fn send_request(&mut self, request: CoreRequest) { self.core_request_sender.try_send(request).unwrap(); } pub async fn set_entity<T: crate::entity::ToEntity>( &mut self, device: DeviceID, update_type: UpdateType, entity: &T, ) -> Result<(), ContextError> { let connection = self.connections.get_mut(&device).ok_or(ContextError::from( ContextErrorKind::DeviceNotConnected { device }, ))?; if let Some(entity) = entity.to_proto_entity(&connection.pipeconf) { let request = new_set_entity_request(1, entity, update_type.into()); match connection.p4runtime_client.write(tonic::Request::new(request)).await { Ok(response) => { debug!(target: "core", "set entity response: {:?}", response); } Err(e) => { error!(target: "core", "grpc send error: {:?}", e); } } Ok(()) } else { Err(ContextError::from(ContextErrorKind::EntityIsNone)) } } pub fn remove_device(&mut self, device: DeviceID) { self.core_request_sender 
.try_send(CoreRequest::RemoveDevice { device }) .unwrap(); } } #[async_trait] impl<E> Context<E> for DefaultContext<E> where E: Event { type ContextState = (); fn new( core_request_sender: Sender<CoreRequest>, event_sender: Sender<CoreEvent<E>>, connections: HashMap<DeviceID, ConnectionBox, RandomState>, pipeconf: Arc<HashMap<PipeconfID, Pipeconf, RandomState>>, state: () ) -> Self { DefaultContext { core_request_sender, event_sender, connections, pipeconf, } } fn send_event(&mut self, event: E) { self.event_sender .try_send(CoreEvent::Event(event)) .unwrap(); } fn get_conn(&self) -> &HashMap<DeviceID, ConnectionBox, RandomState> { &self.connections } fn get_mut_conn(&mut self) -> &mut HashMap<DeviceID, ConnectionBox, RandomState> { &mut self.connections } fn get_connectpoint(&self, packet: &PacketReceived) -> Option<ConnectPoint> { self.connections.get(&packet.from) .map(|conn| &conn.pipeconf) .and_then(|pipeconf| { packet.metadata.iter() .find(|x| x.metadata_id == pipeconf.packetin_ingress_id) .map(|x| BigEndian::read_u16(x.value.as_ref())) }) .map(|port| ConnectPoint { device: packet.from, port: port as u32, }) } async fn insert_flow(&mut self, mut flow: Flow, device: DeviceID) -> Result<Flow, ContextError> { self.set_flow(flow, device, UpdateType::Insert).await } async fn send_packet(&mut self, to: ConnectPoint, packet: Bytes) {
} fn add_device(&mut self, device: Device) -> bool { if self.connections.contains_key(&device.id) { return false; } self.core_request_sender .try_send(CoreRequest::AddDevice { device, }) .unwrap(); return true; } }
if let Some(c) = self.connections.get_mut(&to.device) { let request = new_packet_out_request(&c.pipeconf, to.port, packet); if let Err(err) = c.send_stream_request(request).await { error!(target: "core", "packet out err {:?}", err); } } else { error!(target: "core", "PacketOut error: connection not found for device {:?}.", to.device); }
if_condition
[ { "content": "pub fn new_set_entity_request(\n\n device_id: u64,\n\n entity: Entity,\n\n update_type: crate::proto::p4runtime::update::Type,\n\n) -> WriteRequest {\n\n WriteRequest {\n\n device_id,\n\n role_id: 0,\n\n election_id: Some(Uint128 { high: 0, low: 1 }),\n\n updates: vec![Update {\n\n r#type: update_type as i32,\n\n entity: Some(entity),\n\n }],\n\n atomicity: 0,\n\n }\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 0, "score": 338108.9179460478 }, { "content": "pub fn new_set_entity_request(\n\n device_id: u64,\n\n entity: Entity,\n\n update_type: crate::proto::p4runtime::update::Type,\n\n) -> WriteRequest {\n\n WriteRequest {\n\n device_id,\n\n role_id: 0,\n\n election_id: Some(Uint128 { high: 0, low: 1 }),\n\n updates: vec![Update {\n\n r#type: update_type as i32,\n\n entity: Some(entity),\n\n }],\n\n atomicity: 0,\n\n }\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 1, "score": 318519.49877255084 }, { "content": "pub fn new_packet_out_request<T>(\n\n pipeconf: &T,\n\n egress_port: u32,\n\n packet: Bytes,\n\n) -> StreamMessageRequest\n\nwhere\n\n T: Pipeconf,\n\n{\n\n let packetOut = PacketOut {\n\n payload: packet,\n\n metadata: vec![PacketMetadata {\n\n metadata_id: pipeconf.get_packetout_egress_id(),\n\n value: adjust_value(\n\n Bytes::copy_from_slice(egress_port.to_be_bytes().as_ref()),\n\n 2,\n\n ),\n\n }],\n\n };\n\n let request = StreamMessageRequest {\n\n update: Some(stream_message_request::Update::Packet(packetOut)),\n\n };\n\n request\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 2, "score": 296903.4224254114 }, { "content": "#[async_trait]\n\npub trait Context<E>: 'static + Send + Sync + Clone {\n\n type ContextState: Default + Clone;\n\n\n\n fn new(\n\n core_request_sender: futures::channel::mpsc::Sender<CoreRequest>,\n\n event_sender: futures::channel::mpsc::Sender<CoreEvent<E>>,\n\n connections: HashMap<DeviceID, ConnectionBox>,\n\n pipeconf: Arc<HashMap<PipeconfID, Pipeconf>>,\n\n 
state: Self::ContextState\n\n ) -> Self;\n\n\n\n fn send_event(&mut self, event: E);\n\n\n\n fn get_conn(&self) -> &HashMap<DeviceID, ConnectionBox>;\n\n\n\n fn get_mut_conn(&mut self) -> &mut HashMap<DeviceID, ConnectionBox>;\n\n\n\n fn get_connectpoint(&self, packet: &PacketReceived) -> Option<ConnectPoint>;\n\n\n\n async fn insert_flow(&mut self, mut flow: Flow, device: DeviceID) -> Result<Flow, ContextError>;\n\n\n\n async fn send_packet(&mut self, to: ConnectPoint, packet: Bytes);\n\n\n\n fn add_device(&mut self, device: Device) -> bool;\n\n}", "file_path": "src/core/context.rs", "rank": 3, "score": 290345.5811424482 }, { "content": "pub fn new_master_update_request(device_id: u64, option: (u64, u64)) -> StreamMessageRequest {\n\n StreamMessageRequest {\n\n update: Some(stream_message_request::Update::Arbitration(\n\n MasterArbitrationUpdate {\n\n device_id,\n\n role: None,\n\n election_id: Uint128 {\n\n high: option.1,\n\n low: option.0,\n\n }\n\n .into(),\n\n status: None,\n\n },\n\n )),\n\n }\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 4, "score": 288281.0496990257 }, { "content": "pub fn get_counter_id(pipeconf: &P4Info, name: &str) -> Option<u32> {\n\n get_counter(pipeconf, name).map(|table| table.preamble.as_ref().unwrap().id)\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 5, "score": 279545.89749644074 }, { "content": "pub fn get_directcounter_id(pipeconf: &P4Info, name: &str) -> Option<u32> {\n\n get_directcounter(pipeconf, name).map(|table| table.preamble.as_ref().unwrap().id)\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 6, "score": 279545.89749644074 }, { "content": "pub fn get_table_id(pipeconf: &P4Info, name: &str) -> Option<u32> {\n\n // self.p4info.tables.iter().for_each(|t|{\n\n // println!(\"{}\",t.preamble.as_ref().unwrap().name);\n\n // });\n\n get_table(pipeconf, name).map(|table| table.preamble.as_ref().unwrap().id)\n\n}\n\n\n", "file_path": 
"rusty-p4-core/src/p4rt/pure.rs", "rank": 7, "score": 279545.89749644074 }, { "content": "pub fn get_meter_id(pipeconf: &P4Info, name: &str) -> Option<u32> {\n\n get_meter(pipeconf, name).map(|table| table.preamble.as_ref().unwrap().id)\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 8, "score": 279545.89749644074 }, { "content": "#[doc(hidden)]\n\npub fn merge_matches(ours: &mut SmallVec<[FlowMatch; 3]>, other: &SmallVec<[FlowMatch; 3]>) {\n\n let len = ours.len();\n\n for i in other.iter() {\n\n if ours[0..len].iter().find(|x| x.name == i.name).is_none() {\n\n ours.push(i.clone());\n\n }\n\n }\n\n ours.sort_by(|a, b| a.name.cmp(b.name));\n\n}\n", "file_path": "rusty-p4-core/src/util/flow.rs", "rank": 9, "score": 278599.46229824563 }, { "content": "pub fn get_actions_id(pipeconf: &P4Info, action_name: &str) -> Option<u32> {\n\n get_action(pipeconf, action_name).map(|table| table.preamble.as_ref().unwrap().id)\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 10, "score": 276067.01879275916 }, { "content": "pub fn new_packet_out_request<T>(\n\n pipeconf: &T,\n\n egress_port: u32,\n\n packet: Bytes,\n\n) -> StreamMessageRequest\n\nwhere\n\n T: Pipeconf,\n\n{\n\n let packetOut = PacketOut {\n\n payload: packet,\n\n metadata: vec![PacketMetadata {\n\n metadata_id: pipeconf.get_packetout_egress_id(),\n\n value: adjust_value(\n\n Bytes::copy_from_slice(egress_port.to_be_bytes().as_ref()),\n\n 2,\n\n ),\n\n }],\n\n };\n\n let request = StreamMessageRequest {\n\n update: Some(stream_message_request::Update::Packet(packetOut)),\n\n };\n\n request\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 11, "score": 274871.9749993507 }, { "content": "pub fn get_packin_egress_port_metaid(p4info: &P4Info) -> Option<u32> {\n\n p4info\n\n .controller_packet_metadata\n\n .iter()\n\n .find(|p| {\n\n let pre = p.preamble.as_ref().unwrap();\n\n pre.name == \"packet_in\"\n\n })\n\n .and_then(|x| {\n\n x.metadata\n\n .iter()\n\n .find(|meta| 
meta.name == \"ingress_port\")\n\n .map(|x| x.id)\n\n })\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 12, "score": 261719.54864118464 }, { "content": "pub fn get_packout_egress_port_metaid(p4info: &P4Info) -> Option<u32> {\n\n p4info\n\n .controller_packet_metadata\n\n .iter()\n\n .find(|p| {\n\n let pre = p.preamble.as_ref().unwrap();\n\n pre.name == \"packet_out\"\n\n })\n\n .and_then(|x| {\n\n x.metadata\n\n .iter()\n\n .find(|meta| meta.name == \"egress_port\")\n\n .map(|x| x.id)\n\n })\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 13, "score": 261719.54864118464 }, { "content": "pub fn get_directcounter_id(pipeconf: &P4Info, name: &str) -> Option<u32> {\n\n get_directcounter(pipeconf, name).map(|table| table.preamble.as_ref().unwrap().id)\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 14, "score": 258691.1334537385 }, { "content": "pub fn get_meter_id(pipeconf: &P4Info, name: &str) -> Option<u32> {\n\n get_meter(pipeconf, name).map(|table| table.preamble.as_ref().unwrap().id)\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 15, "score": 258691.1334537385 }, { "content": "pub fn get_counter_id(pipeconf: &P4Info, name: &str) -> Option<u32> {\n\n get_counter(pipeconf, name).map(|table| table.preamble.as_ref().unwrap().id)\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 16, "score": 258691.1334537385 }, { "content": "pub fn get_table_id(pipeconf: &P4Info, name: &str) -> Option<u32> {\n\n // self.p4info.tables.iter().for_each(|t|{\n\n // println!(\"{}\",t.preamble.as_ref().unwrap().name);\n\n // });\n\n get_table(pipeconf, name).map(|table| table.preamble.as_ref().unwrap().id)\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 17, "score": 258691.1334537385 }, { "content": "pub fn get_actions_id(pipeconf: &P4Info, action_name: &str) -> Option<u32> {\n\n get_action(pipeconf, action_name).map(|table| table.preamble.as_ref().unwrap().id)\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 18, "score": 
255412.12131187526 }, { "content": "/// A boxed `Future + Send` trait object.\n\n///\n\n/// This type alias represents a boxed future that is [`Send`] and can be moved\n\n/// across threads.\n\ntype BoxFuture<T, E> = Pin<Box<dyn Future<Output = Result<T, E>> + Send + Sync>>;\n\n\n", "file_path": "rusty-p4-core/src/util.rs", "rank": 19, "score": 255064.28378998517 }, { "content": "pub fn new_probe(cp: &ConnectPoint) -> Bytes {\n\n let probe = serde_json::to_vec(cp).unwrap();\n\n Ethernet {\n\n src: &[0x12, 0x34, 0x56, 0x12, 0x34, 0x56],\n\n dst: MAC::broadcast().as_ref(),\n\n ether_type: 0x861,\n\n payload: probe.as_ref(),\n\n }\n\n .write_to_bytes()\n\n}\n", "file_path": "rusty-p4-app/src/app/linkprobe.rs", "rank": 20, "score": 248081.67322652828 }, { "content": "pub fn new_master_update_request(\n\n device_id: u64,\n\n option: Bmv2MasterUpdateOption,\n\n) -> StreamMessageRequest {\n\n StreamMessageRequest {\n\n update: Some(stream_message_request::Update::Arbitration(\n\n MasterArbitrationUpdate {\n\n device_id,\n\n role: None,\n\n election_id: Uint128 {\n\n high: option.election_id_high,\n\n low: option.election_id_low,\n\n }\n\n .into(),\n\n status: None,\n\n },\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 21, "score": 246966.28943899448 }, { "content": "pub fn adjust_value(mut value: Bytes, bytes_len: usize) -> Bytes {\n\n if bytes_len == value.len() {\n\n value\n\n } else if bytes_len < value.len() {\n\n value.slice(value.len() - bytes_len..value.len())\n\n } else {\n\n let mut value2 = BytesMut::from(value.as_ref());\n\n value2.extend(vec![0u8; bytes_len - value.len()]);\n\n value2.freeze()\n\n }\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 22, "score": 242938.65610704082 }, { "content": "pub fn get_packin_egress_port_metaid(p4info: &P4Info) -> Option<u32> {\n\n p4info\n\n .controller_packet_metadata\n\n .iter()\n\n .find(|p| {\n\n let pre = p.preamble.as_ref().unwrap();\n\n pre.name == \"packet_in\"\n\n })\n\n 
.and_then(|x| {\n\n x.metadata\n\n .iter()\n\n .find(|meta| meta.name == \"ingress_port\")\n\n .map(|x| x.id)\n\n })\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 23, "score": 240878.612719381 }, { "content": "pub fn get_packout_egress_port_metaid(p4info: &P4Info) -> Option<u32> {\n\n p4info\n\n .controller_packet_metadata\n\n .iter()\n\n .find(|p| {\n\n let pre = p.preamble.as_ref().unwrap();\n\n pre.name == \"packet_out\"\n\n })\n\n .and_then(|x| {\n\n x.metadata\n\n .iter()\n\n .find(|meta| meta.name == \"egress_port\")\n\n .map(|x| x.id)\n\n })\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 24, "score": 240878.612719381 }, { "content": "pub trait Pipeconf: Send + Sync {\n\n fn get_id(&self) -> PipeconfID;\n\n fn get_name(&self) -> &str;\n\n fn get_p4info(&self) -> &P4Info;\n\n fn get_bmv2_file_path(&self) -> &Path;\n\n fn get_any_behaviour(&self, name: &str) -> Box<dyn std::any::Any>;\n\n fn get_packetin_ingress_id(&self) -> u32;\n\n fn get_packetout_egress_id(&self) -> u32;\n\n}\n\n\n\nimpl Pipeconf for &Arc<dyn Pipeconf> {\n\n fn get_id(&self) -> PipeconfID {\n\n self.as_ref().get_id()\n\n }\n\n\n\n fn get_name(&self) -> &str {\n\n self.as_ref().get_name()\n\n }\n\n\n\n fn get_p4info(&self) -> &P4Info {\n", "file_path": "rusty-p4-core/src/p4rt/pipeconf.rs", "rank": 25, "score": 237071.4935559021 }, { "content": "#[proc_macro]\n\npub fn flow_match(input: TokenStream) -> TokenStream {\n\n let flow_match = parse_macro_input!(input as _FlowMatch);\n\n\n\n TokenStream::from(flow_match_to_quotes(flow_match))\n\n}\n\n\n", "file_path": "macro-impl/src/lib.rs", "rank": 26, "score": 223520.46196867834 }, { "content": "pub fn adjust_value(mut value: Bytes, bytes_len: usize) -> Bytes {\n\n if bytes_len == value.len() {\n\n value\n\n } else if bytes_len < value.len() {\n\n value.slice(value.len() - bytes_len..value.len())\n\n } else {\n\n let mut value2 = BytesMut::from(value.as_ref());\n\n value2.extend(vec![0u8; bytes_len - value.len()]);\n\n 
value2.freeze()\n\n }\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 27, "score": 219351.46064577403 }, { "content": "pub fn get_table<'a>(pipeconf: &'a P4Info, name: &str) -> Option<&'a Table> {\n\n pipeconf\n\n .tables\n\n .iter()\n\n .filter(|t| t.preamble.is_some())\n\n .find(|t| {\n\n let pre = t.preamble.as_ref().unwrap();\n\n &pre.name == name || &pre.alias == name\n\n })\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 28, "score": 218793.96992738545 }, { "content": "pub fn get_action<'a>(pipeconf: &'a P4Info, name: &str) -> Option<&'a Action> {\n\n pipeconf\n\n .actions\n\n .iter()\n\n .filter(|t| t.preamble.is_some())\n\n .find(|t| {\n\n let pre = t.preamble.as_ref().unwrap();\n\n &pre.name == name || &pre.alias == name\n\n })\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 29, "score": 218793.9699273855 }, { "content": "pub fn get_counter<'a>(pipeconf: &'a P4Info, name: &str) -> Option<&'a Counter> {\n\n pipeconf\n\n .counters\n\n .iter()\n\n .filter(|t| t.preamble.is_some())\n\n .find(|t| {\n\n let pre = t.preamble.as_ref().unwrap();\n\n &pre.name == name || &pre.alias == name\n\n })\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 30, "score": 218793.96992738545 }, { "content": "pub fn get_meter<'a>(pipeconf: &'a P4Info, name: &str) -> Option<&'a Meter> {\n\n pipeconf\n\n .meters\n\n .iter()\n\n .filter(|t| t.preamble.is_some())\n\n .find(|t| {\n\n let pre = t.preamble.as_ref().unwrap();\n\n &pre.name == name || &pre.alias == name\n\n })\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 31, "score": 218793.96992738545 }, { "content": "#[proc_macro]\n\npub fn flow(input: TokenStream) -> TokenStream {\n\n let flow = parse_macro_input!(input as _Flow);\n\n let flow_table_name = flow\n\n .pipe\n\n .as_ref()\n\n .map(|pipe| format!(\"{}.{}\", pipe, &flow.table))\n\n .unwrap_or(flow.table.clone());\n\n let action_name = if flow.action_name == \"NoAction\" {\n\n 
flow.action_name\n\n } else {\n\n flow.pipe\n\n .as_ref()\n\n .map(|pipe| format!(\"{}.{}\", pipe, flow.action_name))\n\n .unwrap_or(flow.action_name)\n\n };\n\n let flow_matches = flow_match_to_quotes(flow.table_match);\n\n let action_params = action_params_to_quote(flow.action_parameters);\n\n let priority = flow.priority.map(|expr| quote!(#expr)).unwrap_or(quote!(1));\n\n TokenStream::from(quote! {\n\n rusty_p4::util::flow::Flow {\n", "file_path": "macro-impl/src/lib.rs", "rank": 32, "score": 218539.0165663974 }, { "content": "pub fn get_directcounter<'a>(pipeconf: &'a P4Info, name: &str) -> Option<&'a DirectCounter> {\n\n pipeconf\n\n .direct_counters\n\n .iter()\n\n .filter(|t| t.preamble.is_some())\n\n .find(|t| {\n\n let pre = t.preamble.as_ref().unwrap();\n\n &pre.name == name || &pre.alias == name\n\n })\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 33, "score": 215797.81322453968 }, { "content": "#[async_trait]\n\npub trait Handler<E>: 'static + Send + Sync {\n\n async fn handle(&self, event: E);\n\n}\n\n\n\npub struct Publisher<E> {\n\n handlers: RwLock<Vec<Box<dyn Handler<E>>>>,\n\n}\n\n\n\nimpl<E> Default for Publisher<E> {\n\n fn default() -> Self {\n\n Publisher {\n\n handlers: RwLock::new(Vec::new()),\n\n }\n\n }\n\n}\n\n\n\nimpl<E> Publisher<E>\n\nwhere\n\n E: Clone + 'static,\n\n{\n", "file_path": "rusty-p4-core/src/util/publisher.rs", "rank": 34, "score": 214509.83514607535 }, { "content": "pub fn hash<T>(obj: T) -> u64\n\nwhere\n\n T: Hash,\n\n{\n\n let mut hasher = DefaultHasher::new();\n\n obj.hash(&mut hasher);\n\n hasher.finish()\n\n}\n\n\n\npub struct FinishSignal {\n\n inner: tokio::sync::oneshot::Receiver<()>,\n\n}\n\n\n\nimpl FinishSignal {\n\n pub fn new(inner: tokio::sync::oneshot::Receiver<()>) -> Self {\n\n Self { inner }\n\n }\n\n}\n\n\n\nimpl std::future::Future for FinishSignal {\n", "file_path": "rusty-p4-core/src/util.rs", "rank": 35, "score": 212838.53720420488 }, { "content": "pub trait Pipeconf: 
Send+Sync {\n\n fn get_id(&self) -> PipeconfID;\n\n fn get_name(&self) -> &str;\n\n fn get_p4info(&self) -> &P4Info;\n\n fn get_bmv2_file_path(&self) -> &Path;\n\n fn get_any_behaviour(&self, name: &str) -> Box<dyn std::any::Any>;\n\n fn get_packetin_ingress_id(&self) -> u32;\n\n fn get_packetout_egress_id(&self) -> u32;\n\n}\n\n\n\nimpl Pipeconf for &Arc<dyn Pipeconf> {\n\n fn get_id(&self) -> PipeconfID {\n\n self.as_ref().get_id()\n\n }\n\n\n\n fn get_name(&self) -> &str {\n\n self.as_ref().get_name()\n\n }\n\n\n\n fn get_p4info(&self) -> &P4Info {\n", "file_path": "src/p4rt/pipeconf.rs", "rank": 36, "score": 209271.90955078293 }, { "content": "pub fn adjust_value_with(value: Bytes, bytes_len: usize, e: u8) -> Bytes {\n\n if bytes_len == value.len() {\n\n value\n\n } else if bytes_len < value.len() {\n\n value.slice(value.len() - bytes_len..value.len())\n\n } else {\n\n let mut value2 = BytesMut::from(value.as_ref());\n\n value2.extend(vec![e; bytes_len - value.len()]);\n\n value2.freeze()\n\n }\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 37, "score": 207079.34546955512 }, { "content": "fn map_err(_err: rusty_p4_core::error::MyError) -> warp::Rejection {\n\n warp::reject()\n\n}\n\n\n\nasync fn send(\n\n target: String,\n\n action: String,\n\n path: Tail,\n\n params: HashMap<String, String>,\n\n service_bus: ServiceBus,\n\n) -> Result<impl warp::Reply, Rejection> {\n\n let response = service_bus\n\n .send::<WebServer>(\n\n &target,\n\n DefaultRequest {\n\n path: path.as_str().split(\"\\\\\").map(|x| x.to_owned()).collect(),\n\n action,\n\n params,\n\n },\n\n Default::default(),\n", "file_path": "rusty-p4-northbound/src/web.rs", "rank": 38, "score": 201067.1410780184 }, { "content": "#[derive(Debug)]\n\nstruct _FlowMatch {\n\n pub items: Punctuated<_FlowMatchItem, Token![,]>,\n\n pub default: Option<Expr>,\n\n}\n\n\n\nimpl Parse for _FlowMatchItem {\n\n fn parse(input: &ParseBuffer) -> Result<Self> {\n\n let key: LitStr = 
input.parse()?;\n\n input.parse::<Token![=]>()?;\n\n input.parse::<Token![>]>()?;\n\n let value = input.parse()?;\n\n Ok(Self {\n\n key: key.value(),\n\n value,\n\n })\n\n }\n\n}\n\n\n\nimpl Parse for _FlowActionItem {\n\n fn parse(input: &ParseBuffer) -> Result<Self> {\n", "file_path": "macro-impl/src/lib.rs", "rank": 39, "score": 197578.25086419572 }, { "content": "pub fn new_stratum_get_interfaces_name() -> rusty_p4_proto::proto::gnmi::GetRequest {\n\n rusty_p4_proto::proto::gnmi::GetRequest {\n\n prefix: None,\n\n path: vec![crate::gnmi::new_gnmi_path(\n\n \"/interfaces/interface[name=*]/state/name\",\n\n )],\n\n r#type: 1,\n\n encoding: 2,\n\n use_models: vec![],\n\n extension: vec![],\n\n }\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 40, "score": 197095.6255083157 }, { "content": "pub fn get_meter<'a>(pipeconf: &'a P4Info, name: &str) -> Option<&'a Meter> {\n\n pipeconf\n\n .meters\n\n .iter()\n\n .filter(|t| t.preamble.is_some())\n\n .find(|t| {\n\n let pre = t.preamble.as_ref().unwrap();\n\n &pre.name == name || &pre.alias == name\n\n })\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 41, "score": 195751.2143367965 }, { "content": "pub fn get_counter<'a>(pipeconf: &'a P4Info, name: &str) -> Option<&'a Counter> {\n\n pipeconf\n\n .counters\n\n .iter()\n\n .filter(|t| t.preamble.is_some())\n\n .find(|t| {\n\n let pre = t.preamble.as_ref().unwrap();\n\n &pre.name == name || &pre.alias == name\n\n })\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 42, "score": 195751.2143367965 }, { "content": "pub fn get_action<'a>(pipeconf: &'a P4Info, name: &str) -> Option<&'a Action> {\n\n pipeconf\n\n .actions\n\n .iter()\n\n .filter(|t| t.preamble.is_some())\n\n .find(|t| {\n\n let pre = t.preamble.as_ref().unwrap();\n\n &pre.name == name || &pre.alias == name\n\n })\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 43, "score": 195751.2143367965 }, { "content": "pub fn get_table<'a>(pipeconf: &'a P4Info, name: &str) -> Option<&'a 
Table> {\n\n pipeconf\n\n .tables\n\n .iter()\n\n .filter(|t| t.preamble.is_some())\n\n .find(|t| {\n\n let pre = t.preamble.as_ref().unwrap();\n\n &pre.name == name || &pre.alias == name\n\n })\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 44, "score": 195751.2143367965 }, { "content": "pub fn get_match_field_pb(\n\n pipeconf: &P4Info,\n\n table_name: &str,\n\n match_field_name: &str,\n\n value: &InnerValue,\n\n) -> Option<FieldMatch> {\n\n let p4info_match = get_match_field_by_name(pipeconf, table_name, match_field_name).unwrap();\n\n let bitwidth = p4info_match.bitwidth;\n\n let byte_len = (bitwidth as f32 / 8.0).ceil() as usize;\n\n let x = p4info_match.r#match.as_ref().map(|x| {\n\n match x {\n\n match_field::Match::MatchType(x) => {\n\n match (match_field::MatchType::from_i32(*x), value) {\n\n (Some(match_field::MatchType::Exact), InnerValue::EXACT(v)) => {\n\n // assert_eq!(byte_len, v.len());\n\n let v = adjust_value(v.clone(), byte_len);\n\n field_match::FieldMatchType::Exact(\n\n crate::proto::p4runtime::field_match::Exact { value: v },\n\n )\n\n }\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 45, "score": 195633.54664079804 }, { "content": "pub fn new_write_table_entry(\n\n device_id: u64,\n\n table_entry: TableEntry,\n\n update: UpdateType,\n\n) -> WriteRequest {\n\n let update_type = if table_entry.is_default_action {\n\n crate::proto::p4runtime::update::Type::Modify\n\n } else {\n\n update.into()\n\n };\n\n let mut request = crate::proto::p4runtime::WriteRequest {\n\n device_id,\n\n role_id: 0,\n\n election_id: Some(Uint128 { high: 0, low: 1 }),\n\n updates: vec![Update {\n\n r#type: update_type as i32,\n\n entity: Some(Entity {\n\n entity: Some(crate::proto::p4runtime::entity::Entity::TableEntry(\n\n table_entry.clone(),\n\n )),\n\n }),\n\n }],\n\n atomicity: 0,\n\n };\n\n request\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 46, "score": 195592.5037511093 }, { "content": "#[derive(Debug)]\n\nstruct 
_FlowMatchItem {\n\n pub key: String,\n\n pub value: _FlowMatchValue,\n\n}\n\n\n", "file_path": "macro-impl/src/lib.rs", "rank": 47, "score": 193871.2556836644 }, { "content": "pub fn get_directcounter<'a>(pipeconf: &'a P4Info, name: &str) -> Option<&'a DirectCounter> {\n\n pipeconf\n\n .direct_counters\n\n .iter()\n\n .filter(|t| t.preamble.is_some())\n\n .find(|t| {\n\n let pre = t.preamble.as_ref().unwrap();\n\n &pre.name == name || &pre.alias == name\n\n })\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 48, "score": 193029.95583986305 }, { "content": "pub fn get_match_field_by_name<'a>(\n\n pipeconf: &'a P4Info,\n\n table_name: &str,\n\n name: &str,\n\n) -> Option<&'a MatchField> {\n\n for t in pipeconf.tables.iter().filter(|p| p.preamble.is_some()) {\n\n let pre = t.preamble.as_ref().unwrap();\n\n if &pre.name == table_name {\n\n for mf in t.match_fields.iter() {\n\n if &mf.name == name {\n\n return Some(mf);\n\n }\n\n }\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 49, "score": 192863.6890140432 }, { "content": "pub fn get_match_field_by_id<'a>(\n\n pipeconf: &'a P4Info,\n\n table_name: &str,\n\n id: u32,\n\n) -> Option<&'a MatchField> {\n\n for t in pipeconf.tables.iter().filter(|p| p.preamble.is_some()) {\n\n let pre = t.preamble.as_ref().unwrap();\n\n if &pre.name == table_name {\n\n for mf in t.match_fields.iter() {\n\n if mf.id == id {\n\n return Some(mf);\n\n }\n\n }\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 50, "score": 192863.6890140432 }, { "content": "fn flow_match_to_quotes(flow_match: _FlowMatch) -> proc_macro2::TokenStream {\n\n if flow_match.items.is_empty() && flow_match.default.is_none() {\n\n if let Some(default) = flow_match.default {\n\n return quote! {\n\n #default.clone()\n\n };\n\n } else {\n\n return quote! 
{\n\n std::sync::Arc::new(<rusty_p4::util::SmallVec<[rusty_p4::util::flow::FlowMatch;3]>>::new())\n\n };\n\n }\n\n }\n\n\n\n let mut quotes = Vec::with_capacity(flow_match.items.len());\n\n for m in flow_match.items {\n\n let name = m.key;\n\n match m.value {\n\n _FlowMatchValue::Exact(expr) => {\n\n quotes.push(quote! {\n\n rusty_p4::util::flow::FlowMatch {\n", "file_path": "macro-impl/src/lib.rs", "rank": 51, "score": 190108.6571302701 }, { "content": "pub fn new_stratum_get_interface_mac(name: &str) -> rusty_p4_proto::proto::gnmi::GetRequest {\n\n rusty_p4_proto::proto::gnmi::GetRequest {\n\n prefix: None,\n\n path: vec![crate::gnmi::new_gnmi_path(&format!(\n\n \"/interfaces/interface[name={}]/ethernet/state/mac-address\",\n\n name\n\n ))],\n\n r#type: 1,\n\n encoding: 2,\n\n use_models: vec![],\n\n extension: vec![],\n\n }\n\n}\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 52, "score": 188209.64543061843 }, { "content": "#[async_trait]\n\npub trait Handler<E>:'static + Send + Sync {\n\n async fn handle(&self, event: E);\n\n}\n\n\n\npub struct Publisher<E> {\n\n handlers: RwLock<Vec<Box<dyn Handler<E>>>>\n\n}\n\n\n\nimpl<E> Default for Publisher<E> {\n\n fn default() -> Self {\n\n Publisher {\n\n handlers: RwLock::new(Vec::new()),\n\n }\n\n }\n\n}\n\n\n\nimpl<E> Publisher<E> where E:Clone + 'static {\n\n pub async fn emit(&self, event: E) {\n\n for x in self.handlers.read().iter() {\n\n x.handle(event.clone()).await;\n\n }\n\n }\n\n\n\n pub fn add_handler<H>(&self, handler: H) where H: Handler<E> {\n\n self.handlers.write().push(Box::new(handler));\n\n }\n\n}", "file_path": "src/util/publisher.rs", "rank": 53, "score": 188057.71313633362 }, { "content": "pub fn table_entry_to_entity(table_entry: TableEntry) -> Entity {\n\n Entity {\n\n entity: Some(crate::proto::p4runtime::entity::Entity::TableEntry(\n\n table_entry,\n\n )),\n\n }\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 54, "score": 187727.38101661138 }, { "content": "pub 
fn hash<T>(obj: T) -> u64\n\nwhere\n\n T: Hash,\n\n{\n\n let mut hasher = DefaultHasher::new();\n\n obj.hash(&mut hasher);\n\n hasher.finish()\n\n}\n\n\n\npub struct FinishSignal {\n\n inner: tokio::sync::oneshot::Receiver<()>\n\n}\n\n\n\nimpl FinishSignal {\n\n pub fn new(inner: tokio::sync::oneshot::Receiver<()>) -> Self {\n\n Self {\n\n inner\n\n }\n\n }\n\n}\n", "file_path": "src/util.rs", "rank": 55, "score": 185949.2394551193 }, { "content": "#[async_trait]\n\npub trait Connection:Send+Sync+'static {\n\n async fn master_updated(&mut self,master_update:MasterArbitrationUpdate) -> Result<(), ContextError>;\n\n\n\n async fn set_pipeconf(&mut self, pipeconf:Pipeconf) -> Result<(), ContextError>;\n\n\n\n fn clone_box(&self)->ConnectionBox;\n\n\n\n fn as_any(&self) -> &dyn Any;\n\n\n\n fn as_mut_any(&mut self) -> &mut dyn Any;\n\n}\n\n\n\npub struct ConnectionBox {\n\n // use arc?\n\n pub(crate) inner:Box<dyn Connection>,\n\n pub p4runtime_client: P4RuntimeClient,\n\n pub sink: tokio::sync::mpsc::Sender<StreamMessageRequest>,\n\n pub device_id: u64,\n\n pub pipeconf: Pipeconf,\n\n pub master_arbitration:Option<MasterArbitrationUpdate>\n", "file_path": "src/core/connection.rs", "rank": 56, "score": 185791.72632457438 }, { "content": "pub fn on_arp_received<E, C>(\n\n data: Ethernet<&[u8]>,\n\n cp: ConnectPoint,\n\n state: &CommonState,\n\n ctx: &mut C,\n\n) where\n\n E: Event,\n\n C: Context<E>\n\n{\n\n let device = cp.device;\n\n let data = data.payload;\n\n let arp = Arp::from_bytes(data);\n\n if arp.is_none() {\n\n error!(target:\"proxyarp\",\"invalid arp packet\");\n\n return;\n\n }\n\n let state = state.inner.lock();\n\n let arp = arp.unwrap();\n\n let arp_sender_mac = MAC::from_slice(arp.sender_mac);\n\n match arp.opcode {\n", "file_path": "rusty-p4-app/src/app/proxyarp.rs", "rank": 57, "score": 185481.05536207103 }, { "content": "pub fn adjust_value_with(value: Bytes, bytes_len: usize, e: u8) -> Bytes {\n\n if bytes_len == value.len() {\n\n value\n\n } else 
if bytes_len < value.len() {\n\n value.slice(value.len() - bytes_len..value.len())\n\n } else {\n\n let mut value2 = BytesMut::from(value.as_ref());\n\n value2.extend(vec![e; bytes_len - value.len()]);\n\n value2.freeze()\n\n }\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 58, "score": 181500.61352543323 }, { "content": "pub fn install<S, T>(store: &mut S, option: T::Option) -> Option<Arc<T>>\n\nwhere\n\n T: App + Clone,\n\n S: AppStore,\n\n{\n\n let dependencies: T::Dependency = T::Dependency::get(store)?;\n\n let app = T::init(dependencies, store, option);\n\n Some(store.store(app))\n\n}\n\n\n\n/// The default implementation of `AppStore` which should just work.\n\n#[derive(Default)]\n\npub struct DefaultAppStore {\n\n pub map: HashMap<TypeId, Arc<dyn Any + Send + Sync>>,\n\n // pub handler_map: HashMap<TypeId, Vec<Arc<dyn Any+Send+Sync>>>,\n\n // pub join_handle: Vec<BoxFuture<'static, ()>>\n\n}\n\n\n\nimpl AppStore for DefaultAppStore {\n\n fn store<T>(&mut self, object: T) -> Arc<T>\n", "file_path": "rusty-p4-core/src/app/store.rs", "rank": 59, "score": 175515.6365727977 }, { "content": "pub fn new_stratum_get_interfaces_name() -> rusty_p4_proto::proto::gnmi::GetRequest {\n\n rusty_p4_proto::proto::gnmi::GetRequest {\n\n prefix: None,\n\n path: vec![crate::gnmi::new_gnmi_path(\n\n \"/interfaces/interface[name=*]/state/name\",\n\n )],\n\n r#type: 1,\n\n encoding: 2,\n\n use_models: vec![],\n\n extension: vec![],\n\n }\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 60, "score": 174874.2857323261 }, { "content": "pub fn get_match_field_pb(\n\n pipeconf: &P4Info,\n\n table_name: &str,\n\n match_field_name: &str,\n\n value: &InnerValue,\n\n) -> Option<FieldMatch> {\n\n let p4info_match = get_match_field_by_name(pipeconf, table_name, match_field_name).unwrap();\n\n let bitwidth = p4info_match.bitwidth;\n\n let byte_len = (bitwidth as f32 / 8.0).ceil() as usize;\n\n let x = p4info_match.r#match.as_ref().map(|x| {\n\n match x {\n\n 
match_field::Match::MatchType(x) => {\n\n match (match_field::MatchType::from_i32(*x), value) {\n\n (Some(match_field::MatchType::Exact), InnerValue::EXACT(v)) => {\n\n // assert_eq!(byte_len, v.len());\n\n let v = adjust_value(v.clone(), byte_len);\n\n field_match::FieldMatchType::Exact(\n\n crate::proto::p4runtime::field_match::Exact { value: v },\n\n )\n\n }\n", "file_path": "src/p4rt/pure.rs", "rank": 61, "score": 168738.77751384221 }, { "content": "pub fn new_write_table_entry(\n\n device_id: u64,\n\n table_entry: TableEntry,\n\n update: UpdateType,\n\n) -> WriteRequest {\n\n let update_type = if table_entry.is_default_action {\n\n crate::proto::p4runtime::update::Type::Modify\n\n } else {\n\n update.into()\n\n };\n\n let mut request = crate::proto::p4runtime::WriteRequest {\n\n device_id,\n\n role_id: 0,\n\n election_id: Some(Uint128 { high: 0, low: 1 }),\n\n updates: vec![Update {\n\n r#type: update_type as i32,\n\n entity: Some(Entity {\n\n entity: Some(crate::proto::p4runtime::entity::Entity::TableEntry(\n\n table_entry.clone(),\n\n )),\n\n }),\n\n }],\n\n atomicity: 0,\n\n };\n\n request\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 62, "score": 168695.0060362194 }, { "content": "pub fn new_gnmi_path(path: &str) -> rusty_p4_proto::proto::gnmi::Path {\n\n let elems: Vec<&str> = path.split('/').collect();\n\n let mut value = vec![];\n\n for elem in elems {\n\n if elem.contains('[') {\n\n let mut t: Vec<&str> = elem.split('[').collect();\n\n let mut map = HashMap::new();\n\n let name = t.remove(0).to_string();\n\n for kv in t {\n\n let mut kv: Vec<&str> = kv.split('=').collect();\n\n let key = kv.remove(0);\n\n let value = kv.remove(0);\n\n let (value, _) = value.split_at(value.len() - 1);\n\n map.insert(key.to_string(), value.to_string());\n\n }\n\n value.push(rusty_p4_proto::proto::gnmi::PathElem { name, key: map });\n\n } else {\n\n if !elem.is_empty() {\n\n value.push(rusty_p4_proto::proto::gnmi::PathElem {\n\n name: elem.to_string(),\n", 
"file_path": "rusty-p4-core/src/gnmi.rs", "rank": 63, "score": 166609.5936101932 }, { "content": "pub fn new_stratum_get_interface_mac(name: &str) -> rusty_p4_proto::proto::gnmi::GetRequest {\n\n rusty_p4_proto::proto::gnmi::GetRequest {\n\n prefix: None,\n\n path: vec![crate::gnmi::new_gnmi_path(&format!(\n\n \"/interfaces/interface[name={}]/ethernet/state/mac-address\",\n\n name\n\n ))],\n\n r#type: 1,\n\n encoding: 2,\n\n use_models: vec![],\n\n extension: vec![],\n\n }\n\n}\n", "file_path": "src/p4rt/pure.rs", "rank": 64, "score": 166506.75849907723 }, { "content": "pub fn get_match_field_by_id<'a>(\n\n pipeconf: &'a P4Info,\n\n table_name: &str,\n\n id: u32,\n\n) -> Option<&'a MatchField> {\n\n for t in pipeconf.tables.iter().filter(|p| p.preamble.is_some()) {\n\n let pre = t.preamble.as_ref().unwrap();\n\n if &pre.name == table_name {\n\n for mf in t.match_fields.iter() {\n\n if mf.id == id {\n\n return Some(mf);\n\n }\n\n }\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 65, "score": 165968.91988708737 }, { "content": "pub fn get_match_field_by_name<'a>(\n\n pipeconf: &'a P4Info,\n\n table_name: &str,\n\n name: &str,\n\n) -> Option<&'a MatchField> {\n\n for t in pipeconf.tables.iter().filter(|p| p.preamble.is_some()) {\n\n let pre = t.preamble.as_ref().unwrap();\n\n if &pre.name == table_name {\n\n for mf in t.match_fields.iter() {\n\n if &mf.name == name {\n\n return Some(mf);\n\n }\n\n }\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 66, "score": 165968.91988708737 }, { "content": "pub fn table_entry_to_entity(table_entry: TableEntry) -> Entity {\n\n Entity {\n\n entity: Some(crate::proto::p4runtime::entity::Entity::TableEntry(\n\n table_entry,\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 67, "score": 162036.31054227427 }, { "content": "pub fn build_table_entry(\n\n p4info: &P4Info,\n\n table_name: &str,\n\n match_fields: &[FlowMatch],\n\n default_action: bool,\n\n action_name: 
&str,\n\n action_params: &[FlowActionParam],\n\n priority: i32,\n\n metadata: u64,\n\n) -> TableEntry {\n\n let action = if !action_name.is_empty() {\n\n let action_id = get_actions_id(p4info, action_name);\n\n let action_id = if action_id.is_none() {\n\n panic!(\"action with name '{}' not found.\", action_name);\n\n } else {\n\n action_id.unwrap()\n\n };\n\n let mut action = crate::proto::p4runtime::Action {\n\n action_id: get_actions_id(p4info, action_name).unwrap(),\n\n params: vec![],\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 68, "score": 161198.10450513772 }, { "content": "struct _Flow {\n\n pipe: Option<String>,\n\n table: String,\n\n table_match: _FlowMatch,\n\n action_name: String,\n\n action_parameters: Option<Punctuated<_FlowActionItem, Token![,]>>,\n\n priority: Option<Expr>,\n\n}\n\n\n\nimpl Parse for _Flow {\n\n fn parse(input: &ParseBuffer) -> Result<Self> {\n\n let mut pipe = None;\n\n let mut table = None;\n\n let mut table_matches = None;\n\n let mut action = None;\n\n let mut action_params = None;\n\n let mut priority = None;\n\n while !input.is_empty() {\n\n let field_name = input.parse::<Ident>()?.to_string();\n\n match field_name.as_ref() {\n", "file_path": "macro-impl/src/lib.rs", "rank": 69, "score": 159548.0011857046 }, { "content": "pub fn get_action_param_pb(\n\n pipeconf: &P4Info,\n\n action_name: &str,\n\n param_name: &str,\n\n mut value: InnerParamValue,\n\n) -> crate::proto::p4runtime::action::Param {\n\n let p4info_param = get_action_param_by_name(pipeconf, action_name, param_name).unwrap();\n\n let bytes_len = (p4info_param.bitwidth as f32 / 8.0).ceil() as usize;\n\n // println!(\"adjust value: action:{}, param:{}, value:{:?}, bitwidth:{}\",action_name,param_name,value,p4info_param.bitwidth);\n\n let value = adjust_value(value, bytes_len);\n\n let p4runtime_param = crate::proto::p4runtime::action::Param {\n\n param_id: p4info_param.id,\n\n value,\n\n };\n\n return p4runtime_param;\n\n}\n\n\n\npub async fn 
new_set_forwarding_pipeline_config_request(\n\n p4info: &P4Info,\n\n bmv2_json_file_path: &Path,\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 70, "score": 158949.89245797892 }, { "content": "pub fn get_action_param_by_name<'a>(\n\n pipeconf: &'a P4Info,\n\n action_name: &str,\n\n param: &str,\n\n) -> Option<&'a action::Param> {\n\n get_action(pipeconf, action_name).map_or(None, |action| {\n\n for p in action.params.iter() {\n\n if &p.name == param {\n\n return Some(p);\n\n }\n\n }\n\n None\n\n })\n\n}\n\n\n", "file_path": "rusty-p4-core/src/p4rt/pure.rs", "rank": 71, "score": 156180.03483122407 }, { "content": "#[async_trait]\n\npub trait App: Sync + Send + 'static + Sized {\n\n /// To support special function (like optional dependency), some generic container type (like `Arc` or `Option`) can also be defined as app.\n\n /// If you create a container type (`Option<T>`), use this field to specify the target type (`T`).\n\n /// For regular app, this field should be `Self`.\n\n type Container: App + Clone;\n\n /// Use this field to specify your dependency.\n\n /// It should be a variadic tuple, but variadic tuple is not supported currently.\n\n /// So now, rusty-p4 use crate `tuple_list` to define dependencies.\n\n /// Use `()` for no dependency.\n\n type Dependency: Dependencies;\n\n type Option: options::AppOption;\n\n /// Name your app please.\n\n const Name: &'static str;\n\n\n\n /// This is where app get their dependencies, option then initialize, returns a new instance of app.\n\n /// Container type should not be `install`ed directly, so this method should not be called for container type.\n\n fn init<S>(dependencies: Self::Dependency, store: &mut S, option: Self::Option) -> Self\n\n where\n\n S: store::AppStore;\n\n\n", "file_path": "rusty-p4-core/src/app.rs", "rank": 72, "score": 154158.59562168366 }, { "content": "#[derive(Debug)]\n\nstruct _FlowActionItem {\n\n pub key: String,\n\n pub value: Expr,\n\n}\n\n\n", "file_path": 
"macro-impl/src/lib.rs", "rank": 73, "score": 153862.202539573 }, { "content": "type P4RuntimeClient =\n\ncrate::proto::p4runtime::p4_runtime_client::P4RuntimeClient<tonic::transport::channel::Channel>;\n\n\n", "file_path": "src/core/connection.rs", "rank": 74, "score": 153309.78370711947 }, { "content": "pub fn install<S, T>(store: &mut S, option: T::Option) -> Option<Arc<T>>\n\nwhere T:App + Clone, S: AppStore\n\n{\n\n let dependencies: T::Dependency = T::Dependency::get(store)?;\n\n let app = T::init(dependencies, store, option);\n\n Some(store.store(app))\n\n}\n\n\n\n/// The default implementation of `AppStore` which should just work.\n\n#[derive(Default)]\n\npub struct DefaultAppStore {\n\n pub map: HashMap<TypeId, Arc<dyn Any + Send + Sync>>,\n\n // pub handler_map: HashMap<TypeId, Vec<Arc<dyn Any+Send+Sync>>>,\n\n // pub join_handle: Vec<BoxFuture<'static, ()>>\n\n}\n\n\n\nimpl AppStore for DefaultAppStore {\n\n fn store<T>(&mut self, object: T) -> Arc<T> where T: App + Clone + 'static {\n\n let b = Arc::new(object);\n\n let ret = b.clone();\n", "file_path": "src/app/store.rs", "rank": 75, "score": 151525.1852041488 }, { "content": "pub trait ToEntity {\n\n fn to_proto_entity(&self, pipeconf: &DefaultPipeconf) -> Option<ProtoEntity>;\n\n}\n", "file_path": "rusty-p4-core/src/entity.rs", "rank": 76, "score": 151454.33693650187 }, { "content": "#[derive(Debug)]\n\nstruct Inner {\n\n pub p4info: P4Info,\n\n pub bmv2_json_file_path: PathBuf,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Copy, Clone, Eq, PartialEq, Hash, Debug)]\n\npub struct PipeconfID(pub u64);\n\n\n\nimpl Pipeconf for DefaultPipeconf {\n\n fn get_id(&self) -> PipeconfID {\n\n self.id\n\n }\n\n\n\n fn get_name(&self) -> &str {\n\n &self.name\n\n }\n\n\n\n fn get_p4info(&self) -> &P4Info {\n\n &self.inner.p4info\n\n }\n", "file_path": "rusty-p4-core/src/p4rt/pipeconf.rs", "rank": 77, "score": 150998.582166771 }, { "content": "type P4RuntimeClient =\n\n 
crate::proto::p4runtime::p4_runtime_client::P4RuntimeClient<tonic::transport::channel::Channel>;\n\n\n\n#[derive(Clone)]\n\npub struct Bmv2Connection {\n\n pub p4runtime_client: P4RuntimeClient,\n\n pub sink: tokio::sync::mpsc::Sender<StreamMessageRequest>,\n\n pub device_id: u64,\n\n pub pipeconf: Pipeconf,\n\n pub master_arbitration:Option<MasterArbitrationUpdate>\n\n}\n\n\n\nimpl Bmv2Connection {\n\n pub async fn master_up<E>(\n\n &mut self,\n\n master_update:MasterArbitrationUpdate,\n\n context:&mut DefaultContext<E>\n\n ) -> Result<(), ContextError>\n\n where E:Event+Debug\n\n {\n", "file_path": "src/core/connection/bmv2.rs", "rank": 78, "score": 150595.21248384748 }, { "content": "type P4RuntimeClient =\n\ncrate::proto::p4runtime::p4_runtime_client::P4RuntimeClient<tonic::transport::channel::Channel>;\n", "file_path": "src/core/connection/stratum_bmv2.rs", "rank": 79, "score": 148000.95355408697 }, { "content": "pub trait Packet<'a>\n\nwhere\n\n Self: Sized,\n\n{\n\n type Payload: Packet<'a>;\n\n\n\n fn self_bytes_hint(&self) -> usize;\n\n\n\n fn from_bytes(b: &'a [u8]) -> Option<Self>;\n\n\n\n fn write_self_to_buf<T: BufMut>(&self, buf: &mut T);\n\n\n\n fn write_all_to_buf<T: BufMut>(&self, buf: &mut T) {\n\n self.write_self_to_buf(buf);\n\n if let Some(payload) = self.get_payload() {\n\n payload.write_all_to_buf(buf);\n\n }\n\n }\n\n\n\n fn get_payload(&self) -> Option<&Self::Payload>;\n", "file_path": "src/util/packet.rs", "rank": 80, "score": 147170.34948617124 }, { "content": "pub fn install<S, T>(store: &mut S, option: T::Option) -> Option<Arc<T>>\n\nwhere T:App + Clone, S: AppStore\n\n{\n\n let dependencies: T::Dependency = T::Dependency::get(store)?;\n\n let app = T::init(dependencies, store, option);\n\n Some(store.store(app))\n\n}\n\n\n\n/// The default implementation of `AppStore` which should just work.\n\n#[derive(Default)]\n\npub struct DefaultAppStore {\n\n pub map: HashMap<TypeId, Arc<dyn Any + Send + Sync>>,\n\n // pub handler_map: 
HashMap<TypeId, Vec<Arc<dyn Any+Send+Sync>>>,\n\n // pub join_handle: Vec<BoxFuture<'static, ()>>\n\n}\n\n\n\nimpl AppStore for DefaultAppStore {\n\n fn store<T>(&mut self, object: T) -> Arc<T> where T: App + Clone + 'static {\n\n let b = Arc::new(object);\n\n let ret = b.clone();\n", "file_path": "rusty-p4-app/src/app/store.rs", "rank": 81, "score": 146268.06008966183 }, { "content": "pub trait ParseRequest: Sized {\n\n fn parse(req: DefaultRequest) -> crate::error::Result<Self>;\n\n}\n\n\n\nimpl ParseRequest for DefaultRequest {\n\n fn parse(req: DefaultRequest) -> crate::error::Result<Self> {\n\n Ok(req)\n\n }\n\n}\n\n\n", "file_path": "rusty-p4-core/src/service/request.rs", "rank": 82, "score": 143371.4873903882 }, { "content": "pub fn EXACT<T: Encode>(v: T) -> InnerValue {\n\n InnerValue::EXACT(v.encode())\n\n}\n\n\n", "file_path": "rusty-p4-core/src/util/value.rs", "rank": 83, "score": 143267.78022871018 }, { "content": "pub fn new_gnmi_path(path:&str) -> rusty_p4_proto::proto::gnmi::Path {\n\n let elems:Vec<&str> = path.split('/').collect();\n\n let mut value = vec![];\n\n for elem in elems {\n\n if elem.contains('[') {\n\n let mut t:Vec<&str> = elem.split('[').collect();\n\n let mut map = HashMap::new();\n\n let name = t.remove(0).to_string();\n\n for kv in t {\n\n let mut kv:Vec<&str> = kv.split('=').collect();\n\n let key = kv.remove(0);\n\n let value = kv.remove(0);\n\n let (value,_) = value.split_at(value.len()-1);\n\n map.insert(key.to_string(),value.to_string());\n\n }\n\n value.push(rusty_p4_proto::proto::gnmi::PathElem {\n\n name,\n\n key:map\n\n });\n\n }\n", "file_path": "src/gnmi.rs", "rank": 84, "score": 141941.22079502116 }, { "content": "pub fn encode<T: Encode>(v: T) -> InnerParamValue {\n\n v.encode()\n\n}\n\n\n\npub type InnerParamValue = Bytes;\n\n\n", "file_path": "rusty-p4-core/src/util/value.rs", "rank": 85, "score": 141278.15114413542 }, { "content": "// https://github.com/serde-rs/serde/issues/1030\n\nfn _default_1() -> usize {\n\n 
1\n\n}\n", "file_path": "rusty-p4-core/src/service/request.rs", "rank": 86, "score": 141095.23907503037 }, { "content": "#[test]\n\nfn test_new_gnmi_path() {\n\n dbg!(new_gnmi_path(\"/interfaces/interface[name=*][test=*]\"));\n\n dbg!(new_gnmi_path(\"/interfaces/interface[name=*]\"));\n\n dbg!(new_gnmi_path(\"/\"));\n\n}\n", "file_path": "rusty-p4-core/src/gnmi.rs", "rank": 87, "score": 140236.56866324315 }, { "content": "pub fn LPM<T: Encode>(v: T, prefix_len: i32) -> InnerValue {\n\n InnerValue::LPM(v.encode(), prefix_len)\n\n}\n\n\n", "file_path": "rusty-p4-core/src/util/value.rs", "rank": 88, "score": 134217.71613315842 }, { "content": "pub fn RANGE<T: Encode, P: Encode>(v: T, p: P) -> InnerValue {\n\n InnerValue::RANGE(v.encode(), p.encode())\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, Eq, PartialEq)]\n\npub enum InnerValue {\n\n EXACT(Bytes),\n\n LPM(Bytes, /*prefix_len*/ i32),\n\n TERNARY(Bytes, /*mask*/ Bytes),\n\n RANGE(/*low*/ Bytes, /*high*/ Bytes),\n\n}\n\n\n", "file_path": "rusty-p4-core/src/util/value.rs", "rank": 89, "score": 131399.14952117973 }, { "content": "pub fn build_table_entry(\n\n p4info: &P4Info,\n\n table_name: &str,\n\n match_fields: &[FlowMatch],\n\n default_action: bool,\n\n action_name: &str,\n\n action_params: &[FlowActionParam],\n\n priority: i32,\n\n metadata: u64,\n\n) -> TableEntry {\n\n let action = if !action_name.is_empty() {\n\n let action_id = get_actions_id(p4info, action_name);\n\n let action_id = if action_id.is_none() {\n\n panic!(\"action with name '{}' not found.\", action_name);\n\n } else {\n\n action_id.unwrap()\n\n };\n\n let mut action = crate::proto::p4runtime::Action {\n\n action_id: get_actions_id(p4info, action_name).unwrap(),\n\n params: vec![],\n", "file_path": "src/p4rt/pure.rs", "rank": 90, "score": 131352.63023790828 }, { "content": "fn take_mac(input: &[u8]) -> IResult<&[u8], &[u8; 6]> {\n\n let (b, t) = take(6usize)(input)?;\n\n Ok((b, <&[u8; 6]>::try_from(t).unwrap()))\n\n}\n", "file_path": 
"src/util/packet/ethernet.rs", "rank": 91, "score": 130703.7434850254 }, { "content": "pub fn TERNARY<T: Encode, P: Encode>(v: T, mask: P) -> InnerValue {\n\n InnerValue::TERNARY(v.encode(), mask.encode())\n\n}\n\n\n", "file_path": "rusty-p4-core/src/util/value.rs", "rank": 92, "score": 129625.92643983226 }, { "content": "pub fn get_action_param_pb(\n\n pipeconf: &P4Info,\n\n action_name: &str,\n\n param_name: &str,\n\n mut value: InnerParamValue,\n\n) -> crate::proto::p4runtime::action::Param {\n\n let p4info_param = get_action_param_by_name(pipeconf, action_name, param_name).unwrap();\n\n let bytes_len = (p4info_param.bitwidth as f32 / 8.0).ceil() as usize;\n\n // println!(\"adjust value: action:{}, param:{}, value:{:?}, bitwidth:{}\",action_name,param_name,value,p4info_param.bitwidth);\n\n let value = adjust_value(value, bytes_len);\n\n let p4runtime_param = crate::proto::p4runtime::action::Param {\n\n param_id: p4info_param.id,\n\n value,\n\n };\n\n return p4runtime_param;\n\n}\n\n\n\npub async fn new_set_forwarding_pipeline_config_request(\n\n p4info: &P4Info,\n\n bmv2_json_file_path: &Path,\n", "file_path": "src/p4rt/pure.rs", "rank": 93, "score": 129616.34338783383 }, { "content": "pub fn get_action_param_by_name<'a>(\n\n pipeconf: &'a P4Info,\n\n action_name: &str,\n\n param: &str,\n\n) -> Option<&'a action::Param> {\n\n get_action(pipeconf, action_name).map_or(None, |action| {\n\n for p in action.params.iter() {\n\n if &p.name == param {\n\n return Some(p);\n\n }\n\n }\n\n None\n\n })\n\n}\n\n\n", "file_path": "src/p4rt/pure.rs", "rank": 94, "score": 126846.48576107898 }, { "content": "#[async_trait]\n\npub trait App: Sync + Send + 'static + Sized {\n\n /// To support special function (like optional dependency), some generic container type (like `Arc` or `Option`) can also be defined as app.\n\n /// If you create a container type (`Option<T>`), use this field to specify the target type (`T`).\n\n /// For regular app, this field should be `Self`.\n\n type 
Container: App + Clone;\n\n /// Use this field to specify your dependency. \n\n /// It should be a variadic tuple, but variadic tuple is not supported currently.\n\n /// So now, rusty-p4 use crate `tuple_list` to define dependencies.\n\n /// Use `()` for no dependency.\n\n type Dependency: Dependencies;\n\n type Option: options::AppOption;\n\n /// Name your app please.\n\n const Name: &'static str;\n\n\n\n /// This is where app get their dependencies, option then initialize, returns a new instance of app.\n\n /// Container type should not be `install`ed directly, so this method should not be called for container type. \n\n fn init<S>(dependencies: Self::Dependency, store: &mut S, option: Self::Option) -> Self where S: store::AppStore;\n\n\n\n /// For container type, use this method to convert from target type to container type (`T -> Option<T>`).\n\n /// For regular type, simplely return will do.\n", "file_path": "src/app.rs", "rank": 95, "score": 124871.748228364 }, { "content": "pub trait ArpInterceptor: Sync + Send {\n\n fn new_flow(&self, device: DeviceID) -> Flow;\n\n}\n\n\n\nimpl ProxyArpLoader {\n\n pub fn new() -> Self {\n\n ProxyArpLoader {\n\n interceptor: Default::default(),\n\n }\n\n }\n\n\n\n pub fn with_interceptor<T: 'static>(mut self, pipeconf: &str, interceptor: T) -> Self\n\n where\n\n T: ArpInterceptor,\n\n {\n\n let pipeconf = rusty_p4::util::hash(pipeconf);\n\n self.interceptor\n\n .insert(PipeconfID(pipeconf), Box::new(interceptor));\n\n self\n\n }\n", "file_path": "rusty-p4-app/src/app/proxyarp.rs", "rank": 96, "score": 124763.0691013255 }, { "content": "pub trait LinkProbeInterceptor: Sync + Send {\n\n fn new_flow(&self, device: DeviceID) -> Flow;\n\n}\n\n\n\n// impl LinkProbeLoader {\n\n// pub fn new() -> Self {\n\n// LinkProbeLoader {\n\n// interceptor: HashMap::new(),\n\n// }\n\n// }\n\n\n\n// pub fn with_interceptor<T: 'static>(mut self, pipeconf: &str, interceptor: T) -> Self\n\n// where\n\n// T: LinkProbeInterceptor,\n\n// {\n\n// let 
pipeconf = rusty_p4::util::hash(pipeconf);\n\n// self.interceptor\n\n// .insert(PipeconfID(pipeconf), Box::new(interceptor));\n\n// self\n\n// }\n", "file_path": "rusty-p4-app/src/app/linkprobe.rs", "rank": 97, "score": 123286.42130289329 }, { "content": "type P4RuntimeClient =\n\ncrate::proto::p4runtime::p4_runtime_client::P4RuntimeClient<tonic::transport::channel::Channel>;\n\n\n\n#[derive(Copy, Clone, Default)]\n\npub struct ContextConfig {\n\n pub enable_netconfiguration: bool,\n\n}\n\n\n\npub struct Core<E, C = DefaultContext<E>>\n\nwhere C:Context<E>\n\n{\n\n pub(crate) pipeconf: Arc<HashMap<PipeconfID, Pipeconf>>,\n\n pub(crate) core_channel_sender: Sender<CoreRequest>,\n\n pub(crate) event_sender: Sender<CoreEvent<E>>,\n\n pub(crate) connections: HashMap<DeviceID, ConnectionBox>,\n\n pub(crate) config: ContextConfig,\n\n context_state: C::ContextState,\n\n}\n\n\n\nimpl<E, C> Core<E, C>\n", "file_path": "src/core/core.rs", "rank": 98, "score": 121167.83297628746 }, { "content": "#[derive(Debug)]\n\nenum _FlowMatchValue {\n\n Exact(Expr),\n\n Range(Box<Expr>, Box<Expr>),\n\n Lpm(Box<Expr>, Box<Expr>),\n\n Ternary(Box<Expr>, Box<Expr>),\n\n}\n\n\n\nimpl Parse for _FlowMatchValue {\n\n fn parse(input: &ParseBuffer) -> Result<Self> {\n\n let expr: Expr = input.parse()?;\n\n match expr {\n\n // range\n\n Expr::Range(range) => {\n\n match range.limits {\n\n syn::RangeLimits::Closed(_) => {\n\n return Err(input.error(\"Unsupported range limits\"))\n\n }\n\n syn::RangeLimits::HalfOpen(_) => {}\n\n }\n\n let from: Box<Expr> = range.from.ok_or(input.error(\"Missing range 'from'\"))?;\n", "file_path": "macro-impl/src/lib.rs", "rank": 99, "score": 119870.26233737472 } ]
Rust
src/lib.rs
matclab/automattermostatus
169b72638dbda91528e30e54a7a614e2e6a20471
#![warn(missing_docs)] use anyhow::{bail, Context, Result}; use std::fs; use std::path::PathBuf; use std::thread::sleep; use std::{collections::HashMap, time}; use tracing::{debug, error, info, warn}; use tracing_subscriber::prelude::*; use tracing_subscriber::{fmt, layer::SubscriberExt, EnvFilter}; pub mod config; pub mod mattermost; pub mod offtime; pub mod state; pub mod utils; pub mod wifiscan; pub use config::{Args, SecretType, WifiStatusConfig}; pub use mattermost::{BaseSession, MMStatus, Session}; use offtime::Off; pub use state::{Cache, Location, State}; pub use wifiscan::{WiFi, WifiInterface}; pub fn setup_tracing(args: &Args) -> Result<()> { let fmt_layer = fmt::layer().with_target(false); let filter_layer = EnvFilter::try_new(args.verbose.get_level_filter().to_string()).unwrap(); tracing_subscriber::registry() .with(filter_layer) .with(fmt_layer) .init(); Ok(()) } pub fn get_cache(dir: Option<PathBuf>) -> Result<Cache> { let mut state_file_name: PathBuf; if let Some(ref state_dir) = dir { state_file_name = PathBuf::from(state_dir); fs::create_dir_all(&state_dir) .with_context(|| format!("Creating cache dir {:?}", &state_dir))?; } else { bail!("Internal Error, no `state_dir` configured"); } state_file_name.push("automattermostatus.state"); Ok(Cache::new(state_file_name)) } pub fn prepare_status(args: &Args) -> Result<HashMap<Location, MMStatus>> { let mut res = HashMap::new(); for s in &args.status { let sc: WifiStatusConfig = s.parse().with_context(|| format!("Parsing {}", s))?; debug!("Adding : {:?}", sc); res.insert( Location::Known(sc.wifi_string), MMStatus::new(sc.text, sc.emoji), ); } Ok(res) } pub fn create_session(args: &Args) -> Result<Box<dyn BaseSession>> { args.mm_url.as_ref().expect("Mattermost URL is not defined"); args.secret_type .as_ref() .expect("Internal Error: secret_type is not defined"); args.mm_secret.as_ref().expect("Secret is not defined"); let mut session = Session::new(args.mm_url.as_ref().unwrap()); let mut session: Box<dyn 
BaseSession> = match args.secret_type.as_ref().unwrap() { SecretType::Password => Box::new(session.with_credentials( args.mm_user.as_ref().unwrap(), args.mm_secret.as_ref().unwrap(), )), SecretType::Token => Box::new(session.with_token(args.mm_secret.as_ref().unwrap())), }; session.login()?; Ok(session) } pub fn get_wifi_and_update_status_loop( args: Args, mut status_dict: HashMap<Location, MMStatus>, ) -> Result<()> { let cache = get_cache(args.state_dir.to_owned()).context("Reading cached state")?; let mut state = State::new(&cache).context("Creating cache")?; let delay_duration = time::Duration::new( args.delay .expect("Internal error: args.delay shouldn't be None") .into(), 0, ); let wifi = WiFi::new( &args .interface_name .clone() .expect("Internal error: args.interface_name shouldn't be None"), ); if !wifi .is_wifi_enabled() .context("Checking if wifi is enabled")? { error!("wifi is disabled"); } else { info!("Wifi is enabled"); } let mut session = create_session(&args)?; loop { if !&args.is_off_time() { let ssids = wifi.visible_ssid().context("Getting visible SSIDs")?; debug!("Visible SSIDs {:#?}", ssids); let mut found_ssid = false; for (l, mmstatus) in status_dict.iter_mut() { if let Location::Known(wifi_substring) = l { if ssids.iter().any(|x| x.contains(wifi_substring)) { if wifi_substring.is_empty() { debug!("We do not match against empty SSID reserved for off time"); continue; } debug!("known wifi '{}' detected", wifi_substring); found_ssid = true; mmstatus.expires_at(&args.expires_at); if let Err(e) = state.update_status( l.clone(), Some(mmstatus), &mut session, &cache, delay_duration.as_secs(), ) { error!("Fail to update status : {}", e) } break; } } } if !found_ssid { debug!("Unknown wifi"); if let Err(e) = state.update_status( Location::Unknown, None, &mut session, &cache, delay_duration.as_secs(), ) { error!("Fail to update status : {}", e) } } } else { let off_location = Location::Known("".to_string()); if let Some(offstatus) = 
status_dict.get_mut(&off_location) { debug!("Setting state for Offtime"); if let Err(e) = state.update_status( off_location, Some(offstatus), &mut session, &cache, delay_duration.as_secs(), ) { error!("Fail to update status : {}", e) } } } if let Some(0) = args.delay { break; } else { sleep(delay_duration); } } Ok(()) } #[cfg(test)] mod get_cache_should { use super::*; use anyhow::anyhow; use test_log::test; #[test] fn panic_when_called_with_none() -> Result<()> { match get_cache(None) { Ok(_) => Err(anyhow!("Expected an error")), Err(e) => { assert_eq!(e.to_string(), "Internal Error, no `state_dir` configured"); Ok(()) } } } } #[cfg(test)] mod prepare_status_should { use super::*; use test_log::test; #[test] fn prepare_expected_status() -> Result<()> { let args = Args { status: vec!["a::b::c", "d::e::f", "::off::off text"] .iter() .map(|s| s.to_string()) .collect(), mm_secret: Some("AAA".to_string()), ..Default::default() }; let res = prepare_status(&args)?; let mut expected: HashMap<state::Location, mattermost::MMStatus> = HashMap::new(); expected.insert( Location::Known("".to_string()), MMStatus::new("off text".to_string(), "off".to_string()), ); expected.insert( Location::Known("a".to_string()), MMStatus::new("c".to_string(), "b".to_string()), ); expected.insert( Location::Known("d".to_string()), MMStatus::new("f".to_string(), "e".to_string()), ); assert_eq!(res, expected); Ok(()) } } #[cfg(test)] mod create_session_should { use super::*; #[test] #[should_panic(expected = "Mattermost URL is not defined")] fn panic_when_mm_url_is_none() { let args = Args { status: vec!["a::b::c".to_string()], mm_secret: Some("AAA".to_string()), mm_url: None, ..Default::default() }; let _res = create_session(&args); } } #[cfg(test)] mod main_loop_should { use super::*; #[test] #[should_panic(expected = "Internal error: args.delay shouldn't be None")] fn panic_when_args_delay_is_none() { let args = Args { status: vec!["a::b::c".to_string()], delay: None, ..Default::default() }; 
let _res = get_wifi_and_update_status_loop(args, HashMap::new()); } }
#![warn(missing_docs)] use anyhow::{bail, Context, Result}; use std::fs; use std::path::PathBuf; use std::thread::sleep; use std::{collections::HashMap, time}; use tracing::{debug, error, info, warn}; use tracing_subscriber::prelude::*; use tracing_subscriber::{fmt, layer::SubscriberExt, EnvFilter}; pub mod config; pub mod mattermost; pub mod offtime; pub mod state; pub mod utils; pub mod wifiscan; pub use config::{Args, SecretType, WifiStatusConfig}; pub use mattermost::{BaseSession, MMStatus, Session}; use offtime::Off; pub use state::{Cache, Location, State}; pub use wifiscan::{WiFi, WifiInterface};
pub fn get_cache(dir: Option<PathBuf>) -> Result<Cache> { let mut state_file_name: PathBuf; if let Some(ref state_dir) = dir { state_file_name = PathBuf::from(state_dir); fs::create_dir_all(&state_dir) .with_context(|| format!("Creating cache dir {:?}", &state_dir))?; } else { bail!("Internal Error, no `state_dir` configured"); } state_file_name.push("automattermostatus.state"); Ok(Cache::new(state_file_name)) } pub fn prepare_status(args: &Args) -> Result<HashMap<Location, MMStatus>> { let mut res = HashMap::new(); for s in &args.status { let sc: WifiStatusConfig = s.parse().with_context(|| format!("Parsing {}", s))?; debug!("Adding : {:?}", sc); res.insert( Location::Known(sc.wifi_string), MMStatus::new(sc.text, sc.emoji), ); } Ok(res) } pub fn create_session(args: &Args) -> Result<Box<dyn BaseSession>> { args.mm_url.as_ref().expect("Mattermost URL is not defined"); args.secret_type .as_ref() .expect("Internal Error: secret_type is not defined"); args.mm_secret.as_ref().expect("Secret is not defined"); let mut session = Session::new(args.mm_url.as_ref().unwrap()); let mut session: Box<dyn BaseSession> = match args.secret_type.as_ref().unwrap() { SecretType::Password => Box::new(session.with_credentials( args.mm_user.as_ref().unwrap(), args.mm_secret.as_ref().unwrap(), )), SecretType::Token => Box::new(session.with_token(args.mm_secret.as_ref().unwrap())), }; session.login()?; Ok(session) } pub fn get_wifi_and_update_status_loop( args: Args, mut status_dict: HashMap<Location, MMStatus>, ) -> Result<()> { let cache = get_cache(args.state_dir.to_owned()).context("Reading cached state")?; let mut state = State::new(&cache).context("Creating cache")?; let delay_duration = time::Duration::new( args.delay .expect("Internal error: args.delay shouldn't be None") .into(), 0, ); let wifi = WiFi::new( &args .interface_name .clone() .expect("Internal error: args.interface_name shouldn't be None"), ); if !wifi .is_wifi_enabled() .context("Checking if wifi is enabled")? 
{ error!("wifi is disabled"); } else { info!("Wifi is enabled"); } let mut session = create_session(&args)?; loop { if !&args.is_off_time() { let ssids = wifi.visible_ssid().context("Getting visible SSIDs")?; debug!("Visible SSIDs {:#?}", ssids); let mut found_ssid = false; for (l, mmstatus) in status_dict.iter_mut() { if let Location::Known(wifi_substring) = l { if ssids.iter().any(|x| x.contains(wifi_substring)) { if wifi_substring.is_empty() { debug!("We do not match against empty SSID reserved for off time"); continue; } debug!("known wifi '{}' detected", wifi_substring); found_ssid = true; mmstatus.expires_at(&args.expires_at); if let Err(e) = state.update_status( l.clone(), Some(mmstatus), &mut session, &cache, delay_duration.as_secs(), ) { error!("Fail to update status : {}", e) } break; } } } if !found_ssid { debug!("Unknown wifi"); if let Err(e) = state.update_status( Location::Unknown, None, &mut session, &cache, delay_duration.as_secs(), ) { error!("Fail to update status : {}", e) } } } else { let off_location = Location::Known("".to_string()); if let Some(offstatus) = status_dict.get_mut(&off_location) { debug!("Setting state for Offtime"); if let Err(e) = state.update_status( off_location, Some(offstatus), &mut session, &cache, delay_duration.as_secs(), ) { error!("Fail to update status : {}", e) } } } if let Some(0) = args.delay { break; } else { sleep(delay_duration); } } Ok(()) } #[cfg(test)] mod get_cache_should { use super::*; use anyhow::anyhow; use test_log::test; #[test] fn panic_when_called_with_none() -> Result<()> { match get_cache(None) { Ok(_) => Err(anyhow!("Expected an error")), Err(e) => { assert_eq!(e.to_string(), "Internal Error, no `state_dir` configured"); Ok(()) } } } } #[cfg(test)] mod prepare_status_should { use super::*; use test_log::test; #[test] fn prepare_expected_status() -> Result<()> { let args = Args { status: vec!["a::b::c", "d::e::f", "::off::off text"] .iter() .map(|s| s.to_string()) .collect(), mm_secret: 
Some("AAA".to_string()), ..Default::default() }; let res = prepare_status(&args)?; let mut expected: HashMap<state::Location, mattermost::MMStatus> = HashMap::new(); expected.insert( Location::Known("".to_string()), MMStatus::new("off text".to_string(), "off".to_string()), ); expected.insert( Location::Known("a".to_string()), MMStatus::new("c".to_string(), "b".to_string()), ); expected.insert( Location::Known("d".to_string()), MMStatus::new("f".to_string(), "e".to_string()), ); assert_eq!(res, expected); Ok(()) } } #[cfg(test)] mod create_session_should { use super::*; #[test] #[should_panic(expected = "Mattermost URL is not defined")] fn panic_when_mm_url_is_none() { let args = Args { status: vec!["a::b::c".to_string()], mm_secret: Some("AAA".to_string()), mm_url: None, ..Default::default() }; let _res = create_session(&args); } } #[cfg(test)] mod main_loop_should { use super::*; #[test] #[should_panic(expected = "Internal error: args.delay shouldn't be None")] fn panic_when_args_delay_is_none() { let args = Args { status: vec!["a::b::c".to_string()], delay: None, ..Default::default() }; let _res = get_wifi_and_update_status_loop(args, HashMap::new()); } }
pub fn setup_tracing(args: &Args) -> Result<()> { let fmt_layer = fmt::layer().with_target(false); let filter_layer = EnvFilter::try_new(args.verbose.get_level_filter().to_string()).unwrap(); tracing_subscriber::registry() .with(filter_layer) .with(fmt_layer) .init(); Ok(()) }
function_block-full_function
[ { "content": "/// Trait implementing function necessary to establish a session (getting a authenticating token).\n\npub trait BaseSession {\n\n /// Get session token\n\n fn token(&self) -> Result<&str>;\n\n\n\n /// Get session `base_uri`\n\n fn base_uri(&self) -> &str;\n\n\n\n /// Login to mattermost instance\n\n fn login(&mut self) -> Result<()>;\n\n}\n\n\n\n/// Base Session without authentication management\n\npub struct Session {\n\n #[allow(rustdoc::bare_urls)]\n\n /// base URL of the mattermost server like https://mattermost.example.com\n\n base_uri: String,\n\n}\n\n\n\n/// Implement [Session] authenticated with a private access token.\n\npub struct SessionWithToken {\n", "file_path": "src/mattermost/session.rs", "rank": 0, "score": 77165.7032307823 }, { "content": "struct Time {}\n\n\n\n/// Trait providing a `now` function.\n\n///\n\n/// The use of a trait instead of calling directly `Local::now` is needed in order to be able to\n\n/// mock time in tests\n", "file_path": "src/offtime.rs", "rank": 2, "score": 65008.84673150951 }, { "content": "/// Wifi interface for an operating system.\n\n/// This provides basic functionalities for wifi interface.\n\npub trait WifiInterface: fmt::Debug {\n\n /// Check if the wifi interface on host machine is enabled.\n\n fn is_wifi_enabled(&self) -> Result<bool, WifiError> {\n\n unimplemented!();\n\n }\n\n\n\n /// Return visible SSIDs\n\n fn visible_ssid(&self) -> Result<Vec<String>, WifiError> {\n\n unimplemented!();\n\n }\n\n}\n", "file_path": "src/wifiscan/mod.rs", "rank": 3, "score": 62781.112519306276 }, { "content": "fn de_from_str<'de, D>(deserializer: D) -> Result<QuietVerbose, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n match s.to_ascii_lowercase().as_ref() {\n\n \"off\" => Ok(QuietVerbose {\n\n verbosity_level: 0,\n\n quiet_level: 2,\n\n }),\n\n \"error\" => Ok(QuietVerbose {\n\n verbosity_level: 0,\n\n quiet_level: 1,\n\n }),\n\n \"warn\" => 
Ok(QuietVerbose {\n\n verbosity_level: 0,\n\n quiet_level: 0,\n\n }),\n\n \"info\" => Ok(QuietVerbose {\n\n verbosity_level: 1,\n", "file_path": "src/config.rs", "rank": 4, "score": 61867.56105014545 }, { "content": "pub fn parse_from_hmstr(time_str: &Option<String>) -> Option<DateTime<Local>> {\n\n if let Some(ref s) = time_str {\n\n let splitted: Vec<&str> = s.split(':').collect();\n\n let hh: u32 = match splitted[0].parse() {\n\n Ok(h) => h,\n\n Err(_) => {\n\n warn!(\"Unable to get hour from {:?}\", &time_str);\n\n return None;\n\n }\n\n };\n\n let mm = if splitted.len() < 2 {\n\n 0\n\n } else {\n\n match splitted[1].parse() {\n\n Ok(m) => m,\n\n Err(_) => {\n\n warn!(\"Unable to get minutes from {:?}\", &time_str);\n\n 0\n\n }\n\n }\n", "file_path": "src/utils.rs", "rank": 6, "score": 58350.28515007681 }, { "content": "/// Manage the time where the application shall not update the status because the user\n\n/// is not working\n\npub trait Off {\n\n /// Is the user off now ?\n\n fn is_off_time(&self) -> bool;\n\n}\n\n\n\n/// Struct for describing the parity of the week for which the out of work day apply\n\n/// Parity is given according to iso week number\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub enum Parity {\n\n /// Day off for all weeks\n\n EveryWeek,\n\n /// Day off only for odd weeks\n\n OddWeek,\n\n /// Day off only for even weeks\n\n EvenWeek,\n\n}\n\n\n\n/// Struct olding a map of ([`Weekday`], [`Parity`]) descripting day offs.\n\n#[derive(Serialize, Deserialize, Debug)]\n\n#[serde(transparent)]\n\npub struct OffDays(HashMap<Weekday, Parity>);\n\n\n", "file_path": "src/offtime.rs", "rank": 7, "score": 57759.85097743137 }, { "content": "#[cfg_attr(test, automock)] // create MockNow Struct for tests\n\npub trait Now {\n\n /// Returns current local time\n\n fn now(&self) -> Date<Local>;\n\n}\n\nimpl Now for Time {\n\n fn now(&self) -> Date<Local> {\n\n Local::now().date()\n\n }\n\n}\n\n\n\nimpl OffDays {\n\n /// Create new empty `OffDays` 
instance\n\n pub fn new() -> OffDays {\n\n OffDays(HashMap::new())\n\n }\n\n #[allow(dead_code)]\n\n /// Insert a new offday for week of `parity`\n\n fn insert(&mut self, day: Weekday, parity: Parity) -> Option<Parity> {\n\n self.0.insert(day, parity)\n\n }\n", "file_path": "src/offtime.rs", "rank": 9, "score": 54912.4669421148 }, { "content": "//! Implement wifi SSID scan for linux, windows and mac os.\n\n// Mostly courtesy of https://github.com/tnkemdilim/wifi-rs\n\n\n\n#[cfg(target_os = \"linux\")]\n\nmod linux;\n\n#[cfg(target_os = \"macos\")]\n\nmod osx;\n\n#[cfg(any(test, target_os = \"macos\"))]\n\nmod osx_parse;\n\n#[cfg(target_os = \"windows\")]\n\nmod windows;\n\n#[cfg(any(test, target_os = \"windows\"))]\n\nmod windows_parse;\n\n// We include all modules for tests as tests do not depend upon specific platform\n\n//#[cfg(test)]\n\n//mod osx;\n\n\n\nuse std::{fmt, io};\n\nuse thiserror::Error;\n\n\n", "file_path": "src/wifiscan/mod.rs", "rank": 11, "score": 47380.73018861169 }, { "content": "/// Wireless network interface.\n\n#[derive(Debug)]\n\npub struct WiFi {\n\n #[allow(dead_code)]\n\n /// wifi interface name\n\n pub interface: String,\n\n}\n\n\n\n#[derive(Debug, Error)]\n\n/// Error specific to `Wifi` struct.\n\npub enum WifiError {\n\n /// The specified wifi is currently disabled. Try switching it on.\n\n #[error(\"Wifi is currently disabled\")]\n\n WifiDisabled,\n\n /// The wifi interface interface failed to switch on.\n\n #[cfg(target_os = \"windows\")]\n\n #[error(\"Wifi interface failed to switch on\")]\n\n InterfaceFailedToOn,\n\n #[allow(missing_docs)]\n\n #[error(\"Wifi IO Error\")]\n\n IoError(#[from] io::Error),\n\n}\n\n\n\n/// Wifi interface for an operating system.\n\n/// This provides basic functionalities for wifi interface.\n", "file_path": "src/wifiscan/mod.rs", "rank": 12, "score": 47378.974499867596 }, { "content": "//! 
This module exports [Session] and [MMStatus]\n\n//!\n\npub mod session;\n\npub mod status;\n\npub use session::*;\n\npub use status::*;\n", "file_path": "src/mattermost/mod.rs", "rank": 13, "score": 47076.30362757098 }, { "content": "//! let token = session.token()?;\n\n//! # server_mock.assert();\n\n//! # Ok::<(), anyhow::Error>(())\n\n//! ```\n\n//!\n\n//! Or via a private access token:\n\n//!\n\n//! ```\n\n//! use lib::{Session,BaseSession};\n\n//! let mut session = Session::new(\"https://mattermost.example.com\")\n\n//! .with_token(\"sdqgserdfmkjqBXHZFH:qgjr\");\n\n//! let token = session.token()?;\n\n//! # Ok::<(), anyhow::Error>(())\n\n//! ```\n\n//!\n\n\n\nuse anyhow::{anyhow, Result};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::mem;\n\n\n\n/// Trait implementing function necessary to establish a session (getting a authenticating token).\n", "file_path": "src/mattermost/session.rs", "rank": 14, "score": 46683.961626041564 }, { "content": " //let token = login(&server.url(\"\"), Some(\"username\"), Some(\"passwordtext\"))?;\n\n\n\n // Ensure the specified mock was called exactly one time (or fail with a detailed error description).\n\n server_mock.assert();\n\n // Ensure the mock server did respond as specified.\n\n assert_eq!(session.token()?, \"xyzxyz\");\n\n assert_eq!(session.base_uri, server.url(\"\"));\n\n Ok(())\n\n }\n\n #[test]\n\n fn return_token() -> Result<()> {\n\n let session = Session::new(\"https://mattermost.example.com\").with_token(\"xyzxyz\");\n\n assert_eq!(session.base_uri, \"https://mattermost.example.com\");\n\n assert_eq!(session.token()?, \"xyzxyz\");\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/mattermost/session.rs", "rank": 15, "score": 46682.0980350083 }, { "content": " #[allow(rustdoc::bare_urls)]\n\n /// base URL of the mattermost server like https://mattermost.example.com\n\n pub base_uri: String,\n\n /// private access token for current user on the `base_uri` mattermost instance\n\n /// (either permanent and given at 
init or renewable with the help of login function)\n\n token: String,\n\n}\n\n///\n\n/// Implement a session authenticated with a login and password\n\npub struct SessionWithLogin {\n\n #[allow(rustdoc::bare_urls)]\n\n /// base URL of the mattermost server like https://mattermost.example.com\n\n pub base_uri: String,\n\n /// private access token for current user on the `base_uri` mattermost instance\n\n /// (either permanent and given at init or renewable with the help of login function)\n\n token: Option<String>,\n\n /// user login\n\n user: String,\n\n /// user password\n\n password: String,\n\n}\n\n\n", "file_path": "src/mattermost/session.rs", "rank": 16, "score": 46679.196497362966 }, { "content": " pub fn with_credentials(&mut self, user_login: &str, password: &str) -> SessionWithLogin {\n\n SessionWithLogin {\n\n user: user_login.into(),\n\n password: password.into(),\n\n token: None,\n\n base_uri: mem::take(&mut self.base_uri),\n\n }\n\n }\n\n}\n\n\n\nimpl BaseSession for SessionWithToken {\n\n fn token(&self) -> Result<&str> {\n\n Ok(&self.token)\n\n }\n\n fn base_uri(&self) -> &str {\n\n &self.base_uri\n\n }\n\n fn login(&mut self) -> Result<()> {\n\n Ok(())\n\n }\n", "file_path": "src/mattermost/session.rs", "rank": 17, "score": 46679.185940614116 }, { "content": "//! This module implement mattermost session management.\n\n//!\n\n//! A session may be created via login:\n\n//! ```\n\n//! # use httpmock::prelude::*;\n\n//! # let server = MockServer::start();\n\n//! # let server_mock = server.mock(|expect, resp_with| {\n\n//! # expect.method(POST).path(\"//api/v4/users/login\").json_body(\n\n//! # serde_json::json!({\"login_id\":\"username\",\"password\":\"passwordtext\"}\n\n//! # ),\n\n//! # );\n\n//! # resp_with\n\n//! # .status(200)\n\n//! # .header(\"content-type\", \"application/json\")\n\n//! # .header(\"Token\", \"xyzxyz\");\n\n//! # });\n\n//! use lib::{Session,BaseSession};\n\n//! let mut session = Session::new(&server.url(\"/\"))\n\n//! 
.with_credentials(\"username\", \"passwordtext\");\n\n//! session.login()?;\n", "file_path": "src/mattermost/session.rs", "rank": 18, "score": 46678.75066702942 }, { "content": " if let Some(token) = response.header(\"Token\") {\n\n self.token = Some(token.into());\n\n Ok(())\n\n } else {\n\n Err(anyhow!(\n\n \"Login authentication failed (response: {})\",\n\n response.into_string()?\n\n ))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod should {\n\n use super::*;\n\n use httpmock::prelude::*;\n\n use test_log::test; // Automatically trace tests\n\n #[test]\n\n fn login_with_success() -> Result<()> {\n\n // Start a lightweight mock server.\n", "file_path": "src/mattermost/session.rs", "rank": 19, "score": 46678.50669458188 }, { "content": "}\n\n\n\nimpl BaseSession for SessionWithLogin {\n\n fn token(&self) -> Result<&str> {\n\n if let Some(token) = &self.token {\n\n Ok(token)\n\n } else {\n\n Err(anyhow!(\"No token available, did login succeed ?\"))\n\n }\n\n }\n\n fn base_uri(&self) -> &str {\n\n &self.base_uri\n\n }\n\n\n\n fn login(&mut self) -> Result<()> {\n\n let uri = self.base_uri.to_owned() + \"/api/v4/users/login\";\n\n let response = ureq::post(&uri).send_json(serde_json::to_value(LoginData {\n\n login_id: self.user.clone(),\n\n password: self.password.clone(),\n\n })?)?;\n", "file_path": "src/mattermost/session.rs", "rank": 20, "score": 46676.41712880401 }, { "content": " let server = MockServer::start();\n\n\n\n // Create a mock on the server.\n\n let server_mock = server.mock(|expect, resp_with| {\n\n expect.method(POST).path(\"/api/v4/users/login\").json_body(\n\n serde_json::json!({\"login_id\":\"username\",\"password\":\"passwordtext\"}\n\n ),\n\n );\n\n resp_with\n\n .status(200)\n\n .header(\"content-type\", \"application/json\")\n\n .header(\"Token\", \"xyzxyz\");\n\n });\n\n\n\n let mut session =\n\n Session::new(&server.url(\"\")).with_credentials(\"username\", \"passwordtext\");\n\n\n\n session.login()?;\n\n\n\n // Send an HTTP request to the 
mock server. This simulates your code.\n", "file_path": "src/mattermost/session.rs", "rank": 21, "score": 46674.21508431906 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct LoginData {\n\n login_id: String,\n\n password: String,\n\n}\n\n\n\nimpl Session {\n\n /// Create new empty [Session] to the `base_uri` mattermost server\n\n pub fn new(base_uri: &str) -> Self {\n\n Session {\n\n base_uri: base_uri.into(),\n\n }\n\n }\n\n /// Add existing token to current [Session]\n\n pub fn with_token(&mut self, token: &str) -> SessionWithToken {\n\n SessionWithToken {\n\n token: token.into(),\n\n base_uri: mem::take(&mut self.base_uri),\n\n }\n\n }\n\n /// Add login credentials to current [Session]\n", "file_path": "src/mattermost/session.rs", "rank": 22, "score": 41379.82689416743 }, { "content": "#[paw::main]\n\nfn main(args: Args) -> Result<()> {\n\n setup_tracing(&args).context(\"Setting up tracing\")?;\n\n let args = args\n\n .merge_config_and_params()?\n\n // Retrieve token if possible\n\n .update_secret_with_command()\n\n .context(\"Get secret from mm_secret_cmd\")?\n\n .update_secret_with_keyring()\n\n .context(\"Get secret from OS keyring\")?;\n\n let status_dict = prepare_status(&args).context(\"Building custom status messages\")?;\n\n get_wifi_and_update_status_loop(args, status_dict)?;\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 23, "score": 31536.034616628887 }, { "content": "//! 
Simple utilities functions\n\nuse chrono::{DateTime, Local};\n\nuse tracing::warn;\n\n\n\n/// Parse a string with the expected format \"hh:mm\" and return a [`DateTime<Local>`]\n\n/// for the current day at time \"hh:mm\"\n\n///\n\n/// If `mm` is not parsable we return a datetime set at `hh:00`.\n\n\n", "file_path": "src/utils.rs", "rank": 25, "score": 25650.137239437325 }, { "content": " };\n\n let res = Local::now().date().and_hms(hh, mm, 0);\n\n Some(res)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod should {\n\n use super::*;\n\n use test_log::test; // Automatically trace tests\n\n\n\n #[test]\n\n fn return_none_if_unparsable() {\n\n assert_eq!(None, parse_from_hmstr(&None));\n\n assert_eq!(None, parse_from_hmstr(&Some(\"biii\".to_string())));\n\n assert_eq!(None, parse_from_hmstr(&Some(\":12:30\".to_string())));\n\n }\n\n #[test]\n", "file_path": "src/utils.rs", "rank": 26, "score": 25643.792515952042 }, { "content": " fn return_hour_if_mn_is_unparsable() {\n\n let expect = Local::now().date().and_hms(12, 00, 0);\n\n assert_eq!(Some(expect), parse_from_hmstr(&Some(\"12:3O\".to_string())));\n\n assert_eq!(Some(expect), parse_from_hmstr(&Some(\"12\".to_string())));\n\n }\n\n #[test]\n\n fn return_expected_date() {\n\n let expect = Local::now().date().and_hms(7, 1, 0);\n\n assert_eq!(Some(expect), parse_from_hmstr(&Some(\"07:01\".to_string())));\n\n assert_eq!(Some(expect), parse_from_hmstr(&Some(\"7:1\".to_string())));\n\n let expect = Local::now().date().and_hms(23, 39, 0);\n\n assert_eq!(Some(expect), parse_from_hmstr(&Some(\"23:39\".to_string())));\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 27, "score": 25639.155873994583 }, { "content": "//! Implement persistant state for current location\n\n//!\n\n//! The [`State`] also provide the [`State::update_status`] function used to propagate the custom status\n\n//! 
state to the mattermost instance\n\nuse anyhow::{Context, Result};\n\nuse chrono::Utc;\n\nuse std::fs;\n\nuse tracing::{debug, info};\n\n\n\nuse crate::mattermost::{BaseSession, MMStatus};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::path::PathBuf;\n\n\n\n/// If more than MAX_SECS_BEFORE_FORCE_UPDATE are elapsed, we forcibly update\n\n/// mattermost custom status to the expected value even if there was no change in visible\n\n/// wifi SSIDs.\n\nconst MAX_SECS_BEFORE_FORCE_UPDATE: u64 = 60 * 60;\n\n\n\n/// Struct implementing a cache for the application state\n\n#[derive(Debug)]\n", "file_path": "src/state.rs", "rank": 28, "score": 25479.94079150532 }, { "content": "/// State containing at least location info\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct State {\n\n location: Location,\n\n timestamp: i64,\n\n}\n\n\n\nimpl State {\n\n /// Build a state, either by reading current persisted state in `cache`\n\n /// or by creating an empty default one.\n\n pub fn new(cache: &Cache) -> Result<Self> {\n\n let res: State;\n\n if let Ok(json) = &fs::read(&cache.path) {\n\n res = serde_json::from_str(&String::from_utf8_lossy(json)).context(format!(\n\n \"Unable to deserialize state file {:?} (try to remove it)\",\n\n &cache.path\n\n ))?;\n\n } else {\n\n res = Self {\n\n location: Location::Unknown,\n", "file_path": "src/state.rs", "rank": 29, "score": 25477.53459507503 }, { "content": " timestamp: 0,\n\n };\n\n }\n\n debug!(\"Previous known location `{:?}`\", res.location);\n\n Ok(res)\n\n }\n\n\n\n /// Update state with location and ensure persisting of state on disk\n\n pub fn set_location(&mut self, location: Location, cache: &Cache) -> Result<()> {\n\n info!(\"Set location to `{:?}`\", location);\n\n self.location = location;\n\n self.timestamp = Utc::now().timestamp();\n\n fs::write(\n\n &cache.path,\n\n serde_json::to_string(&self)\n\n .unwrap_or_else(|_| panic!(\"Serialization of State Failed :{:?}\", &self)),\n\n )\n\n .with_context(|| 
format!(\"Writing to cache file {:?}\", cache.path))?;\n\n Ok(())\n\n }\n", "file_path": "src/state.rs", "rank": 30, "score": 25477.234941053113 }, { "content": "\n\n /// Update mattermost status depending upon current state\n\n ///\n\n /// If `current_location` is Unknown, then nothing is changed.\n\n /// If `current_location` is still the same for more than `MAX_SECS_BEFORE_FORCE_UPDATE`\n\n /// then we force update the mattermost status in order to catch up with desynchronise state\n\n /// Else we update mattermost status to the one associated to `current_location`.\n\n pub fn update_status(\n\n &mut self,\n\n current_location: Location,\n\n status: Option<&mut MMStatus>,\n\n session: &mut Box<dyn BaseSession>,\n\n cache: &Cache,\n\n delay_between_polling: u64,\n\n ) -> Result<()> {\n\n if current_location == Location::Unknown {\n\n return Ok(());\n\n } else if current_location == self.location {\n\n // Less than max seconds have elapsed.\n\n // No need to update MM status again\n", "file_path": "src/state.rs", "rank": 31, "score": 25477.192534219485 }, { "content": "\n\n#[cfg(test)]\n\nmod should {\n\n use super::*;\n\n use mktemp::Temp;\n\n use test_log::test; // Automatically trace tests\n\n #[test]\n\n fn remember_state() -> Result<()> {\n\n let temp = Temp::new_file().unwrap().to_path_buf();\n\n let cache = Cache::new(&temp);\n\n let mut state = State::new(&cache)?;\n\n assert_eq!(state.location, Location::Unknown);\n\n state.set_location(Location::Known(\"abcd\".to_string()), &cache)?;\n\n assert_eq!(state.location, Location::Known(\"abcd\".to_string()));\n\n let mut state = State::new(&cache)?;\n\n assert_eq!(state.location, Location::Known(\"abcd\".to_string()));\n\n state.set_location(Location::Known(\"work\".to_string()), &cache)?;\n\n assert_eq!(state.location, Location::Known(\"work\".to_string()));\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/state.rs", "rank": 32, "score": 25475.867904772836 }, { "content": " let elapsed_sec: u64 = 
(Utc::now().timestamp() - self.timestamp)\n\n .try_into()\n\n .unwrap();\n\n if delay_between_polling * 2 < elapsed_sec\n\n && elapsed_sec <= MAX_SECS_BEFORE_FORCE_UPDATE\n\n {\n\n debug!(\n\n \"No change for {}s : no update to mattermost status\",\n\n MAX_SECS_BEFORE_FORCE_UPDATE\n\n );\n\n return Ok(());\n\n }\n\n }\n\n // We update the status on MM\n\n status.unwrap().send(session)?;\n\n // We update the location (only if setting mattermost status succeed)\n\n self.set_location(current_location, cache)?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/state.rs", "rank": 33, "score": 25470.747846250888 }, { "content": "pub struct Cache {\n\n path: PathBuf,\n\n}\n\n\n\nimpl Cache {\n\n /// Create a cache at location `path`.\n\n pub fn new(path: impl Into<PathBuf>) -> Self {\n\n Self { path: path.into() }\n\n }\n\n}\n\n\n\n/// Wifi locations\n\n#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Hash, Clone)]\n\npub enum Location {\n\n /// Known location based on wifi ssid substring match\n\n Known(String),\n\n /// Unknown location\n\n Unknown,\n\n}\n\n\n", "file_path": "src/state.rs", "rank": 34, "score": 25469.54464097664 }, { "content": " /// The user is off if\n\n /// current day is in OffDays and either,\n\n /// - parity is all\n\n /// - parity match the current iso week number\n\n fn is_off_time(&self) -> bool {\n\n self.is_off_at_date(Time {})\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod is_off_should {\n\n use super::*;\n\n use anyhow::Result;\n\n use chrono::{Local, TimeZone, Weekday};\n\n use test_log::test; // Automatically trace tests\n\n\n\n #[test]\n\n fn return_false_when_day_dont_match() -> Result<()> {\n\n let mut leave = OffDays::new();\n\n leave.insert(Weekday::Mon, Parity::EveryWeek);\n", "file_path": "src/offtime.rs", "rank": 35, "score": 25292.892136240665 }, { "content": "//! 
This module Provide the [`Off`] trait and [`OffDays`] struct\n\npub use chrono::Weekday;\n\nuse chrono::{Date, Datelike, Local};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::collections::HashMap;\n\nuse tracing::{debug, trace};\n\n\n\n#[cfg(test)]\n\nuse mockall::automock;\n\n\n\n/// Manage the time where the application shall not update the status because the user\n\n/// is not working\n", "file_path": "src/offtime.rs", "rank": 36, "score": 25290.115045119182 }, { "content": " let mut mock = MockNow::new();\n\n mock.expect_now()\n\n .times(1)\n\n .returning(|| Local.isoywd(2015, 1, Weekday::Tue));\n\n assert_eq!(leave.is_off_at_date(mock), false);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn return_true_when_match_and_no_parity() -> Result<()> {\n\n let mut leave = OffDays::new();\n\n leave.insert(Weekday::Tue, Parity::EveryWeek);\n\n let mut mock = MockNow::new();\n\n mock.expect_now()\n\n .times(1)\n\n .returning(|| Local.isoywd(2015, 1, Weekday::Tue));\n\n assert_eq!(leave.is_off_at_date(mock), true);\n\n Ok(())\n\n }\n\n\n", "file_path": "src/offtime.rs", "rank": 37, "score": 25286.758617830674 }, { "content": " #[test]\n\n fn return_true_when_day_and_parity_match() -> Result<()> {\n\n let mut leave = OffDays::new();\n\n leave.insert(Weekday::Wed, Parity::OddWeek);\n\n\n\n let mut mock = MockNow::new();\n\n mock.expect_now()\n\n .times(1)\n\n .returning(|| Local.isoywd(2015, 15, Weekday::Wed));\n\n assert_eq!(leave.is_off_at_date(mock), true);\n\n\n\n leave.insert(Weekday::Thu, Parity::EvenWeek);\n\n let mut mock = MockNow::new();\n\n mock.expect_now()\n\n .times(1)\n\n .returning(|| Local.isoywd(2015, 16, Weekday::Thu));\n\n assert_eq!(leave.is_off_at_date(mock), true);\n\n\n\n Ok(())\n\n }\n", "file_path": "src/offtime.rs", "rank": 38, "score": 25286.573863840833 }, { "content": "\n\n #[test]\n\n fn return_false_when_day_match_but_not_parity() -> Result<()> {\n\n let mut leave = OffDays::new();\n\n leave.insert(Weekday::Fri, Parity::EvenWeek);\n\n let mut 
mock = MockNow::new();\n\n mock.expect_now()\n\n .times(1)\n\n .returning(|| Local.isoywd(2015, 15, Weekday::Fri));\n\n assert_eq!(leave.is_off_at_date(mock), false);\n\n\n\n leave.insert(Weekday::Sun, Parity::OddWeek);\n\n let mut mock = MockNow::new();\n\n mock.expect_now()\n\n .times(1)\n\n .returning(|| Local.isoywd(2015, 16, Weekday::Sun));\n\n assert_eq!(leave.is_off_at_date(mock), false);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/offtime.rs", "rank": 39, "score": 25286.573863840833 }, { "content": " &now.weekday(),\n\n &now.iso_week(),\n\n if !res { \"not\" } else { \"\" }\n\n );\n\n res\n\n }\n\n\n\n /// Return `true` if there are no OffDays.\n\n pub fn is_empty(&self) -> bool {\n\n self.0.is_empty()\n\n }\n\n}\n\n\n\nimpl Default for OffDays {\n\n fn default() -> Self {\n\n OffDays::new()\n\n }\n\n}\n\n\n\nimpl Off for OffDays {\n", "file_path": "src/offtime.rs", "rank": 40, "score": 25284.912849475077 }, { "content": " /// The user is off if date day is in OffDays and either,\n\n /// - parity is all\n\n /// - parity match the current iso week number\n\n fn is_off_at_date(&self, date: impl Now) -> bool {\n\n let now = date.now();\n\n trace!(\"now: {:?}\", now);\n\n trace!(\"now.weekday: {:?}\", now.weekday());\n\n let res: bool;\n\n if let Some(parity) = self.0.get(&now.weekday()) {\n\n trace!(\"match and parity = {:?}\", parity);\n\n res = match parity {\n\n Parity::EveryWeek => true,\n\n Parity::OddWeek => &now.iso_week().week() % 2 == 1,\n\n Parity::EvenWeek => &now.iso_week().week() % 2 == 0,\n\n };\n\n } else {\n\n res = false;\n\n }\n\n debug!(\n\n \"{:?} {:?} is {} off\",\n", "file_path": "src/offtime.rs", "rank": 41, "score": 25282.504469978747 }, { "content": "#![allow(missing_docs)]\n\n//! 
This module holds struct and helpers for parameters and configuration\n\n//!\n\nuse crate::offtime::{Off, OffDays};\n\nuse crate::utils::parse_from_hmstr;\n\nuse ::structopt::clap::AppSettings;\n\nuse anyhow::{bail, Context, Result};\n\nuse chrono::Local;\n\nuse directories_next::ProjectDirs;\n\nuse figment::{\n\n providers::{Format, Serialized, Toml},\n\n Figment,\n\n};\n\nuse serde::{Deserialize, Deserializer, Serialize, Serializer};\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\nuse std::process::Command;\n\nuse structopt;\n\nuse structopt::clap::arg_enum;\n\nuse tracing::{debug, info, warn};\n", "file_path": "src/config.rs", "rank": 42, "score": 24583.01761249191 }, { "content": " /// `:home:` mattermost emoji.\n\n pub emoji: String,\n\n /// custom status text description\n\n pub text: String,\n\n}\n\n\n\n/// Implement [`std::str::FromStr`] for [`WifiStatusConfig`] which allows to call `parse` from a\n\n/// string representation:\n\n/// ```\n\n/// use lib::config::WifiStatusConfig;\n\n/// let wsc : WifiStatusConfig = \"wifinet::house::Working home\".parse().unwrap();\n\n/// assert_eq!(wsc, WifiStatusConfig {\n\n/// wifi_string: \"wifinet\".to_owned(),\n\n/// emoji:\"house\".to_owned(),\n\n/// text: \"Working home\".to_owned() });\n\n/// ```\n\nimpl std::str::FromStr for WifiStatusConfig {\n\n type Err = anyhow::Error;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let splitted: Vec<&str> = s.split(\"::\").collect();\n", "file_path": "src/config.rs", "rank": 43, "score": 24580.88059498674 }, { "content": " };\n\n let verbose: i8 = if self.verbosity_level > 2 {\n\n 3\n\n } else {\n\n self.verbosity_level as i8\n\n };\n\n match verbose - quiet {\n\n -2 => \"Off\",\n\n -1 => \"Error\",\n\n 0 => \"Warn\",\n\n 1 => \"Info\",\n\n 2 => \"Debug\",\n\n _ => \"Trace\",\n\n }\n\n }\n\n}\n\n\n\n#[derive(structopt::StructOpt, Serialize, Deserialize, Debug)]\n\n/// Automate mattermost status with the help of wifi network\n\n///\n", "file_path": "src/config.rs", 
"rank": 44, "score": 24578.84189101187 }, { "content": "/// Use current visible wifi SSID to automate your mattermost status.\n\n/// This program is meant to either be running in background or be call regularly\n\n/// with option `--delay 0`.\n\n/// It will then update your mattermost custom status according to the config file\n\n#[structopt(global_settings(&[AppSettings::ColoredHelp, AppSettings::ColorAuto]))]\n\npub struct Args {\n\n /// wifi interface name\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n #[structopt(short, long, env, name = \"itf_name\")]\n\n pub interface_name: Option<String>,\n\n\n\n /// Status configuration triplets (:: separated)\n\n ///\n\n /// Each triplet shall have the format:\n\n /// \"wifi_substring::emoji_name::status_text\". If `wifi_substring` is empty, the ssociated\n\n /// status will be used for off time.\n\n #[serde(skip_serializing_if = \"Vec::is_empty\")]\n\n #[structopt(short, long, name = \"wifi_substr::emoji::text\")]\n\n pub status: Vec<String>,\n\n\n", "file_path": "src/config.rs", "rank": 45, "score": 24578.67071698964 }, { "content": " ///\n\n pub fn update_secret_with_keyring(mut self) -> Result<Self> {\n\n if let Some(user) = &self.mm_user {\n\n if let Some(service) = &self.keyring_service {\n\n let keyring = keyring::Keyring::new(service, user);\n\n let secret = keyring.get_password().with_context(|| {\n\n format!(\"Querying OS keyring (user: {}, service: {})\", user, service)\n\n })?;\n\n self.mm_secret = Some(secret);\n\n } else {\n\n warn!(\"User is defined for keyring lookup but service is not\");\n\n info!(\"Skipping keyring lookup\");\n\n }\n\n }\n\n Ok(self)\n\n }\n\n\n\n /// Update `args.mm_secret` with the standard output of\n\n /// `args.mm_secret_cmd` if defined.\n\n ///\n", "file_path": "src/config.rs", "rank": 46, "score": 24578.489026664975 }, { "content": " }\n\n\n\n /// Merge with precedence default [`Args`], config file and command line parameters.\n\n pub fn merge_config_and_params(&self) 
-> Result<Args> {\n\n let default_args = Args::default();\n\n debug!(\"default Args : {:#?}\", default_args);\n\n let conf_dir = ProjectDirs::from(\"net\", \"ams\", \"automattermostatus\")\n\n .expect(\"Unable to find a project dir\")\n\n .config_dir()\n\n .to_owned();\n\n fs::create_dir_all(&conf_dir)\n\n .with_context(|| format!(\"Creating conf dir {:?}\", &conf_dir))?;\n\n let conf_file = conf_dir.join(\"automattermostatus.toml\");\n\n if !conf_file.exists() {\n\n info!(\"Write {:?} default config file\", &conf_file);\n\n fs::write(&conf_file, toml::to_string(&Args::default())?)\n\n .unwrap_or_else(|_| panic!(\"Unable to write default config file {:?}\", conf_file));\n\n }\n\n\n\n let config_args: Args = Figment::from(Toml::file(&conf_file)).extract()?;\n", "file_path": "src/config.rs", "rank": 47, "score": 24578.21097504513 }, { "content": " pub mm_secret_cmd: Option<String>,\n\n\n\n /// directory for state file\n\n ///\n\n /// Will use content of XDG_CACHE_HOME if unset.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n #[structopt(long, env, parse(from_os_str), name = \"cache dir\")]\n\n pub state_dir: Option<PathBuf>,\n\n\n\n /// beginning of status update with the format hh:mm\n\n ///\n\n /// Before this time the status won't be updated\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n #[structopt(short, long, env, name = \"begin hh:mm\")]\n\n pub begin: Option<String>,\n\n\n\n /// end of status update with the format hh:mm\n\n ///\n\n /// After this time the status won't be updated\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n", "file_path": "src/config.rs", "rank": 48, "score": 24578.04674865901 }, { "content": " /// If the secret is a password, `secret` will be updated later when login to the mattermost\n\n /// server\n\n pub fn update_secret_with_command(mut self) -> Result<Args> {\n\n if let Some(command) = &self.mm_secret_cmd {\n\n let params =\n\n shell_words::split(command).context(\"Splitting mm_token_cmd into shell 
words\")?;\n\n debug!(\"Running command {}\", command);\n\n let output = Command::new(&params[0])\n\n .args(&params[1..])\n\n .output()\n\n .context(format!(\"Error when running {}\", &command))?;\n\n let secret = String::from_utf8_lossy(&output.stdout);\n\n if secret.len() == 0 {\n\n bail!(\"command '{}' returns nothing\", &command);\n\n }\n\n // /!\\ Do not spit secret on stdout on released binary.\n\n //debug!(\"setting secret to {}\", secret);\n\n self.mm_secret = Some(secret.to_string());\n\n }\n\n Ok(self)\n", "file_path": "src/config.rs", "rank": 49, "score": 24577.93626209567 }, { "content": "\n\narg_enum! {\n\n/// Enum used to encode `secret_type` parameter (password or token)\n\n///\n\n/// When set to [Password], the secret is used to obtain a session token\n\n/// by using the login API. When set to [Token], the secret is a private access\n\n/// token directly usable to access API.\n\n#[derive(Serialize, Deserialize,Debug)]\n\npub enum SecretType {\n\n Token,\n\n Password,\n\n}\n\n}\n\n\n\n/// Status that shall be send when a wifi with `wifi_string` is being seen.\n\n#[derive(Debug, PartialEq)]\n\npub struct WifiStatusConfig {\n\n /// wifi SSID substring associated to this object custom status\n\n pub wifi_string: String,\n\n /// string description of the emoji that will be set as a custom status (like `home` for\n", "file_path": "src/config.rs", "rank": 50, "score": 24577.87070613062 }, { "content": " #[structopt(short, long, env, name = \"end hh:mm\")]\n\n pub end: Option<String>,\n\n\n\n /// Expiration time with the format hh:mm\n\n ///\n\n /// This parameter is used to set the custom status expiration time\n\n /// Set to \"0\" to avoid setting expiration time\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n #[structopt(long, env, name = \"expiry hh:mm\")]\n\n pub expires_at: Option<String>,\n\n\n\n /// delay between wifi SSID polling in seconds\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n #[structopt(long, env)]\n\n pub 
delay: Option<u32>,\n\n\n\n #[allow(missing_docs)]\n\n #[structopt(flatten)]\n\n #[serde(deserialize_with = \"de_from_str\")]\n\n pub verbose: QuietVerbose,\n", "file_path": "src/config.rs", "rank": 51, "score": 24576.496161092502 }, { "content": " /// mattermost URL\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n #[structopt(short = \"u\", long, env, name = \"url\")]\n\n pub mm_url: Option<String>,\n\n\n\n /// User name used for mattermost login or for password or private token lookup in OS keyring.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n #[structopt(long, env, name = \"username\")]\n\n pub mm_user: Option<String>,\n\n\n\n /// Type of secret. Either `Password` (default) or `Token`\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n #[structopt(short = \"t\", long, env, possible_values = &SecretType::variants(), case_insensitive = true)]\n\n pub secret_type: Option<SecretType>,\n\n\n\n /// Service name used for mattermost secret lookup in OS keyring.\n\n ///\n\n /// The secret is either a `password` (default) or a`token` according to\n\n /// `secret_type` option\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n", "file_path": "src/config.rs", "rank": 52, "score": 24576.24411352445 }, { "content": " /// Pass many times to increase verbosity level, up to 3.\n\n #[structopt(\n\n name = \"quietverbose\",\n\n long = \"verbose\",\n\n short = \"v\",\n\n parse(from_occurrences),\n\n conflicts_with = \"quietquiet\",\n\n global = true\n\n )]\n\n verbosity_level: u8,\n\n\n\n /// Decrease the output's verbosity level.\n\n ///\n\n /// Used once, it will set error log level.\n\n /// Used twice, will silent the log completely\n\n #[structopt(\n\n name = \"quietquiet\",\n\n long = \"quiet\",\n\n short = \"q\",\n\n parse(from_occurrences),\n", "file_path": "src/config.rs", "rank": 53, "score": 24575.78553877911 }, { "content": " #[structopt(long, env, name = \"token service name\")]\n\n pub keyring_service: Option<String>,\n\n\n\n /// 
mattermost private Token\n\n ///\n\n /// Usage of this option may leak your personal token. It is recommended to\n\n /// use `mm_token_cmd` or `keyring_service`.\n\n ///\n\n /// The secret is either a `password` (default) or a`token` according to\n\n /// `secret_type` option\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n #[structopt(long, env, hide_env_values = true, name = \"token\")]\n\n pub mm_secret: Option<String>,\n\n\n\n /// mattermost secret command\n\n ///\n\n /// The secret is either a `password` (default) or a`token` according to\n\n /// `secret_type` option\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n #[structopt(long, env, name = \"command\")]\n", "file_path": "src/config.rs", "rank": 54, "score": 24575.243744275842 }, { "content": " debug!(\"config Args : {:#?}\", config_args);\n\n debug!(\"parameter Args : {:#?}\", self);\n\n // Merge config Default → Config File → command line args\n\n let res = Figment::from(Serialized::defaults(Args::default()))\n\n .merge(Toml::file(&conf_file))\n\n .merge(Serialized::defaults(self))\n\n .extract()\n\n .context(\"Merging configuration file and parameters\")?;\n\n debug!(\"Merged config and parameters : {:#?}\", res);\n\n Ok(res)\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 55, "score": 24575.08662987631 }, { "content": " conflicts_with = \"quietverbose\",\n\n global = true\n\n )]\n\n quiet_level: u8,\n\n}\n\n\n\nimpl Default for QuietVerbose {\n\n fn default() -> Self {\n\n QuietVerbose {\n\n verbosity_level: 1,\n\n quiet_level: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl Serialize for QuietVerbose {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n serializer.serialize_str(self.get_level_filter())\n\n }\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 56, "score": 24573.352278553346 }, { "content": " if splitted.len() != 3 {\n\n bail!(\n\n \"Expect status argument to contain two and only two :: separator (in '{}')\",\n\n &s\n\n 
);\n\n }\n\n Ok(WifiStatusConfig {\n\n wifi_string: splitted[0].to_owned(),\n\n emoji: splitted[1].to_owned(),\n\n text: splitted[2].to_owned(),\n\n })\n\n }\n\n}\n\n\n\n// Courtesy of structopt_flags crate\n\n/// [`structopt::StructOpt`] implementing the verbosity parameter\n\n#[derive(structopt::StructOpt, Debug, Clone)]\n\npub struct QuietVerbose {\n\n /// Increase the output's verbosity level\n\n ///\n", "file_path": "src/config.rs", "rank": 57, "score": 24573.26344047253 }, { "content": "\n\n #[structopt(skip)]\n\n /// Days off for which the custom status shall not be changed\n\n pub offdays: OffDays,\n\n}\n\n\n\nimpl Default for Args {\n\n fn default() -> Args {\n\n let res = Args {\n\n #[cfg(target_os = \"linux\")]\n\n interface_name: Some(\"wlan0\".into()),\n\n #[cfg(target_os = \"windows\")]\n\n interface_name: Some(\"Wireless Network Connection\".into()),\n\n #[cfg(target_os = \"macos\")]\n\n interface_name: Some(\"en0\".into()),\n\n status: [\"home::house::working at home\".to_string()].to_vec(),\n\n delay: Some(60),\n\n state_dir: Some(\n\n ProjectDirs::from(\"net\", \"ams\", \"automattermostatus\")\n\n .expect(\"Unable to find a project dir\")\n", "file_path": "src/config.rs", "rank": 58, "score": 24572.934832673913 }, { "content": "}\n\n\n\nimpl Off for Args {\n\n fn is_off_time(&self) -> bool {\n\n self.offdays.is_off_time() // The day is off, so we are off\n\n || if let Some(begin) = parse_from_hmstr(&self.begin) {\n\n Local::now() < begin // now is before begin, we are off\n\n } else {\n\n false // now is after begin, we are on duty if not after end\n\n }\n\n || if let Some(end) = parse_from_hmstr(&self.end) {\n\n Local::now() > end // now is after end, we are off\n\n } else {\n\n false // now is before end, we are on duty\n\n }\n\n }\n\n}\n\n\n\nimpl Args {\n\n /// Update `args.mm_secret` with the one fetched from OS keyring\n", "file_path": "src/config.rs", "rank": 59, "score": 24572.155111775694 }, { "content": " quiet_level: 0,\n\n }),\n\n 
\"debug\" => Ok(QuietVerbose {\n\n verbosity_level: 2,\n\n quiet_level: 0,\n\n }),\n\n _ => Ok(QuietVerbose {\n\n verbosity_level: 3,\n\n quiet_level: 0,\n\n }),\n\n }\n\n}\n\n\n\nimpl QuietVerbose {\n\n /// Returns the string associated to the current verbose level\n\n pub fn get_level_filter(&self) -> &str {\n\n let quiet: i8 = if self.quiet_level > 1 {\n\n 2\n\n } else {\n\n self.quiet_level as i8\n", "file_path": "src/config.rs", "rank": 60, "score": 24571.380547266876 }, { "content": " .cache_dir()\n\n .to_owned(),\n\n ),\n\n mm_user: None,\n\n keyring_service: None,\n\n mm_secret: None,\n\n mm_secret_cmd: None,\n\n secret_type: Some(SecretType::Password),\n\n mm_url: Some(\"https://mattermost.example.com\".into()),\n\n verbose: QuietVerbose {\n\n verbosity_level: 1,\n\n quiet_level: 0,\n\n },\n\n expires_at: Some(\"19:30\".to_string()),\n\n begin: Some(\"8:00\".to_string()),\n\n end: Some(\"19:30\".to_string()),\n\n offdays: OffDays::default(),\n\n };\n\n res\n\n }\n", "file_path": "src/config.rs", "rank": 61, "score": 24571.316405957976 }, { "content": "use super::osx_parse::extract_airport_ssid;\n\nuse crate::wifiscan::{WiFi, WifiError, WifiInterface};\n\nuse std::process::Command;\n\n\n\nimpl WiFi {\n\n /// Create MacOS `WiFi` interface\n\n pub fn new(interface: &str) -> Self {\n\n WiFi {\n\n interface: interface.to_owned(),\n\n }\n\n }\n\n}\n\n\n\n/// Wifi interface for osx operating system.\n\n/// This provides basic functionalities for wifi interface.\n\nimpl WifiInterface for WiFi {\n\n fn is_wifi_enabled(&self) -> Result<bool, WifiError> {\n\n let output = Command::new(\"networksetup\")\n\n .args(&[\"radio\", \"wifi\"])\n\n .output()\n", "file_path": "src/wifiscan/osx.rs", "rank": 62, "score": 23383.098113240143 }, { "content": "use super::windows_parse::extract_netsh_ssid;\n\nuse crate::wifiscan::{WiFi, WifiError, WifiInterface};\n\nuse std::process::Command;\n\n\n\nimpl WiFi {\n\n /// Create windows `WiFi` interface\n\n pub fn new(interface: &str) 
-> Self {\n\n WiFi {\n\n interface: interface.to_owned(),\n\n }\n\n }\n\n}\n\n\n\n/// Wifi interface for windows operating system.\n\n/// This provides basic functionalities for wifi interface.\n\nimpl WifiInterface for WiFi {\n\n /// Check if wireless network adapter is enabled.\n\n fn is_wifi_enabled(&self) -> Result<bool, WifiError> {\n\n let output = Command::new(\"netsh\")\n\n .args(&[\n", "file_path": "src/wifiscan/windows.rs", "rank": 63, "score": 23383.034394017428 }, { "content": "use crate::wifiscan::{WiFi, WifiError, WifiInterface};\n\nuse std::process::Command;\n\n\n\nimpl WiFi {\n\n /// Create linux `WiFi` interface\n\n pub fn new(interface: &str) -> Self {\n\n WiFi {\n\n interface: interface.to_owned(),\n\n }\n\n }\n\n}\n\n\n\n/// Wifi interface for linux operating system.\n\n/// This provides basic functionalities for wifi interface.\n\nimpl WifiInterface for WiFi {\n\n /// Check if wireless network adapter is enabled.\n\n fn is_wifi_enabled(&self) -> Result<bool, WifiError> {\n\n let output = Command::new(\"nmcli\")\n\n .args(&[\"radio\", \"wifi\"])\n\n .output()\n", "file_path": "src/wifiscan/linux.rs", "rank": 64, "score": 23382.920256661444 }, { "content": " .map_err(WifiError::IoError)?;\n\n\n\n Ok(String::from_utf8_lossy(&output.stdout).contains(\"enabled\"))\n\n }\n\n\n\n fn visible_ssid(&self) -> Result<Vec<String>, WifiError> {\n\n let output = Command::new(\"nmcli\")\n\n .args(&[\"-t\", \"-m\", \"tabular\", \"-f\", \"SSID\", \"device\", \"wifi\"])\n\n .output()\n\n .map_err(WifiError::IoError)?;\n\n let stdout = String::from_utf8_lossy(&output.stdout).to_owned();\n\n Ok(stdout.split('\\n').map(str::to_string).collect())\n\n }\n\n}\n", "file_path": "src/wifiscan/linux.rs", "rank": 65, "score": 23377.962685312774 }, { "content": " \"wlan\",\n\n \"show\",\n\n \"interface\",\n\n &format!(\"name= \\\"{}\\\"\", self.interface),\n\n ])\n\n .output()\n\n .map_err(|err| WifiError::IoError(err))?;\n\n\n\n 
Ok(!String::from_utf8_lossy(&output.stdout).contains(\"There is no wireless interface\"))\n\n }\n\n\n\n fn visible_ssid(&self) -> Result<Vec<String>, WifiError> {\n\n let output = Command::new(\"netsh\")\n\n .args(&[\"wlan\", \"show\", \"networks\"])\n\n .output()\n\n .map_err(|err| WifiError::IoError(err))?;\n\n let stdout = String::from_utf8_lossy(&output.stdout).to_owned();\n\n Ok(extract_netsh_ssid(&stdout))\n\n }\n\n}\n", "file_path": "src/wifiscan/windows.rs", "rank": 66, "score": 23377.81662213573 }, { "content": " .map_err(|err| WifiError::IoError(err))?;\n\n\n\n Ok(String::from_utf8_lossy(&output.stdout).contains(\"enabled\"))\n\n }\n\n\n\n fn visible_ssid(&self) -> Result<Vec<String>, WifiError> {\n\n let output = Command::new(\n\n \"/System/Library/PrivateFrameworks/Apple80211.framework/Versions/A/Resources/airport \",\n\n )\n\n .args(&[\"scan\"])\n\n .output()\n\n .map_err(|err| WifiError::IoError(err))?;\n\n let stdout = String::from_utf8_lossy(&output.stdout).to_owned();\n\n Ok(extract_airport_ssid(&stdout))\n\n }\n\n}\n", "file_path": "src/wifiscan/osx.rs", "rank": 67, "score": 23377.793280226713 }, { "content": "//! 
Module responsible for sending custom status change to mattermost.\n\nuse crate::mattermost::BaseSession;\n\nuse crate::utils::parse_from_hmstr;\n\nuse anyhow::Result;\n\nuse chrono::{DateTime, Local};\n\nuse derivative::Derivative;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json as json;\n\nuse std::fmt;\n\nuse thiserror::Error;\n\nuse tracing::debug;\n\n\n\n/// Implement errors specific to `MMStatus`\n\n#[allow(missing_docs)]\n\n#[derive(Debug, Error)]\n\npub enum MMSError {\n\n #[error(\"Bad json data\")]\n\n BadJSONData(#[from] serde_json::error::Error),\n\n #[error(\"HTTP request error\")]\n\n HTTPRequestError(#[from] ureq::Error),\n", "file_path": "src/mattermost/status.rs", "rank": 68, "score": 23079.619540343418 }, { "content": " }\n\n }\n\n /// Add expiration time with the format \"hh:mm\" to the mattermost custom status\n\n pub fn expires_at(&mut self, time_str: &Option<String>) {\n\n // do not set expiry time if set in the past\n\n if let Some(expiry) = parse_from_hmstr(time_str) {\n\n if Local::now() < expiry {\n\n self.expires_at = Some(expiry);\n\n self.duration = Some(\"date_and_time\".to_owned());\n\n } else {\n\n debug!(\"now {:?} >= expiry {:?}\", Local::now(), expiry);\n\n }\n\n }\n\n // let dt: NaiveDateTime = NaiveDate::from_ymd(2016, 7, 8).and_hms(9, 10, 11);\n\n }\n\n /// This function is essentially used for debugging or testing\n\n pub fn to_json(&self) -> Result<String, MMSError> {\n\n json::to_string(&self).map_err(MMSError::BadJSONData)\n\n }\n\n\n", "file_path": "src/mattermost/status.rs", "rank": 69, "score": 23075.449973114024 }, { "content": " #[error(\"Mattermost login error\")]\n\n LoginError(#[from] anyhow::Error),\n\n}\n\n\n\n/// Custom struct to serialize the HTTP POST data into a json objecting using serde_json\n\n/// For a description of these fields see the [MatterMost OpenApi sources](https://github.com/mattermost/mattermost-api-reference/blob/master/v4/source/status.yaml)\n\n#[derive(Derivative, Serialize, 
Deserialize, Clone, PartialEq, Eq, Default)]\n\n#[derivative(Debug)]\n\npub struct MMStatus {\n\n /// custom status text description\n\n pub text: String,\n\n /// custom status emoji name\n\n pub emoji: String,\n\n /// custom status duration\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub duration: Option<String>,\n\n /// custom status expiration\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub expires_at: Option<DateTime<Local>>,\n\n}\n", "file_path": "src/mattermost/status.rs", "rank": 70, "score": 23073.66267514642 }, { "content": " /// Send self custom status once\n\n #[allow(clippy::borrowed_box)] // Box needed beacause we can get two different types.\n\n pub fn _send(&self, session: &Box<dyn BaseSession>) -> Result<ureq::Response, ureq::Error> {\n\n let token = session\n\n .token()\n\n .expect(\"Internal Error: token is unset in current session\");\n\n let uri = session.base_uri().to_owned() + \"/api/v4/users/me/status/custom\";\n\n ureq::put(&uri)\n\n .set(\"Authorization\", &(\"Bearer \".to_owned() + token))\n\n .send_json(serde_json::to_value(&self).unwrap_or_else(|e| {\n\n panic!(\n\n \"Serialization of MMStatus '{:?}' failed with {:?}\",\n\n &self, &e\n\n )\n\n }))\n\n }\n\n /// Send self custom status, trying to login once in case of 401 failure.\n\n pub fn send(&mut self, session: &mut Box<dyn BaseSession>) -> Result<ureq::Response, MMSError> {\n\n debug!(\"Post status: {}\", self.to_owned().to_json()?);\n\n match self._send(session) {\n", "file_path": "src/mattermost/status.rs", "rank": 71, "score": 23072.607010902557 }, { "content": " use super::*;\n\n use crate::mattermost::{BaseSession, Session};\n\n use httpmock::prelude::*;\n\n use test_log::test; // Automatically trace tests\n\n #[test]\n\n fn send_required_json() -> Result<()> {\n\n // Start a lightweight mock server.\n\n let server = MockServer::start();\n\n let mut mmstatus = MMStatus::new(\"text\".into(), \"emoji\".into());\n\n\n\n // Create a mock on the 
server.\n\n let server_mock = server.mock(|expect, resp_with| {\n\n expect\n\n .method(PUT)\n\n .header(\"Authorization\", \"Bearer token\")\n\n .path(\"/api/v4/users/me/status/custom\")\n\n .json_body(serde_json::json!({\"emoji\":\"emoji\",\"text\":\"text\"}\n\n ));\n\n resp_with\n\n .status(200)\n", "file_path": "src/mattermost/status.rs", "rank": 72, "score": 23071.040882596866 }, { "content": " .header(\"content-type\", \"text/html\")\n\n .body(\"ok\");\n\n });\n\n\n\n // Send an HTTP request to the mock server. This simulates your code.\n\n let mut session: Box<dyn BaseSession> =\n\n Box::new(Session::new(&server.url(\"\")).with_token(\"token\"));\n\n let resp = mmstatus.send(&mut session)?;\n\n\n\n // Ensure the specified mock was called exactly one time (or fail with a detailed error description).\n\n server_mock.assert();\n\n // Ensure the mock server did respond as specified.\n\n assert_eq!(resp.status(), 200);\n\n Ok(())\n\n }\n\n #[test]\n\n fn catch_api_error() -> Result<()> {\n\n // Start a lightweight mock server.\n\n let server = MockServer::start();\n\n let mut mmstatus = MMStatus::new(\"text\".into(), \"emoji\".into());\n", "file_path": "src/mattermost/status.rs", "rank": 73, "score": 23071.03900419314 }, { "content": " Ok(response) => Ok(response),\n\n Err(ureq::Error::Status(code, response)) => {\n\n /* the server returned an unexpected status\n\n code (such as 400, 500 etc) */\n\n if code == 401 {\n\n // relogin and retry\n\n session.login().map_err(MMSError::LoginError)?;\n\n self._send(session)\n\n } else {\n\n Err(ureq::Error::Status(code, response))\n\n }\n\n }\n\n Err(e) => Err(e),\n\n }\n\n .map_err(MMSError::HTTPRequestError)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod send_should {\n", "file_path": "src/mattermost/status.rs", "rank": 74, "score": 23070.852870249648 }, { "content": "\n\n // Create a mock on the server.\n\n let server_mock = server.mock(|expect, resp_with| {\n\n expect\n\n .method(PUT)\n\n .header(\"Authorization\", \"Bearer 
token\")\n\n .path(\"/api/v4/users/me/status/custom\")\n\n .json_body(serde_json::json!({\"emoji\":\"emoji\",\"text\":\"text\"}\n\n ));\n\n resp_with\n\n .status(500)\n\n .header(\"content-type\", \"text/html\")\n\n .body(\"Internal error\");\n\n });\n\n\n\n // Send an HTTP request to the mock server. This simulates your code.\n\n let mut session: Box<dyn BaseSession> =\n\n Box::new(Session::new(&server.url(\"\")).with_token(\"token\"));\n\n let resp = mmstatus.send(&mut session);\n\n assert!(resp.is_err());\n\n\n\n // Ensure the specified mock was called exactly one time (or fail with a detailed error description).\n\n server_mock.assert();\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/mattermost/status.rs", "rank": 75, "score": 23069.23966276168 }, { "content": "\n\nimpl fmt::Display for MMStatus {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"{}:{} (duration: {:?}, expire at: {:?})\",\n\n self.emoji, self.text, self.duration, self.expires_at\n\n )\n\n }\n\n}\n\n\n\nimpl MMStatus {\n\n /// Create a `MMStatus` ready to be sent to the `mm_base_uri` mattermost instance.\n\n /// Authentication is done with the private access `token`.\n\n pub fn new(text: String, emoji: String) -> MMStatus {\n\n MMStatus {\n\n text,\n\n emoji,\n\n duration: None,\n\n expires_at: None,\n", "file_path": "src/mattermost/status.rs", "rank": 76, "score": 23068.21885994096 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n mod should {\n\n use super::*;\n\n use anyhow::Result;\n\n #[test]\n\n fn extract_expected_ssid() -> Result<()> {\n\n let res = include_str!(\"macscan.xml\");\n\n assert_eq!(\n\n extract_airport_ssid(res),\n\n [\"NEUF_5EE4\", \"FreeWifi_secure\", \"SFR_6A68\", \"NEUF_5EE4\"]\n\n );\n\n Ok(())\n\n }\n\n }\n\n}\n", "file_path": "src/wifiscan/osx_parse.rs", "rank": 77, "score": 21976.873305553247 }, { "content": "pub(crate) fn extract_netsh_ssid(netsh_output: &str) -> Vec<String> {\n\n netsh_output\n\n 
.split(\"\\n\")\n\n .filter(|x| x.starts_with(\"SSID\"))\n\n .map(|x| {\n\n x.split(\":\")\n\n .skip(1)\n\n .collect::<Vec<&str>>()\n\n .join(\":\")\n\n .trim()\n\n .to_owned()\n\n })\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n mod should {\n\n use super::*;\n", "file_path": "src/wifiscan/windows_parse.rs", "rank": 78, "score": 21976.038992305843 }, { "content": "use quick_xml::events::Event;\n\nuse quick_xml::Reader;\n\nuse tracing::error;\n\n\n\npub(crate) fn extract_airport_ssid(airport_output: &str) -> Vec<String> {\n\n let mut reader = Reader::from_str(airport_output);\n\n reader.trim_text(true);\n\n\n\n let mut txt = Vec::new();\n\n let mut buf = Vec::new();\n\n\n\n // The `Reader` does not implement `Iterator` because it outputs borrowed data (`Cow`s)\n\n loop {\n\n match reader.read_event(&mut buf) {\n\n Ok(Event::Start(ref e)) => {\n\n match e.name() {\n\n b\"key\" => {\n\n if let Ok(Event::Text(e)) = reader.read_event(&mut buf) {\n\n if e.unescape_and_decode(&reader).unwrap() == \"SSID_STR\" {\n\n let _ = reader.read_event(&mut buf); // </key>\n", "file_path": "src/wifiscan/osx_parse.rs", "rank": 79, "score": 21973.61474456562 }, { "content": " use anyhow::Result;\n\n #[test]\n\n fn extract_expected_ssid() -> Result<()> {\n\n let res = r#\"\n\nInterface name : Wireless Network Connection\n\nThere are 22 networks currently visible.\n\n\n\nSSID 1 : SKYXXXXX\n\n Network type : Infrastructure\n\n Authentication : WPA2-Personal\n\n Encryption : CCMP\n\n\n\nSSID 2 : SKYXXXXX\n\n Network type : Infrastructure\n\n Authentication : WPA2-Personal\n\n Encryption : CCMP\n\n\n\nSSID 3 : XXXXX\n\n Network type : Infrastructure\n\n Authentication : WPA2-Personal\n", "file_path": "src/wifiscan/windows_parse.rs", "rank": 80, "score": 21972.78773638437 }, { "content": " let _ = reader.read_event(&mut buf); // </string>\n\n if let Ok(Event::Text(e)) = reader.read_event(&mut buf) {\n\n txt.push(e.unescape_and_decode(&reader).unwrap());\n\n } 
else {\n\n error!(\"Bad xml structure\")\n\n }\n\n }\n\n }\n\n }\n\n _ => (),\n\n }\n\n }\n\n Ok(Event::Eof) => break,\n\n Err(e) => panic!(\"Error at position {}: {:?}\", reader.buffer_position(), e),\n\n _ => (), // There are several other `Event`s we do not consider here\n\n }\n\n }\n\n // if we don't keep a borrow elsewhere, we can clear the buffer to keep memory usage low\n\n buf.clear();\n\n txt\n", "file_path": "src/wifiscan/osx_parse.rs", "rank": 81, "score": 21970.78528941926 }, { "content": " Encryption : CCMP\n\n\n\nSSID 4 : BTOpenzoneXXX\n\n Network type : Infrastructure\n\n Authentication : Open\n\n Encryption : None\n\n\"#;\n\n\n\n assert_eq!(\n\n extract_netsh_ssid(res),\n\n [\"SKYXXXXX\", \"SKYXXXXX\", \"XXXXX\", \"BTOpenzoneXXX\"]\n\n );\n\n Ok(())\n\n }\n\n }\n\n}\n", "file_path": "src/wifiscan/windows_parse.rs", "rank": 82, "score": 21968.21004375394 }, { "content": "## Configuration\n\n*Automattermostatus* get configuration from both a config file and a command\n\nline (the later override the former).\n\n\n\n### Config File\n\nThe config file is created if it does not exist. 
It is created or read in the following places depending on your OS:\n\n- the [XDG user directory](https://www.freedesktop.org/wiki/Software/xdg-user-dirs/) specifications on Linux (usually `~/.config/automattermostatus/automattermostatus.toml`),\n\n- the [Known Folder system](https://msdn.microsoft.com/en-us/library/windows/desktop/dd378457.aspx) on Windows (usually `{FOLDERID_RoamingAppData}\\ams\\automattermostatus\\config`),\n\n- the [Standard Directories](https://developer.apple.com/library/content/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/FileSystemOverview/FileSystemOverview.html#//apple_ref/doc/uid/TP40010672-CH2-SW6) on macOS (usually `$HOME/Library/Application Support/net.ams.automattermost`).\n\n\n\nA sample config file is:\n\n\n\n<!-- `$ cat config.toml.example` as toml -->\n\n```toml\n\n# Automattermostatus example configuration\n\n#\n\n# Wifi interface name. Use to check that wifi is enabled (Mac and Windows)\n\ninterface_name = 'wlp0s20f3'\n\n\n\n# Status string containing 3 fields separated by `::`\n\n# - First field is the wifi substring that should be contained in a visible SSID\n\n# for this status to be set. If empty the associated status wil be used for\n\n# off times.\n\n# - Second field is the emoji string for the custom status.\n\n# - Third field is the description text foir the custom status.\n\n#\n\nstatus = [\"corporatewifi::corplogo::On premise work\",\n\n\t \"homenet::house::Working home\",\n\n\t \"::sleeping::Off time\"]\n\n\n\n# Base url of the mattermost instanbce\n\nmm_url = 'https://mattermost.example.com'\n\n\n\n# Level of verbosity among Off, Error, Warn, Info, Debug, Trace\n\nverbose = 'Info'\n\n\n\n# The type of the secret given by `mm_secret`, `mm_secret_cmd` or `kering_*`\n\n# parameters. 
Either:\n\n# secret_type = \"Token\" # for using a private acces token\n\n# secret_type = \"Password\" # for using login and password credentials where\n", "file_path": "README.md", "rank": 83, "score": 15139.366673723605 }, { "content": "# auto*mat-termo-st*atus\n\nAutomate your mattermost custom status with the help of visible Wi-Fi SSID.\n\n\n\nDevelopment site is hosted on [gitlab](https://gitlab.com/matclab/automattermostatus).\n\n\n\nReleased binaries are available from [this\n\npage](https://gitlab.com/matclab/automattermostatus/-/releases).\n\n\n\n## Usage\n\nHere after is the command line help.\n\n<!-- `$ target/debug/automattermostatus --help` as text -->\n\n```text\n\nautomattermostatus 0.2.1\n\nAutomate mattermost status with the help of wifi network\n\n\n\nUse current visible wifi SSID to automate your mattermost status. This program is meant to either be running in\n\nbackground or be call regularly with option `--delay 0`. It will then update your mattermost custom status according to\n\nthe config file\n\n\n\nUSAGE:\n\n automattermostatus [FLAGS] [OPTIONS]\n\n\n\nFLAGS:\n\n -h, --help \n\n Prints help information\n\n\n\n -q, --quiet \n\n Decrease the output's verbosity level.\n\n \n\n Used once, it will set error log level. Used twice, will silent the log completely\n\n -v, --verbose \n\n Increase the output's verbosity level\n\n \n\n Pass many times to increase verbosity level, up to 3.\n\n -V, --version \n\n Prints version information\n\n\n\n\n\nOPTIONS:\n\n -b, --begin <begin hh:mm> \n\n beginning of status update with the format hh:mm\n\n \n\n Before this time the status won't be updated [env: BEGIN=]\n\n --state-dir <cache dir> \n\n directory for state file\n\n \n\n Will use content of XDG_CACHE_HOME if unset. 
[env: STATE_DIR=]\n\n --mm-secret-cmd <command> \n\n mattermost secret command\n\n \n\n The secret is either a `password` (default) or a`token` according to `secret_type` option [env:\n\n MM_SECRET_CMD=]\n\n --delay <delay> \n", "file_path": "README.md", "rank": 84, "score": 15138.904258841538 }, { "content": "# v0.2.1 \n\n\n\nTentative fix for #2 where mattermosr instance does not always take the \n\ncustom status into account despite returning 200.\n\n\n\n# v0.2.0 \n\n\n\n## Breaking change\n\nConfiguration file has move. Please move it before upgrading.\n\n- On windows configuration file is read from `{FOLDERID_RoamingAppData}\\ams\\automattermostatus\\config` instead of `{FOLDERID_RoamingAppData}\\clabaut\\automattermostatus\\config`.\n\n- On MacOS configuration file is read from `$HOME/Library/Application Support/net.ams.automattermost` instead of `$HOME/Library/Application Support/net.clabaut.automattermost`.\n\n\n\n# v0.1.10 : Bug correction\n\n\n\n## Bugs\n\n\n\n- #2 which prevent a status update if there is some connection problem at\n\n startup\n\n\n\n# v0.1.9 : debian package\n\n\n\nNo functional changes nor bug fixes.\n\n\n\n# v0.1.8 : login simplification\n\n\n\n## Feature\n\n\n\n- possibility to use login+password to connect to mattermost server\n\n\n\n## Bugs\n\n\n\n- allow to pass `delay` of more than 255s on command line\n\n- continue to match wifi event after finding an empty SSID\n\n\n\n# v0.1.7 : use OS keyring and allow status for off time\n\n\n\n- Lookup OS keyring for mattermost token. 
Should work on all three supported OS.\n\n- Use empty `wifi_substring` to define a status that will be used for off\n\n times.\n\n\n\n# v0.1.6 : Correct typo in XDG desktop file\n\n\n\n# v0.1.5 : Correct message expiry implementation\n\n\n\n- expiry is now computed when sending message\n\n\n\n# v0.1.4 : Implement Off time and message expiry\n\n\n\n- add begin and end time parameters\n\n- add off days configuration with week selection by parity\n\n- add expires_at to define mattermosts status expiration time\n\n- better error messages\n\n\n\n# v0.1.3 : Still working release process\n\n\n\nNo functional change.\n\n\n\n# v0.1.2 : Working on CI and release process\n\nNo functional change.\n\n\n\n# v0.1.0 : Initial release\n\nAll basic functionalities :\n\n- get visible SSID on three major OS,\n\n- update mattermost custom status,\n\n- use configuration file.\n", "file_path": "CHANGELOG.md", "rank": 85, "score": 15138.017745019959 }, { "content": " delay between wifi SSID polling in seconds [env: DELAY=]\n\n\n\n -e, --end <end hh:mm> \n\n end of status update with the format hh:mm\n\n \n\n After this time the status won't be updated [env: END=]\n\n --expires-at <expiry hh:mm> \n\n Expiration time with the format hh:mm\n\n \n\n This parameter is used to set the custom status expiration time Set to \"0\" to avoid setting expiration time\n\n [env: EXPIRES_AT=]\n\n -i, --interface-name <itf_name> \n\n wifi interface name [env: INTERFACE_NAME=]\n\n\n\n -t, --secret-type <secret-type>\n\n Type of secret. Either `Password` (default) or `Token` [env: SECRET_TYPE=] [possible values: Token,\n\n Password]\n\n --mm-secret <token> \n\n mattermost private Token\n\n \n\n Usage of this option may leak your personal token. 
It is recommended to use `mm_token_cmd` or\n\n `keyring_service`.\n\n \n\n The secret is either a `password` (default) or a`token` according to `secret_type` option [env: MM_SECRET]\n\n --keyring-service <token service name>\n\n Service name used for mattermost secret lookup in OS keyring.\n\n \n\n The secret is either a `password` (default) or a`token` according to `secret_type` option [env:\n\n KEYRING_SERVICE=]\n\n -u, --mm-url <url> \n\n mattermost URL [env: MM_URL=]\n\n\n\n --mm-user <username>\n\n User name used for mattermost login or for password or private token lookup in OS keyring [env: MM_USER=]\n\n\n\n -s, --status <wifi_substr::emoji::text>... \n\n Status configuration triplets (:: separated)\n\n \n\n Each triplet shall have the format: \"wifi_substring::emoji_name::status_text\". If `wifi_substring` is empty,\n\n the ssociated status will be used for off time.\n\n```\n", "file_path": "README.md", "rank": 86, "score": 15136.134692551928 }, { "content": "# the login is given by `mm_user`\n\nsecret_type = \"Token\"\n\n\n\n# mattermost authentication secret. It is recommended to use `mm_secret_cmd` or\n\n# better the OS keyring with `keyring_user` and `keyring_service`.\n\n# mm_secret= 'cieVee1Ohgeixaevo0Oiquiu'\n\n\n\n# Command that should be executed to get mattermost authentication secret (the\n\n# secret shall be printed on stdout). See\n\n# https://docs.mattermost.com/integrations/cloud-personal-access-secrets.html#creating-a-personal-access-secret.\n\n# It is recommended to use the OS keyring with `keyring_service`.\n\n# mm_secret_cmd = \"secret-tool lookup name automattermostatus\"\n\n\n\n\n\n# *service* name used to query OS keyring in order to retrieve your\n\n# mattermost private access secret. The user used to query the keyring is\n\n# `mm_user`\n\nkeyring_service = 'mattermost_secret'\n\n\n\n# set expiry time for custom mattermost status\n\nexpires_at = \"19:30\"\n\n\n\n# set begin and end time of the working period. 
Outside of this period, custom\n\n# status won't be set.\n\nbegin = \"8:00\"\n\nend = \"19:30\"\n\n\n\n# Definition of the day off (when automattermostatus do not update the user\n\n# custom status). If a day is no present then it is considered as a workday.\n\n# The attributes may be:\n\n# - `EveryWeek`: the day is always off\n\n# - `EvenWeek`: the day is off on even week (iso week number)\n\n# - `OddWeek`: the day is off on odd week (iso week number)\n\n[offdays]\n\nSat = 'EveryWeek'\n\nSun = 'EveryWeek'\n\nWed = 'EvenWeek'\n\n```\n\n\n", "file_path": "README.md", "rank": 87, "score": 15135.701570857196 }, { "content": "### Mattermost Authentication Secret\n\nThe secret use to authenticate to the mattermost instance may be either a\n\n*private access token* or a password associated with your username (see\n\n`secret_type` configuration parameter).\n\n\n\nThe advantage of using your private access token is that it would work even if\n\nyou've set up a MFA (multi-factor authentication). The cons is that your\n\naccount shall have been explicitly authorized to use a *private access token*\n\nby your mattermost instance administrator.\n\n\n\nYour [private\n\ntoken](https://docs.mattermost.com/integrations/cloud-personal-access-tokens.html#creating-a-personal-access-token), if enabled on your account,\n\nis available under `Account Parameters > Security > Personal Access Token`.\n\nYou should avoid to use `mm_secret` parameter as it may leak your token to\n\nother people having access to your computer. It is recommended to use the\n\n`mm_secret_cmd` option or better your local OS keyring with `mm_user` and\n\n`keyring_service` parameters. 
\n\n\n\nFor example, on linux you may use `secret-tool`:\n\n```sh\n\n# store your token (it will ask you the token)\n\nsecret-tool store --label='token' name automattermostatus\n\n# use the following command in `mm_token_cmd` to retrieve your token:\n\nsecret-tool lookup name automattermostatus\n\n```\n\nor the `keyring` command:\n\n```sh\n\n# store your token (it will ask you the token)\n\nkeyring set mattermost_token mattermost_username\n\n```\n\n```toml\n\n# use the following configuration\n\nsecret_type = \"Token\"\n\nmm_user = 'username'\n\nkeyring_service = 'mattermost_token'\n\n```\n\nOn Mac OS you may use\n\n[Keychain](https://en.wikipedia.org/wiki/Keychain_%28software%29) to store the\n\nmattermost access token, and it will be looked up by *automattermostatus* with\n\na configuration similar to the one given here before.\n\n\n\nOn Windows, I have no mean to test, but it looks like you may use any software\n\nbased upon\n\n[Microsoft Credential Locker](https://docs.microsoft.com/en-us/windows/uwp/security/credential-locker) to store your mattermost access token.\n\n\n\n\n", "file_path": "README.md", "rank": 88, "score": 15134.746005577319 }, { "content": "## Dependencies\n\nOn linux *automattermostatus* depends upon `NetworkManager` for getting the\n\nvisible SSIDs without root rights.\n\n\n\n## Installation\n\nYou can either compile yourself, download the latest binaries from the\n\n[release page](https://gitlab.com/matclab/automattermostatus/-/releases) or\n\ninstall one of the available packages.\n\n\n\n### Arch linux\n\nUse your favorite aur helper. 
For example:\n\n```\n\nyay -S automattermostatus\n\n```\n\n\n\n\n\n## Compilation\n\n\n\nYou can build the `automattermostatus` binary with:\n\n```\n\ncargo build --release --locked\n\n```\n\nThe binaries are then found in the `target/release` directory.\n\n\n\n## Launch at Startup\n\n### Linux\n\nYou may either copy the `distrib/automattermostatus.desktop` in\n\n`/etc/xdg/autostart` or in `$HOME/.config/autostart` or if you use systemd,\n\nyou may copy the *auttoolmostatus systemd unit*\n\n`distrib/automattermostatus.service` in `$HOME/.config/systemd/user` and do \n\n```sh\n\nsystemctl status --user enable --now automattermostatus\n\n```\n\nThe logs are then visible with \n\n```sh\n\njournalctl --user -u automattermostatus\n\n```\n\n\n\n### Windows\n\n\n\n**TODO**\n\n\n\n### Mac OS\n\n\n\n**TODO**\n\n\n\n# License\n\n\n\nLicensed under Apache License, Version 2.0 ([LICENSE-APACHE](https://www.apache.org/licenses/LICENSE-2.0)).\n\n\n\n## Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally\n\nsubmitted for inclusion in the work by you, as defined in the Apache-2.0\n\nlicense, shall be licensed as above, without any additional terms or\n\nconditions.\n\n\n\n### Issues\n\nYou may open issues or feature requests on [the gitlab issue\n\npage](https://gitlab.com/matclab/automattermostatus/-/issues).\n\n\n\n### Patch or Features\n\nYou may [fork](https://gitlab.com/matclab/automattermostatus/-/forks/new) the\n\nproject on gitlab, develop your patch or feature on a new branch and submit a\n\nnew merge request after having push back to your forked repo.\n\n\n\nDo not hesitate to open an issue beforehand to discuss the bug fix strategy or\n\nto ask about the feature you imagine.\n", "file_path": "README.md", "rank": 89, "score": 15132.867323769568 }, { "content": "#![warn(missing_docs)]\n\n#![doc = include_str!(\"../README.md\")]\n\n\n\nuse ::lib::config::Args;\n\nuse ::lib::*;\n\nuse anyhow::{Context, Result};\n\n\n\n#[paw::main]\n", 
"file_path": "src/main.rs", "rank": 92, "score": 13.883227448346304 } ]
Rust
imtui-sys/src/bindings.rs
richinseattle/imtui-rs
09ecddbd9a0e535068406d580c9aaf21d096d7af
/* automatically generated by rust-bindgen */ extern crate imgui_sys; #[allow(non_snake_case, non_camel_case_types, non_upper_case_globals)] pub mod root { #[allow(unused_imports)] use self::super::root; use imgui_sys::ImDrawData; pub mod ImTui { #[allow(unused_imports)] use self::super::super::root; pub type TCell = u32; #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct TScreen { pub nx: ::std::os::raw::c_int, pub ny: ::std::os::raw::c_int, pub nmax: ::std::os::raw::c_int, pub data: *mut root::ImTui::TCell, } #[test] fn bindgen_test_layout_TScreen() { assert_eq!( ::std::mem::size_of::<TScreen>(), 24usize, concat!("Size of: ", stringify!(TScreen)) ); assert_eq!( ::std::mem::align_of::<TScreen>(), 8usize, concat!("Alignment of ", stringify!(TScreen)) ); assert_eq!( unsafe { &(*(::std::ptr::null::<TScreen>())).nx as *const _ as usize }, 0usize, concat!( "Offset of field: ", stringify!(TScreen), "::", stringify!(nx) ) ); assert_eq!( unsafe { &(*(::std::ptr::null::<TScreen>())).ny as *const _ as usize }, 4usize, concat!( "Offset of field: ", stringify!(TScreen), "::", stringify!(ny) ) ); assert_eq!( unsafe { &(*(::std::ptr::null::<TScreen>())).nmax as *const _ as usize }, 8usize, concat!( "Offset of field: ", stringify!(TScreen), "::", stringify!(nmax) ) ); assert_eq!( unsafe { &(*(::std::ptr::null::<TScreen>())).data as *const _ as usize }, 16usize, concat!( "Offset of field: ", stringify!(TScreen), "::", stringify!(data) ) ); } extern "C" { #[link_name = "\u{1}__ZNK5ImTui7TScreen4sizeEv"] pub fn TScreen_size(this: *const root::ImTui::TScreen) -> ::std::os::raw::c_int; } extern "C" { #[link_name = "\u{1}__ZN5ImTui7TScreen5clearEv"] pub fn TScreen_clear(this: *mut root::ImTui::TScreen); } extern "C" { #[link_name = "\u{1}__ZN5ImTui7TScreen6resizeEii"] pub fn TScreen_resize( this: *mut root::ImTui::TScreen, pnx: ::std::os::raw::c_int, pny: ::std::os::raw::c_int, ); } impl TScreen { #[inline] pub unsafe fn size(&self) -> ::std::os::raw::c_int { TScreen_size(self) } 
#[inline] pub unsafe fn clear(&mut self) { TScreen_clear(self) } #[inline] pub unsafe fn resize( &mut self, pnx: ::std::os::raw::c_int, pny: ::std::os::raw::c_int, ) { TScreen_resize(self, pnx, pny) } } } extern "C" { #[link_name = "\u{1}__Z19ImTui_ImplText_Initv"] pub fn ImTui_ImplText_Init() -> bool; } extern "C" { #[link_name = "\u{1}__Z23ImTui_ImplText_Shutdownv"] pub fn ImTui_ImplText_Shutdown(); } extern "C" { #[link_name = "\u{1}__Z23ImTui_ImplText_NewFramev"] pub fn ImTui_ImplText_NewFrame(); } extern "C" { #[link_name = "\u{1}__Z29ImTui_ImplText_RenderDrawDataP10ImDrawDataPN5ImTui7TScreenE"] pub fn ImTui_ImplText_RenderDrawData( drawData: *mut root::ImDrawData, screen: *mut root::ImTui::TScreen, ); } pub mod std { #[allow(unused_imports)] use self::super::super::root; } extern "C" { #[link_name = "\u{1}__Z22ImTui_ImplNcurses_Initbff"] pub fn ImTui_ImplNcurses_Init( mouseSupport: bool, fps_active: f32, fps_idle: f32, ) -> *mut root::ImTui::TScreen; } extern "C" { #[link_name = "\u{1}__Z26ImTui_ImplNcurses_Shutdownv"] pub fn ImTui_ImplNcurses_Shutdown(); } extern "C" { #[link_name = "\u{1}__Z26ImTui_ImplNcurses_NewFramev"] pub fn ImTui_ImplNcurses_NewFrame() -> bool; } extern "C" { #[link_name = "\u{1}__Z28ImTui_ImplNcurses_DrawScreenb"] pub fn ImTui_ImplNcurses_DrawScreen(active: bool); } extern "C" { #[link_name = "\u{1}__Z30ImTui_ImplNcurses_ProcessEventv"] pub fn ImTui_ImplNcurses_ProcessEvent() -> bool; } }
/* automatically generated by rust-bindgen */ extern crate imgui_sys; #[allow(non_snake_case, non_camel_case_types, non_upper_case_globals)] pub mod root { #[allow(unused_imports)] use self::super::root; use imgui_sys::ImDrawData; pub mod ImTui { #[allow(unused_imports)] use self::super::super::root; pub type TCell = u32; #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct TScreen { pub nx: ::std::os::raw::c_int, pub ny: ::std::os::raw::c_int, pub nmax: ::std::os::raw::c_int, pub data: *mut root::ImTui::TCell, } #[test] fn bindgen_test_layout_TScreen() { assert_eq!( ::std::mem::size_of::<TScreen>(), 24usize, concat!("Size of: ", stringify!(TScreen)) ); assert_eq!( ::std::mem::align_of::<TScreen>(), 8usize, concat!("Alignment of ", stringify!(TScreen)) ); assert_eq!( unsafe { &(*(::std::ptr::null::<TScreen>())).nx as *const _ as usize }, 0usize, concat!( "Offset of field: ", stringify!(TScreen), "::", stringify!(nx) ) ); assert_eq!( unsafe { &(*(::std::ptr::null::<TScreen>())).ny as *const _ as usize }, 4usize, concat!( "Offset of field: ", stringify!(TScreen), "::", stringify!(ny) ) ); assert_eq!( unsafe { &(*(::std::ptr::null::<TScreen>())).nmax as *const _ as usize }, 8usize, concat!( "Offset of field: ", stringify!(TScreen), "::", stringify!(nmax) ) ); assert_eq!( unsafe { &(*(::std::ptr::null::<TScreen>())).data as *const _ as usize }, 16usize, concat!( "Offset of field: ", stringify!(TScreen), "::", stringify!(data) ) ); } extern "C" { #[link_name = "\u{1}__ZNK5ImTui7TScreen4sizeEv"] pub fn TScreen_size(this: *const root::ImTui::TScreen) -> ::std::os::raw::c_int; } extern "C" { #[link_name = "\u{1}__ZN5ImTui7TScreen5clearEv"] pub fn TScreen_clear(this: *mut root::ImTui::TScreen); } extern "C" { #[link_name = "\u{1}__ZN5ImTui7TScreen6resizeEii"] pub fn TScreen_resize( this: *mut root::ImTui::TScreen, pnx: ::std::os::raw::c_int, pny: ::std::os::raw::c_int, ); } impl TScreen { #[inline] pub unsafe fn size(&self) -> ::std::os::raw::c_int { TScreen_size(self) } 
#[inline] pub unsafe fn clear(&mut self) { TScreen_clear(self) } #[inline] pub unsafe fn resize( &mut self,
} } extern "C" { #[link_name = "\u{1}__Z19ImTui_ImplText_Initv"] pub fn ImTui_ImplText_Init() -> bool; } extern "C" { #[link_name = "\u{1}__Z23ImTui_ImplText_Shutdownv"] pub fn ImTui_ImplText_Shutdown(); } extern "C" { #[link_name = "\u{1}__Z23ImTui_ImplText_NewFramev"] pub fn ImTui_ImplText_NewFrame(); } extern "C" { #[link_name = "\u{1}__Z29ImTui_ImplText_RenderDrawDataP10ImDrawDataPN5ImTui7TScreenE"] pub fn ImTui_ImplText_RenderDrawData( drawData: *mut root::ImDrawData, screen: *mut root::ImTui::TScreen, ); } pub mod std { #[allow(unused_imports)] use self::super::super::root; } extern "C" { #[link_name = "\u{1}__Z22ImTui_ImplNcurses_Initbff"] pub fn ImTui_ImplNcurses_Init( mouseSupport: bool, fps_active: f32, fps_idle: f32, ) -> *mut root::ImTui::TScreen; } extern "C" { #[link_name = "\u{1}__Z26ImTui_ImplNcurses_Shutdownv"] pub fn ImTui_ImplNcurses_Shutdown(); } extern "C" { #[link_name = "\u{1}__Z26ImTui_ImplNcurses_NewFramev"] pub fn ImTui_ImplNcurses_NewFrame() -> bool; } extern "C" { #[link_name = "\u{1}__Z28ImTui_ImplNcurses_DrawScreenb"] pub fn ImTui_ImplNcurses_DrawScreen(active: bool); } extern "C" { #[link_name = "\u{1}__Z30ImTui_ImplNcurses_ProcessEventv"] pub fn ImTui_ImplNcurses_ProcessEvent() -> bool; } }
pnx: ::std::os::raw::c_int, pny: ::std::os::raw::c_int, ) { TScreen_resize(self, pnx, pny) }
function_block-function_prefix_line
[ { "content": "type HnItemId = u32;\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 0, "score": 67556.55634805921 }, { "content": "pub fn generate_bindings(imtui_path: &Path, imgui_include_path: &Path) -> Result<Bindings, Error> {\n\n let imtui_include_path = imtui_path.join(\"include\");\n\n let bindings = bindgen::builder()\n\n //.rust_target(RustTarget::Stable_1_33)\n\n .header(\"src/wrapper.hpp\")\n\n .clang_arg(\"-xc++\")\n\n .clang_arg(\"-std=c++14\")\n\n .clang_arg(format!(\"-I{}\", imtui_include_path.to_str().expect(\"No path\")))\n\n .clang_arg(format!(\"-I{}\", imgui_include_path.to_str().expect(\"No path\")))\n\n .clang_arg(\"-fkeep-inline-functions\")\n\n .enable_cxx_namespaces()\n\n .blacklist_type(\"ImDrawData\") // comes from imgui\n\n .whitelist_type(\"ImTui::TScreen\")\n\n .whitelist_function(\"ImTui_.*\")\n\n .opaque_type(\"std::*\")\n\n .raw_line(\"extern crate imgui_sys;\")\n\n .module_raw_line(\"root\", \"use imgui_sys::ImDrawData;\")\n\n .generate_inline_functions(true)\n\n .generate()\n\n .expect(\"Unable to generate bindings\");\n\n Ok(bindings)\n\n}\n", "file_path": "imtui-sys-bindgen/src/lib.rs", "rank": 1, "score": 56751.75847801179 }, { "content": "fn main() {\n\n let cwd = env::current_dir().expect(\"Failed to read current directory\");\n\n let imtui_path = cwd\n\n .join(\"../imtui-sys/third-party/imtui\")\n\n .canonicalize()\n\n .expect(\"Failed to find imtui\");\n\n let imgui_path = cwd\n\n .join(\"include\")\n\n .canonicalize()\n\n .expect(\"Failed to find local include path\");\n\n let bindings = generate_bindings(imtui_path.as_path(), imgui_path.as_path())\n\n .expect(\"Failed to generate bindings\");\n\n let output_path = cwd.join(\"../imtui-sys/src/bindings.rs\");\n\n bindings\n\n .write_to_file(&output_path)\n\n .expect(\"Failed to write bindings\");\n\n println!(\"Wrote bindings to {}\", output_path.to_string_lossy());\n\n}\n", "file_path": "imtui-sys-bindgen/src/main.rs", "rank": 2, "score": 54286.991893962164 }, { 
"content": "struct WindowData {\n\n title: String,\n\n window_content: WindowContent,\n\n hn_state: Rc<RefCell<HnState>>,\n\n active: bool,\n\n show_comments: bool,\n\n selected_story_id: Option<HnItemId>,\n\n hovered_story_id: Option<HnItemId>,\n\n hovered_comment_id: Option<HnItemId>,\n\n max_stories: Cell<u32>,\n\n}\n\n\n\nimpl WindowData {\n\n fn new(window_content: WindowContent, hn_state: &Rc<RefCell<HnState>>) -> WindowData {\n\n WindowData {\n\n title: String::from(\"[Y] Hacker News\"),\n\n window_content: window_content,\n\n hn_state: Rc::clone(hn_state),\n\n active: false,\n\n show_comments: false,\n", "file_path": "examples/hnterm/main.rs", "rank": 3, "score": 53763.30879025013 }, { "content": "fn main() -> io::Result<()> {\n\n println!(\"cargo:rerun-if-env-changed=PKG_CONFIG_PATH\");\n\n\n\n let mut build = cc::Build::new();\n\n let files = vec![\n\n \"third-party/imtui/src/imtui-impl-text.cpp\",\n\n \"third-party/imtui/src/imtui-impl-ncurses.cpp\"\n\n ];\n\n build\n\n .cpp(true)\n\n .include(\"third-party/imtui/include\")\n\n .include(\"../imgui-rs/imgui-sys/third-party/cimgui\")\n\n .flag(\"-std=c++17\")\n\n .files(files.iter())\n\n .compile(\"libimtui.a\");\n\n\n\n println!(\"cargo:rustc-link-lib=ncurses\");\n\n Ok(())\n\n}\n", "file_path": "imtui-sys/build.rs", "rank": 4, "score": 50322.15006171627 }, { "content": "fn set_color_scheme(context: &mut imgui::Context, dark: bool) {\n\n let light_colors = [\n\n (imgui::StyleColor::Text, [0.0, 0.0, 0.0, 1.0]),\n\n (imgui::StyleColor::TextDisabled, [0.6, 0.6, 0.6, 1.0]),\n\n (imgui::StyleColor::WindowBg, [0.96, 0.96, 0.94, 1.0]),\n\n (imgui::StyleColor::TitleBg, [1.0, 0.4, 0.0, 1.0]),\n\n (imgui::StyleColor::TitleBgActive, [1.0, 0.4, 0.0, 1.0]),\n\n (imgui::StyleColor::TitleBgCollapsed, [0.69, 0.25, 0.0, 1.0]),\n\n (imgui::StyleColor::ChildBg, [0.96, 0.96, 0.94, 1.0]),\n\n (imgui::StyleColor::PopupBg, [0.96, 0.96, 0.94, 1.0]),\n\n (imgui::StyleColor::ModalWindowDimBg, [0.0, 0.0, 0.0, 0.0])\n\n ];\n\n let 
dark_colors = [\n\n (imgui::StyleColor::Text, [0.0, 1.0, 0.0, 1.0]),\n\n (imgui::StyleColor::TextDisabled, [0.6, 0.6, 0.6, 1.0]),\n\n (imgui::StyleColor::WindowBg, [0.0, 0.0, 0.0, 1.0]),\n\n (imgui::StyleColor::TitleBg, [0.1, 0.2, 0.1, 1.0]),\n\n (imgui::StyleColor::TitleBgActive, [0.1, 0.2, 0.1, 1.0]),\n\n (imgui::StyleColor::TitleBgCollapsed, [0.5, 1.0, 0.5, 1.0]),\n\n (imgui::StyleColor::ChildBg, [0.0, 0.0, 0.0, 1.0]),\n\n (imgui::StyleColor::PopupBg, [0.0, 0.1, 0.0, 1.0]),\n\n (imgui::StyleColor::ModalWindowDimBg, [0.0, 0.0, 0.0, 0.0])\n\n ];\n\n let colors = if dark { dark_colors } else { light_colors };\n\n for (style_color, values) in colors.iter() {\n\n context.style_mut()[*style_color] = *values;\n\n }\n\n}\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 5, "score": 47042.19941724339 }, { "content": "fn main() {\n\n let mut imgui = imgui::Context::create();\n\n imgui.set_ini_filename(None);\n\n\n\n let mut imtui = imtui::Ncurses::init(true, 60.0, -1.0);\n\n let now = SystemTime::now();\n\n let mut nframes = 0 as i32;\n\n let mut fval = 123.0;\n\n\n\n loop {\n\n\n\n nframes += 1;\n\n imtui.set_active();\n\n imtui.new_frame();\n\n let ui = imgui.frame();\n\n let title = imgui::ImString::new(\"Hello, world!\");\n\n let window = imgui::Window::new(&title)\n\n .position([0.0, 0.0], imgui::Condition::FirstUseEver)\n\n .size([50.0, 10.0], imgui::Condition::FirstUseEver);\n\n if let Some(windowToken) = window.begin(&ui) {\n", "file_path": "examples/ncurses0.rs", "rank": 6, "score": 39350.841442229415 }, { "content": "fn main() {\n\n let mut imgui = Context::create();\n\n imgui.set_ini_filename(None);\n\n\n\n unsafe {\n\n let screen = imtui::sys::ImTui_ImplNcurses_Init(false, 60.0, -1.0);\n\n imtui::sys::ImTui_ImplText_Init();\n\n\n\n let now = SystemTime::now();\n\n let mut nframes = 0 as i32;\n\n let mut fval = 123.0;\n\n loop {\n\n if now.elapsed().unwrap().as_secs() > 10 {\n\n break;\n\n }\n\n\n\n imtui::sys::ImTui_ImplNcurses_NewFrame();\n\n 
imtui::sys::ImTui_ImplText_NewFrame();\n\n\n\n imgui::sys::igNewFrame();\n", "file_path": "examples/ncurses0-sys.rs", "rank": 7, "score": 38015.593902754496 }, { "content": "#[derive(Clap)]\n\nstruct Opts {\n\n #[clap(short, long, about = \"Wait for debugger at startup\")]\n\n debug: bool,\n\n #[clap(short, long, about = \"Verbose logging to stderr\")]\n\n verbose: bool,\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<(), Box<dyn error::Error>> {\n\n let opts = Opts::parse();\n\n if opts.debug {\n\n debug_here!();\n\n }\n\n\n\n if opts.verbose {\n\n let mut log_builder = env_logger::Builder::new();\n\n log_builder.target(env_logger::Target::Stderr)\n\n .filter_module(\"fetch_queue\", log::LevelFilter::Trace)\n\n .filter_module(\"reqwest::connect\", log::LevelFilter::Trace)\n\n .filter_module(\"reqwest::async_impl::client\", log::LevelFilter::Trace)\n", "file_path": "examples/hnterm/main.rs", "rank": 8, "score": 34279.68827404628 }, { "content": "struct UpdateStatus {\n\n update_in_progress: bool,\n\n last_update_time: Option<Instant>,\n\n next_update: Instant,\n\n}\n\n\n\nimpl UpdateStatus {\n\n fn new() -> UpdateStatus {\n\n UpdateStatus {\n\n update_in_progress: false,\n\n last_update_time: None,\n\n next_update: Instant::now()\n\n }\n\n }\n\n\n\n fn update_needed(&self) -> bool {\n\n if self.update_in_progress {\n\n return false;\n\n }\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 9, "score": 33132.20341492341 }, { "content": "struct HnState {\n\n hn_api: Rc<RefCell<HnApiClient>>,\n\n items: Rc<RefCell<HashMap<HnItemId, HnItem>>>,\n\n items_to_fetch: ItemFetchQueue,\n\n item_fetch_task: JoinHandle<()>,\n\n last_list_refresh: Option<HnRefreshResult>,\n\n}\n\n\n\nimpl HnState {\n\n pub fn new() -> HnState {\n\n let items = Rc::new(RefCell::new(HashMap::new()));\n\n let api = Rc::new(RefCell::new(HnApiClient::new()));\n\n let fetch_queue = ItemFetchQueue::new();\n\n let join_handle = fetch_queue.start(api.clone(), items.clone());\n\n HnState {\n\n 
hn_api: api,\n\n items: items,\n\n items_to_fetch: fetch_queue,\n\n item_fetch_task: join_handle,\n\n last_list_refresh: None,\n", "file_path": "examples/hnterm/main.rs", "rank": 10, "score": 33132.20341492341 }, { "content": "struct AppState {\n\n windows: Vec<WindowData>,\n\n hn_state: Rc<RefCell<HnState>>,\n\n list_update_status: Rc<RefCell<UpdateStatus>>,\n\n show_status_window: bool,\n\n active_window: Option<usize>,\n\n view_mode: StoryListViewMode,\n\n}\n\n\n\nimpl AppState {\n\n fn new() -> AppState {\n\n let hn_state = Rc::new(RefCell::new(HnState::new()));\n\n AppState {\n\n windows: vec![\n\n WindowData::new(WindowContent::Top, &hn_state),\n\n WindowData::new(WindowContent::Top, &hn_state),\n\n WindowData::new(WindowContent::Top, &hn_state),\n\n ],\n\n hn_state: hn_state,\n\n list_update_status: Rc::new(RefCell::new(UpdateStatus::new())),\n", "file_path": "examples/hnterm/main.rs", "rank": 11, "score": 33132.20341492341 }, { "content": "struct HntermApp {\n\n imgui: imgui::Context,\n\n imtui: imtui::Ncurses,\n\n state: AppState,\n\n}\n\n\n\nimpl HntermApp {\n\n fn new(imgui: imgui::Context, imtui: imtui::Ncurses) -> HntermApp {\n\n HntermApp {\n\n imgui,\n\n imtui,\n\n state: AppState::new(),\n\n }\n\n }\n\n\n\n fn process_frame(&mut self) -> bool {\n\n self.state.update();\n\n\n\n for (i, wd) in self.state.windows.iter_mut().enumerate() {\n\n wd.title = format!(\n", "file_path": "examples/hnterm/main.rs", "rank": 12, "score": 33132.20341492341 }, { "content": "#[derive(Deserialize, Clone)]\n\nstruct HnPollItem {\n\n id: HnItemId,\n\n}\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 13, "score": 32107.64203814756 }, { "content": "#[derive(Deserialize, Clone)]\n\n#[serde(default)]\n\nstruct HnCommentItem {\n\n id: HnItemId,\n\n by: String,\n\n score: i32,\n\n #[serde(with = \"chrono::serde::ts_seconds\")]\n\n time: DateTime<Utc>,\n\n text: String,\n\n children: Vec<HnItemId>,\n\n parent: HnItemId,\n\n}\n\n\n", "file_path": 
"examples/hnterm/main.rs", "rank": 14, "score": 32107.51254636933 }, { "content": "#[derive(Deserialize, Clone)]\n\n#[serde(default)]\n\nstruct HnJobItem {\n\n id: HnItemId,\n\n by: String,\n\n score: i32,\n\n #[serde(with = \"chrono::serde::ts_seconds\")]\n\n time: DateTime<Utc>,\n\n title: String,\n\n url: String,\n\n domain: String,\n\n}\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 15, "score": 32107.51254636933 }, { "content": "#[derive(Deserialize, Clone)]\n\n#[serde(default)]\n\nstruct HnStoryItem {\n\n id: HnItemId,\n\n by: String,\n\n score: i32,\n\n #[serde(with = \"chrono::serde::ts_seconds\")]\n\n time: DateTime<Utc>,\n\n text: String,\n\n title: String,\n\n url: String,\n\n domain: String,\n\n descendants: u32,\n\n children: Vec<HnItemId>,\n\n}\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 16, "score": 32107.51254636933 }, { "content": "#[derive(Deserialize)]\n\nstruct HnUpdatesResponse {\n\n items: Vec<HnItemId>,\n\n profiles: Vec<String>,\n\n}\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 17, "score": 32104.88377191901 }, { "content": "struct HnApiClient {\n\n last_url: RefCell<Option<reqwest::Url>>,\n\n request_bytes: Cell<usize>,\n\n request_count: Cell<u32>,\n\n}\n\n\n\nimpl HnApiClient {\n\n fn new() -> HnApiClient {\n\n HnApiClient {\n\n last_url: RefCell::new(None),\n\n request_bytes: Cell::new(0),\n\n request_count: Cell::new(0),\n\n }\n\n }\n\n\n\n async fn fetch_url<T: for<'de> Deserialize<'de>>(&self, url: reqwest::Url) -> Result<T> {\n\n let url_str = String::from(url.as_str());\n\n let response = reqwest::get(url.clone())\n\n .await.wrap_err(format!(\"Failed to fetch data {}\", url_str))?;\n\n let result = response.text().await?;\n", "file_path": "examples/hnterm/main.rs", "rank": 18, "score": 32104.88377191901 }, { "content": "struct ItemFetchQueue {\n\n queue: Rc<RefCell<VecDeque<HnItemId>>>,\n\n pending: Rc<RefCell<HashSet<HnItemId>>>,\n\n waker: Rc<RefCell<Option<Waker>>>,\n\n}\n\n\n\nimpl ItemFetchQueue 
{\n\n fn new() -> ItemFetchQueue {\n\n ItemFetchQueue {\n\n queue: Rc::new(RefCell::new(VecDeque::new())),\n\n pending: Rc::new(RefCell::new(HashSet::new())),\n\n waker: Rc::new(RefCell::new(None)),\n\n }\n\n }\n\n\n\n fn queue_item(&self, item_id: HnItemId) {\n\n if !self.pending.borrow().contains(&item_id) {\n\n log::trace!(target: \"fetch_queue\", \"queued {}\", item_id);\n\n self.pending.borrow_mut().insert(item_id);\n\n self.queue.borrow_mut().push_back(item_id);\n", "file_path": "examples/hnterm/main.rs", "rank": 19, "score": 32104.88377191901 }, { "content": "struct HnRefreshResult {\n\n top_ids: Vec<HnItemId>,\n\n show_ids: Vec<HnItemId>,\n\n ask_ids: Vec<HnItemId>,\n\n new_ids: Vec<HnItemId>,\n\n changed_ids: HnUpdatesResponse,\n\n}\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 20, "score": 32104.88377191901 }, { "content": "#[derive(Deserialize, Clone)]\n\nstruct HnPollOptItem {\n\n id: HnItemId,\n\n}\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 21, "score": 31182.551123789224 }, { "content": "struct DrawContext<'a, 'b> {\n\n imtui: &'a imtui::Ncurses,\n\n ui: &'a mut imgui::Ui<'b>,\n\n}\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 22, "score": 29889.326386854613 }, { "content": "use std::io;\n\n\n", "file_path": "imtui-sys/build.rs", "rank": 23, "score": 21976.95095614308 }, { "content": "mod bindings;\n\n\n\npub use crate::bindings::*;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn it_works() {\n\n assert_eq!(2 + 2, 4);\n\n }\n\n}\n", "file_path": "imtui-sys/src/lib.rs", "rank": 31, "score": 20701.93787471728 }, { "content": "extern crate imtui_sys_bindgen;\n\n\n\nuse imtui_sys_bindgen::generate_bindings;\n\nuse std::env;\n\n\n", "file_path": "imtui-sys-bindgen/src/main.rs", "rank": 33, "score": 19556.049991519365 }, { "content": "extern crate bindgen;\n\n#[macro_use]\n\nextern crate failure;\n\n\n\nuse bindgen::{Bindings, RustTarget};\n\nuse failure::Error;\n\nuse std::path::Path;\n\n\n", "file_path": 
"imtui-sys-bindgen/src/lib.rs", "rank": 34, "score": 19554.412342462103 }, { "content": "#include \"imtui/imtui.h\"\n\n#include \"imtui/imtui-impl-ncurses.h\"\n", "file_path": "imtui-sys-bindgen/src/wrapper.hpp", "rank": 35, "score": 19551.783090293706 }, { "content": "`imtui-rs`\n\n==========\n\n\n\n`imtui-rs` is a rust crate that provides safe rust bindings for [`imtui`](https://github.com/ggerganov/imtui).\n\n\n\n# Screenshots\n\n\n\nHere is a screenshot of a rust port of the `hnterm` example from `imtui`:\n\n\n\n[![imtui-rs-hnterm-demo](https://asciinema.org/a/3qbgy8bHhK9oVhmJWlUAUER3p.svg)](https://asciinema.org/a/3qbgy8bHhK9oVhmJWlUAUER3p)\n\n\n\n# Build `imtui-rs`\n\n\n\n```bash\n\ngit clone --recursive https://github.com/visigoth/imtui-rs\n\ncd imtui-rs\n\ncargo build\n\n```\n\n\n\n# Build and Run `hnterm`\n\n\n\nThis example illustrates combining `imtui-rs` with `tokio` to create a single threaded asynchronous terminal app with an interactive UI.\n\n\n\n```bash\n\ncargo run --example hnterm\n\n```\n\n\n\n## Debugging `hnterm`\n\n\n\n```bash\n\ncargo run --example hnterm -- -d\n\n```\n", "file_path": "README.md", "rank": 36, "score": 13559.058818675607 }, { "content": "use imgui;\n\nuse imgui::internal::{RawCast};\n\nuse std;\n\n\n\npub use imtui_sys::root as sys;\n\n\n\npub struct Ncurses {\n\n screen: *mut sys::ImTui::TScreen,\n\n is_active: bool,\n\n}\n\n\n\nimpl Ncurses {\n\n pub fn init(mouse_support: bool, active_fps: f32, idle_fps: f32) -> Ncurses {\n\n let screen: *mut sys::ImTui::TScreen;\n\n unsafe {\n\n screen = sys::ImTui_ImplNcurses_Init(mouse_support, active_fps, idle_fps);\n\n sys::ImTui_ImplText_Init();\n\n }\n\n Ncurses {\n\n screen: screen,\n", "file_path": "src/lib.rs", "rank": 37, "score": 11.290112912589512 }, { "content": "\n\n pub fn render(&self, draw_data: &imgui::DrawData) {\n\n unsafe {\n\n let raw_ptr = draw_data.raw() as *const imgui::sys::ImDrawData as *mut imgui::sys::ImDrawData;\n\n sys::ImTui_ImplText_RenderDrawData(raw_ptr, 
self.screen);\n\n sys::ImTui_ImplNcurses_DrawScreen(self.is_active);\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for Ncurses {\n\n fn drop(&mut self) {\n\n unsafe {\n\n sys::ImTui_ImplText_Shutdown();\n\n sys::ImTui_ImplNcurses_Shutdown();\n\n }\n\n self.screen = std::ptr::null_mut();\n\n self.is_active = false;\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 38, "score": 10.091445815402913 }, { "content": "extern crate variant_count;\n\n#[macro_use]\n\nextern crate lazy_static;\n\n#[macro_use]\n\nextern crate maplit;\n\n#[macro_use]\n\nextern crate debug_here;\n\n\n\nuse imtui;\n\nuse std::collections::HashSet;\n\nuse futures::future;\n\nuse futures::stream::{self, Stream, StreamExt};\n\nuse std::task::Waker;\n\nuse std::task::Poll;\n\nuse std::pin::Pin;\n\nuse std::future::Future;\n\nuse std::collections::VecDeque;\n\nuse tokio::task::JoinHandle;\n\nuse std::time::Instant;\n\nuse std::fmt;\n", "file_path": "examples/hnterm/main.rs", "rank": 39, "score": 9.268758016943117 }, { "content": " imgui::sys::igSameLine(0.0, -1.0);\n\n\n\n let s6 = std::ffi::CString::new(\"##float\").expect(\"\");\n\n let s7 = std::ffi::CString::new(\"%.3f\").expect(\"\");\n\n imgui::sys::igSliderFloat(s6.as_ptr(), &mut fval, 0.0, 10.0, s7.as_ptr(), 1.0);\n\n imgui::sys::igEnd();\n\n\n\n //imtui::sys::ShowDemoWindow(&demo);\n\n\n\n imgui::sys::igRender();\n\n\n\n imtui::sys::ImTui_ImplText_RenderDrawData(imgui::sys::igGetDrawData(), screen);\n\n imtui::sys::ImTui_ImplNcurses_DrawScreen(true);\n\n }\n\n\n\n imtui::sys::ImTui_ImplNcurses_Shutdown();\n\n }\n\n}\n", "file_path": "examples/ncurses0-sys.rs", "rank": 40, "score": 8.45283949191129 }, { "content": " is_active: false,\n\n }\n\n }\n\n\n\n pub fn set_active(&mut self) {\n\n self.is_active = true;\n\n }\n\n\n\n pub fn set_inactive(&mut self) {\n\n self.is_active = false;\n\n }\n\n\n\n pub fn new_frame(&self) -> bool {\n\n let input_pending: bool;\n\n unsafe {\n\n input_pending = sys::ImTui_ImplNcurses_NewFrame();\n\n 
sys::ImTui_ImplText_NewFrame();\n\n }\n\n input_pending\n\n }\n", "file_path": "src/lib.rs", "rank": 41, "score": 8.153477675643332 }, { "content": " .init();\n\n }\n\n\n\n let mut imgui = imgui::Context::create();\n\n imgui.set_ini_filename(None);\n\n let imtui = imtui::Ncurses::init(true, 60.0, -1.0);\n\n\n\n set_color_scheme(&mut imgui, false);\n\n\n\n let local_set = tokio::task::LocalSet::new();\n\n\n\n local_set.run_until(async move {\n\n let mut app = HntermApp::new(imgui, imtui);\n\n\n\n let future_fn = |cx: &mut Context| {\n\n if app.process_frame() {\n\n cx.waker().wake_by_ref();\n\n Poll::Pending\n\n } else {\n\n Poll::Ready(())\n\n }\n\n };\n\n\n\n let future = poll_fn(future_fn);\n\n future.await\n\n }).await;\n\n Ok(())\n\n}\n", "file_path": "examples/hnterm/main.rs", "rank": 42, "score": 6.529497185979714 }, { "content": " self.wake();\n\n }\n\n }\n\n\n\n fn queue_items<'a>(&mut self, items: impl IntoIterator<Item = &'a HnItemId>) {\n\n let new = items.into_iter().filter(|&x| !self.pending.borrow().contains(x)).map(|&x| x).collect::<Vec<HnItemId>>();\n\n self.pending.borrow_mut().extend(new.clone());\n\n self.queue.borrow_mut().extend(new.clone());\n\n new.into_iter().for_each(|x| log::trace!(target: \"fetch_queue\", \"queued {}\", x));\n\n self.wake()\n\n }\n\n\n\n fn len(&self) -> usize {\n\n self.pending.borrow().len()\n\n }\n\n\n\n fn wake(&self) {\n\n let w = self.waker.borrow();\n\n match &*w {\n\n Some(waker) => waker.wake_by_ref(),\n", "file_path": "examples/hnterm/main.rs", "rank": 43, "score": 6.259509767270072 }, { "content": "use imtui;\n\nuse imgui;\n\nuse std::time::SystemTime;\n\nuse std;\n\n\n", "file_path": "examples/ncurses0.rs", "rank": 44, "score": 5.745853321002431 }, { "content": "use imtui;\n\nuse imgui::{Context};\n\nuse std::time::SystemTime;\n\nuse std;\n\nuse std::os as os;\n\n\n", "file_path": "examples/ncurses0-sys.rs", "rank": 45, "score": 5.524842151133669 }, { "content": " log::trace!(target: \"fetch_queue\", 
\"received unknown item type\");\n\n }\n\n }\n\n },\n\n Err(e) => {\n\n log::error!(target: \"fetch_queue\", \"failed to fetch {}: {:?}\", item_id, e);\n\n }\n\n };\n\n pending_rc.borrow_mut().remove(&item_id);\n\n future::ready(())\n\n });\n\n tokio::task::spawn_local(fut)\n\n }\n\n}\n\n\n\nimpl Stream for ItemFetchQueue {\n\n type Item = HnItemId;\n\n\n\n fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n\n let state = self.get_mut();\n", "file_path": "examples/hnterm/main.rs", "rank": 46, "score": 5.473650951684775 }, { "content": "use std::time::{Duration};\n\nuse imgui;\n\nuse variant_count::VariantCount;\n\nuse std::collections::HashMap;\n\nuse std::vec::Vec;\n\nuse chrono::{DateTime, Utc};\n\nuse chrono::naive::NaiveDateTime;\n\nuse std::ops::Add;\n\nuse reqwest;\n\nuse std::error;\n\nuse futures::task::LocalSpawnExt;\n\nuse futures::executor::LocalPool;\n\nuse futures::task::Context;\n\nuse futures::future::poll_fn;\n\nuse std::cell::{Cell, RefCell};\n\nuse std::rc::Rc;\n\nuse clap::Clap;\n\nuse eyre::{WrapErr, Result};\n\nuse serde::{Deserialize};\n\nuse serde_json::Value;\n\nuse timeago;\n\nuse log;\n\nuse env_logger;\n\n\n\n#[derive(VariantCount, PartialEq, Eq, Hash, Clone)]\n", "file_path": "examples/hnterm/main.rs", "rank": 47, "score": 5.091956835849988 }, { "content": " show_status_window: true,\n\n active_window: Some(0),\n\n view_mode: StoryListViewMode::Normal,\n\n }\n\n }\n\n\n\n fn process_input(&mut self, ui: &imgui::Ui) -> bool {\n\n if ui.is_key_pressed('+' as u32) && self.windows.len() < 3 {\n\n self.windows.push(WindowData::new(WindowContent::Top, &self.hn_state))\n\n }\n\n\n\n !ui.is_key_pressed('q' as u32)\n\n }\n\n\n\n fn update(&mut self) {\n\n // Update the list of items to be shown every 30 seconds\n\n if self.list_update_status.borrow().update_needed() {\n\n {\n\n let mut update_status = self.list_update_status.borrow_mut();\n\n update_status.update_in_progress = true;\n", "file_path": 
"examples/hnterm/main.rs", "rank": 48, "score": 5.066297030748137 }, { "content": " \"[{}] Hacker News ({})\",\n\n i,\n\n CONTENT_TITLE_MAP.get(&wd.window_content).unwrap()\n\n );\n\n }\n\n\n\n self.imtui.set_active();\n\n self.imtui.new_frame();\n\n\n\n let mut ui = self.imgui.frame();\n\n if !self.state.process_input(&ui) {\n\n return false;\n\n }\n\n\n\n let draw_context = DrawContext {\n\n imtui: &self.imtui,\n\n ui: &mut ui,\n\n };\n\n\n\n HntermApp::render(&self.state, &draw_context);\n", "file_path": "examples/hnterm/main.rs", "rank": 49, "score": 4.742836750293235 }, { "content": " None => ()\n\n }\n\n }\n\n\n\n fn start(&self, hn_api: Rc<RefCell<HnApiClient>>, items: Rc<RefCell<HashMap<HnItemId, HnItem>>>) -> JoinHandle<()> {\n\n let item_ids_rc = self.queue.clone();\n\n let waker_rc = self.waker.clone();\n\n let pending_rc = self.pending.clone();\n\n let fut = stream::poll_fn(move |cx| {\n\n match item_ids_rc.borrow_mut().pop_front() {\n\n Some(item_id) => {\n\n log::trace!(target: \"fetch_queue\", \"dequeued {}\", item_id);\n\n Poll::Ready(Some(item_id))\n\n },\n\n None => {\n\n waker_rc.borrow_mut().replace(cx.waker().clone());\n\n Poll::Pending\n\n }\n\n }\n\n }).map(move |item_id| {\n", "file_path": "examples/hnterm/main.rs", "rank": 50, "score": 4.658539459293403 }, { "content": " let draw_data = ui.render();\n\n self.imtui.render(draw_data);\n\n true\n\n }\n\n\n\n fn render(state: &AppState, draw_context: &DrawContext) {\n\n if state.windows.len() == 0 {\n\n return;\n\n }\n\n\n\n let display_size = draw_context.ui.io().display_size;\n\n {\n\n let windows_to_draw = if display_size[0] < 50.0 {\n\n &state.windows.as_slice()[0..1]\n\n } else {\n\n state.windows.as_slice()\n\n };\n\n\n\n let window_width = display_size[0] / windows_to_draw.len() as f32;\n\n let mut window_height = display_size[1];\n", "file_path": "examples/hnterm/main.rs", "rank": 51, "score": 4.442055318067076 }, { "content": " let c = hn_api.clone();\n\n async move {\n\n 
log::trace!(target: \"fetch_queue\", \"starting fetch for {}\", item_id);\n\n (item_id, c.borrow().fetch_item(item_id).await)\n\n }\n\n }).buffer_unordered(10)\n\n .for_each(move |(item_id, result)| {\n\n match result {\n\n Ok(item) => {\n\n log::trace!(target: \"fetch_queue\", \"successfully fetched {}\", item_id);\n\n // Need a copy in order to move the value into the map (without Rc).\n\n let item_clone = item.clone();\n\n let mut item_map = items.borrow_mut();\n\n match item {\n\n HnItem::Story(story) => { item_map.insert(story.id, item_clone); }\n\n HnItem::Comment(comment) => { item_map.insert(comment.id, item_clone); }\n\n HnItem::Job(job) => { item_map.insert(job.id, item_clone); }\n\n HnItem::Poll(poll) => { item_map.insert(poll.id, item_clone); }\n\n HnItem::PollOpt(pollopt) => { item_map.insert(pollopt.id, item_clone); }\n\n _ => {\n", "file_path": "examples/hnterm/main.rs", "rank": 52, "score": 4.42015228913287 }, { "content": " match state.queue.borrow_mut().pop_front() {\n\n Some(id) => Poll::Ready(Some(id)),\n\n None => {\n\n state.waker.borrow_mut().replace(cx.waker().clone());\n\n Poll::Pending\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 53, "score": 4.238901894900349 }, { "content": " }\n\n\n\n let state_ref = Rc::clone(&self.hn_state);\n\n let update_status_ref = Rc::clone(&self.list_update_status);\n\n\n\n let fetch_and_assign = async move {\n\n let result = {\n\n state_ref.borrow().fetch().await\n\n };\n\n match result {\n\n Ok(result) => {\n\n let mut state = state_ref.borrow_mut();\n\n\n\n // Update the items in the changed list that have\n\n // already been downloaded\n\n let items = state.items.clone();\n\n result.changed_ids.items.clone()\n\n .into_iter()\n\n .filter(move |id| items.borrow().contains_key(id))\n\n .for_each(|id| state.items_to_fetch.queue_item(id));\n", "file_path": "examples/hnterm/main.rs", "rank": 54, "score": 3.953859066733959 }, { "content": "impl Default for HnCommentItem {\n\n fn 
default() -> Self {\n\n HnCommentItem {\n\n id: 0,\n\n by: String::from(\"\"),\n\n score: 0,\n\n time: DateTime::from_utc(NaiveDateTime::from_timestamp(0, 0), Utc),\n\n text: String::from(\"\"),\n\n children: vec![],\n\n parent: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl HnItem {\n\n fn from_json_value(value: Value) -> Result<HnItem> {\n\n match &value[\"type\"] {\n\n Value::String(s) => {\n\n match s.as_str() {\n\n \"story\" => {\n", "file_path": "examples/hnterm/main.rs", "rank": 55, "score": 3.516226133281481 }, { "content": " ui.text(format!(\"NFrames = {}\", nframes));\n\n\n\n let imgui_io = ui.io();\n\n ui.text(format!(\"Mouse Post: x = {}, y = {}\", imgui_io.mouse_pos[0], imgui_io.mouse_pos[1]));\n\n ui.text(format!(\"Time per frame {0:.3} ms/frame ({1:.1} FPS)\", 1000.0 / imgui_io.framerate, imgui_io.framerate));\n\n\n\n ui.text(\"Float:\");\n\n ui.same_line(0.0);\n\n\n\n let range = std::ops::RangeInclusive::new(0.0, 1000.0);\n\n let label = imgui::ImString::new(\"##float\");\n\n let slider_builder = imgui::Slider::new(&label, range);\n\n slider_builder.build(&ui, &mut fval);\n\n windowToken.end(&ui);\n\n }\n\n\n\n let draw_data = ui.render();\n\n imtui.render(draw_data);\n\n }\n\n}\n", "file_path": "examples/ncurses0.rs", "rank": 56, "score": 3.469531205272857 }, { "content": " descendants: 0,\n\n children: vec![],\n\n }\n\n }\n\n}\n\n\n\nimpl Default for HnJobItem {\n\n fn default() -> Self {\n\n HnJobItem {\n\n id: 0,\n\n by: String::from(\"\"),\n\n score: 0,\n\n time: DateTime::from_utc(NaiveDateTime::from_timestamp(0, 0), Utc),\n\n title: String::from(\"\"),\n\n url: String::from(\"\"),\n\n domain: String::from(\"\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 57, "score": 3.392306346668649 }, { "content": "#[derive(Clone)]\n\nenum HnItem {\n\n Unknown,\n\n Story(HnStoryItem),\n\n Comment(HnCommentItem),\n\n Job(HnJobItem),\n\n Poll(HnPollItem),\n\n PollOpt(HnPollOptItem),\n\n}\n\n\n\nimpl Default for HnStoryItem {\n\n fn default() 
-> Self {\n\n HnStoryItem {\n\n id: 0,\n\n by: String::from(\"\"),\n\n score: 0,\n\n time: DateTime::from_utc(NaiveDateTime::from_timestamp(0, 0), Utc),\n\n text: String::from(\"\"),\n\n title: String::from(\"\"),\n\n url: String::from(\"\"),\n\n domain: String::from(\"\"),\n", "file_path": "examples/hnterm/main.rs", "rank": 58, "score": 2.827916934508612 }, { "content": "\n\n imgui::sys::igSetNextWindowPos(imgui::sys::ImVec2 {x: 4.0, y: 2.0}, imgui::sys::ImGuiCond_Once as i32, imgui::sys::ImVec2 {x: 0.0, y: 0.0});\n\n imgui::sys::igSetNextWindowSize(imgui::sys::ImVec2{x: 50.0, y: 10.0}, imgui::sys::ImGuiCond_Once as i32);\n\n let s1 = std::ffi::CString::new(\"Hello, world!\").expect(\"\");\n\n let mut p_open = false;\n\n imgui::sys::igBegin(s1.as_ptr(), &mut p_open, 0);\n\n nframes += 1;\n\n\n\n let s2 = std::ffi::CString::new(\"NFrames = %d\").expect(\"\");\n\n imgui::sys::igText(s2.as_ptr(), nframes);\n\n\n\n let s3 = std::ffi::CString::new(\"Mouse Pos : x = %g, y = %g\").expect(\"\");\n\n let imgui_io = *imgui::sys::igGetIO();\n\n imgui::sys::igText(s3.as_ptr(), imgui_io.MousePos.x as os::raw::c_double, imgui_io.MousePos.y as os::raw::c_double);\n\n\n\n let s4 = std::ffi::CString::new(\"Time per frame %.3f ms/frame (%.1f FPS)\").expect(\"\");\n\n imgui::sys::igText(s4.as_ptr(), 1000.0 / imgui_io.Framerate as os::raw::c_double, imgui_io.Framerate as os::raw::c_double);\n\n\n\n let s5 = std::ffi::CString::new(\"Float:\").expect(\"\");\n\n imgui::sys::igText(s5.as_ptr());\n", "file_path": "examples/ncurses0-sys.rs", "rank": 59, "score": 2.8214860346185993 }, { "content": "#[derive(VariantCount, PartialEq, Eq, Hash, Clone)]\n\nenum WindowContent {\n\n Top,\n\n Show,\n\n Ask,\n\n New,\n\n}\n\n\n\nlazy_static! {\n\n static ref CONTENT_TITLE_MAP: HashMap<WindowContent, &'static str> = {\n\n hashmap! 
{\n\n WindowContent::Top => \"Top\",\n\n WindowContent::Show => \"Show\",\n\n WindowContent::Ask => \"Ask\",\n\n WindowContent::New => \"New\",\n\n }\n\n };\n\n}\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 60, "score": 2.455834113840361 }, { "content": " None\n\n }\n\n } {\n\n let color_stack = match self.hovered_story_id {\n\n Some(hovered_story_id) => {\n\n if self.active && hovered_story_id == *story_id {\n\n let style = ui.clone_style();\n\n let text_color = ui.push_style_color(\n\n imgui::StyleColor::Text,\n\n style[imgui::StyleColor::WindowBg]\n\n );\n\n let background_color = ui.push_style_color(\n\n imgui::StyleColor::WindowBg,\n\n style[imgui::StyleColor::Text]\n\n );\n\n\n\n let mut p0 = ui.cursor_screen_pos();\n\n p0[0] = p0[0] + 1.0;\n\n let mut p1 = p0;\n\n p1[0] = p1[0] + ui.calc_text_size(&imgui::ImString::new(&story.title), false, -1.0)[0] + 4.0;\n", "file_path": "examples/hnterm/main.rs", "rank": 61, "score": 2.451610724722227 }, { "content": " selected_story_id: None,\n\n hovered_story_id: None,\n\n hovered_comment_id: None,\n\n max_stories: Cell::new(10),\n\n }\n\n }\n\n\n\n fn set_active(&mut self, active: bool) {\n\n self.active = active;\n\n }\n\n\n\n fn render(&self, state: &AppState, draw_context: &DrawContext, pos: &(f32, f32), size: &(f32, f32)) {\n\n let title = imgui::ImString::new(&self.title);\n\n let window = imgui::Window::new(&title)\n\n .position([pos.0, pos.1], imgui::Condition::Always)\n\n .size([size.0, size.1], imgui::Condition::Always)\n\n .flags(imgui::WindowFlags::NO_COLLAPSE |\n\n imgui::WindowFlags::NO_RESIZE |\n\n imgui::WindowFlags::NO_MOVE |\n\n imgui::WindowFlags::NO_SCROLLBAR);\n", "file_path": "examples/hnterm/main.rs", "rank": 62, "score": 1.945316238488597 }, { "content": " if let Some(last_list_refresh) = &self.hn_state.borrow().last_list_refresh {\n\n for window in self.windows.iter_mut() {\n\n let story_ids = match window.window_content {\n\n WindowContent::Top => &last_list_refresh.top_ids,\n\n 
WindowContent::Show => &last_list_refresh.show_ids,\n\n WindowContent::Ask => &last_list_refresh.ask_ids,\n\n WindowContent::New => &last_list_refresh.new_ids,\n\n };\n\n\n\n // TODO: detect when selected story id is no longer in the data\n\n // TODO: detect when selected story id would no longer be visible on screen\n\n if window.hovered_story_id.is_none() {\n\n window.hovered_story_id = Some(story_ids[0]);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 63, "score": 1.9231299538235502 }, { "content": " state.last_list_refresh = Some(result);\n\n },\n\n _ => ()\n\n }\n\n\n\n update_status_ref.borrow_mut().set_last_update(Instant::now());\n\n };\n\n tokio::task::spawn_local(fetch_and_assign);\n\n }\n\n\n\n let active_window = self.active_window;\n\n for (i, window) in self.windows.iter_mut().enumerate() {\n\n let active = match active_window {\n\n Some(index) => i == index,\n\n _ => false,\n\n };\n\n window.set_active(active);\n\n }\n\n\n\n // Update selected story id\n", "file_path": "examples/hnterm/main.rs", "rank": 64, "score": 1.8732382622276356 }, { "content": " if let Some(color_stack) = color_stack {\n\n color_stack.1.pop(ui);\n\n color_stack.0.pop(ui);\n\n }\n\n\n\n ui.text_disabled(format!(\" ({})\", &story.domain));\n\n if state.view_mode != StoryListViewMode::Micro {\n\n let since = timeago::Formatter::new().convert_chrono(story.time, Utc::now());\n\n ui.text_disabled(format!(\" {} points by {} {} | {} comments\", story.score, &story.by, &since, story.descendants))\n\n }\n\n\n\n let screen_pos = ui.cursor_screen_pos();\n\n if screen_pos[1] + 3.0 > size.1 {\n\n self.max_stories.set((i + 1) as u32);\n\n break;\n\n } else {\n\n if i == (self.max_stories.get() - 1) as usize && screen_pos[1] + 2.0 < size.1 {\n\n self.max_stories.set(self.max_stories.get() + 1);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn render_single_story(&self, draw_context: &DrawContext, window_token: &imgui::WindowToken, pos: &(f32, f32), size: &(f32, 
f32)) {\n\n }\n\n}\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 65, "score": 1.8067358270662526 }, { "content": " match self.last_update_time {\n\n Some(then) => Instant::now().duration_since(then) >= Duration::new(30, 0),\n\n None => true\n\n }\n\n }\n\n\n\n fn set_last_update(&mut self, t: Instant) {\n\n self.last_update_time = Some(t);\n\n self.next_update = t.add(Duration::new(30, 0));\n\n self.update_in_progress = false;\n\n }\n\n}\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 66, "score": 1.5922978480477004 }, { "content": " if state.show_status_window {\n\n window_height -= STATUS_WINDOW_HEIGHT;\n\n }\n\n\n\n let window_size = (window_width, window_height);\n\n\n\n let mut window_pos = (0.0, 0.0);\n\n let num_windows = windows_to_draw.len();\n\n for (i, wd) in windows_to_draw.iter().enumerate() {\n\n let mut actual_window_size = window_size;\n\n if i != num_windows - 1 {\n\n actual_window_size.0 = (actual_window_size.0 - 1.1).floor();\n\n }\n\n wd.render(state, draw_context, &window_pos, &actual_window_size);\n\n window_pos.0 = (window_pos.0 + window_width).ceil();\n\n }\n\n }\n\n\n\n // Draw the status window\n\n if state.show_status_window {\n", "file_path": "examples/hnterm/main.rs", "rank": 67, "score": 1.5645989448013382 }, { "content": "\n\n self.last_url.replace(Some(url));\n\n self.request_bytes.set(self.request_bytes.get() + result.len());\n\n self.request_count.set(self.request_count.get() + 1);\n\n\n\n serde_json::from_str::<T>(result.as_str()).wrap_err(\n\n format!(\"Failed to parse response from {}\", url_str)\n\n )\n\n }\n\n\n\n async fn fetch_item(&self, item_id: u32) -> Result<HnItem> {\n\n HnItem::from_json_value(self.fetch_item_json(item_id).await?)\n\n }\n\n\n\n async fn fetch_item_json(&self, item_id: u32) -> Result<Value> {\n\n let base_url = reqwest::Url::parse(\n\n \"https://hacker-news.firebaseio.com/v0/item/\"\n\n ).unwrap();\n\n let item_path = format!(\"{}.json\", item_id);\n\n let url = 
base_url.join(&item_path).unwrap();\n\n self.fetch_url::<Value>(url).await\n\n }\n\n}\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 68, "score": 1.504250995997812 }, { "content": "\n\n let last_list_refresh = hn_state.last_list_refresh.as_ref().unwrap();\n\n let story_ids = match self.window_content {\n\n WindowContent::Top => &last_list_refresh.top_ids,\n\n WindowContent::Show => &last_list_refresh.show_ids,\n\n WindowContent::Ask => &last_list_refresh.ask_ids,\n\n WindowContent::New => &last_list_refresh.new_ids,\n\n };\n\n\n\n let num_to_show = (self.max_stories.get() as usize).min(story_ids.len());\n\n let ui = &draw_context.ui;\n\n for (i, story_id) in story_ids[..num_to_show].iter().enumerate() {\n\n // TODO: draw jobs too?\n\n if let Some(story) = match hn_state.items.borrow().get(story_id) {\n\n Some(item) => match item {\n\n HnItem::Story(story) => Some(story),\n\n _ => None,\n\n },\n\n None => {\n\n hn_state.items_to_fetch.queue_item(*story_id);\n", "file_path": "examples/hnterm/main.rs", "rank": 69, "score": 0.8600441767951508 }, { "content": " let title = imgui::ImString::new(\n\n format!(\"Status | Story List Mode: {}\", state.view_mode.to_string())\n\n );\n\n let window = imgui::Window::new(&title)\n\n .position([0., display_size[1] - STATUS_WINDOW_HEIGHT], imgui::Condition::Always)\n\n .size([display_size[0], STATUS_WINDOW_HEIGHT], imgui::Condition::Always)\n\n .flags(imgui::WindowFlags::NO_COLLAPSE |\n\n imgui::WindowFlags::NO_RESIZE |\n\n imgui::WindowFlags::NO_MOVE |\n\n imgui::WindowFlags::NO_SCROLLBAR);\n\n if let Some(window_token) = window.begin(draw_context.ui) {\n\n let now = Instant::now();\n\n let time_left = if now >= state.list_update_status.borrow().next_update {\n\n Duration::new(0,0)\n\n } else {\n\n state.list_update_status.borrow().next_update.duration_since(now)\n\n };\n\n let hn_state = state.hn_state.borrow();\n\n let api = hn_state.hn_api.borrow();\n\n let opt = api.last_url.borrow();\n", "file_path": 
"examples/hnterm/main.rs", "rank": 70, "score": 0.8426586859769483 }, { "content": " let mut story = serde_json::from_value::<HnStoryItem>(value)?;\n\n story.domain = reqwest::Url::parse(&story.url)?.host_str().unwrap_or(&story.domain).to_string();\n\n Ok(HnItem::Story(story))\n\n },\n\n \"job\" => Ok(HnItem::Job(serde_json::from_value::<HnJobItem>(value)?)),\n\n \"comment\" => Ok(HnItem::Comment(serde_json::from_value::<HnCommentItem>(value)?)),\n\n \"poll\" => Ok(HnItem::Poll(serde_json::from_value::<HnPollItem>(value)?)),\n\n \"pollopt\" => Ok(HnItem::PollOpt(serde_json::from_value::<HnPollOptItem>(value)?)),\n\n // TODO: this should be an error\n\n _ => Ok(HnItem::Unknown),\n\n }\n\n },\n\n // TODO: this should be an error\n\n _ => Ok(HnItem::Unknown)\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/hnterm/main.rs", "rank": 71, "score": 0.8250854764496087 } ]
Rust
procgen/src/connections.rs
gridbugs/rip
4bb5388df5d8fd694399773bf2faeba51f2cf2e5
use crate::hull::HullCell; use direction::{CardinalDirection, Direction}; use grid_2d::{ coord_2d::{static_axis, Axis, StaticAxis}, Coord, Grid, }; use rand::{seq::SliceRandom, Rng}; use std::collections::{HashMap, HashSet, VecDeque}; type RoomId = usize; type DoorCandidateId = usize; pub enum ConnectedCell { Floor, Wall, Space, Door, Window, } #[derive(Clone, Copy)] enum ClassifiedCell { Space, Wall, Room(RoomId), } fn classify_cells(grid: &Grid<HullCell>) -> Grid<ClassifiedCell> { let mut intermediate: Grid<Option<ClassifiedCell>> = Grid::new_fn(grid.size(), |_| None); let mut room_count = 0; let mut flood_fill_buffer = VecDeque::new(); for coord in grid.coord_iter() { if intermediate.get_checked(coord).is_some() { continue; } let classified_cell = match grid.get_checked(coord) { HullCell::Wall => ClassifiedCell::Wall, HullCell::Space => ClassifiedCell::Space, HullCell::Floor => { let classified_cell = ClassifiedCell::Room(room_count); flood_fill_buffer.push_back(coord); while let Some(coord) = flood_fill_buffer.pop_front() { for direction in Direction::all() { let neighbour_coord = coord + direction.coord(); if let Some(HullCell::Floor) = grid.get(neighbour_coord) { let cell = intermediate.get_checked_mut(neighbour_coord); if cell.is_none() { *cell = Some(classified_cell); flood_fill_buffer.push_back(neighbour_coord); } } } } room_count += 1; classified_cell } }; *intermediate.get_checked_mut(coord) = Some(classified_cell); } Grid::new_grid_map(intermediate, |maybe_cell| maybe_cell.unwrap()) } #[derive(Debug)] struct WindowCandidate { top_left: Coord, length: u32, axis: Axis, room: RoomId, } impl WindowCandidate { fn choose<'a, R: Rng>(&'a self, rng: &mut R) -> impl 'a + Iterator<Item = Coord> { let min_length = 1 + self.length / 8; let max_length = 1 + self.length / 3; let length = rng.gen_range(min_length, max_length + 1); let remaining_candidate_length = self.length - length; let min_offset = remaining_candidate_length / 4; let max_offset = 
remaining_candidate_length - min_offset; let offset = rng.gen_range(min_offset, max_offset + 1); (0..(length as i32)).map(move |i| self.top_left + Coord::new_axis(i + offset as i32, 0, self.axis.other())) } } #[derive(Debug, Clone)] struct DoorCandidate { top_left: Coord, length: u32, axis: Axis, left_room: RoomId, right_room: RoomId, } impl DoorCandidate { fn choose<R: Rng>(&self, rng: &mut R) -> Coord { let offset = rng.gen_range(0, self.length as i32); self.top_left + Coord::new_axis(offset, 0, self.axis.other()) } fn all<'a>(&'a self) -> impl 'a + Iterator<Item = Coord> { (0..(self.length as i32)).map(move |i| self.top_left + Coord::new_axis(i, 0, self.axis.other())) } } #[derive(Debug)] enum Candidate { Window(WindowCandidate), Door(DoorCandidate), } impl Candidate { fn length_mut(&mut self) -> &mut u32 { match self { Self::Door(DoorCandidate { ref mut length, .. }) => length, Self::Window(WindowCandidate { ref mut length, .. }) => length, } } } #[derive(Clone, Copy, PartialEq, Eq)] enum AbstractCandidate { Window { room: RoomId }, Door { left_room: RoomId, right_room: RoomId }, } fn classify_connection_candidates_in_axis<A: StaticAxis>(grid: &Grid<ClassifiedCell>, candidates: &mut Vec<Candidate>) { for i in 1..(grid.size().get_static::<A>().saturating_sub(1)) { let mut current_abstract_candidate = None; for j in 0..grid.size().get_static::<A::Other>() { let mid_coord = Coord::new_static_axis::<A>(i as i32, j as i32); if let ClassifiedCell::Wall = grid.get_checked(mid_coord) { let left_coord = mid_coord - Coord::new_static_axis::<A>(1, 0); let right_coord = mid_coord + Coord::new_static_axis::<A>(1, 0); let left = grid.get_checked(left_coord); let right = grid.get_checked(right_coord); if let Some(abstract_candidate) = match (*left, *right) { (ClassifiedCell::Space, ClassifiedCell::Room(room)) | (ClassifiedCell::Room(room), ClassifiedCell::Space) => Some(AbstractCandidate::Window { room }), (ClassifiedCell::Room(left_room), ClassifiedCell::Room(right_room)) => 
{ Some(AbstractCandidate::Door { left_room, right_room }) } _ => None, } { if current_abstract_candidate != Some(abstract_candidate) { let new_candidate = match abstract_candidate { AbstractCandidate::Window { room } => Candidate::Window(WindowCandidate { top_left: mid_coord, length: 0, axis: A::axis(), room, }), AbstractCandidate::Door { left_room, right_room } => Candidate::Door(DoorCandidate { top_left: mid_coord, length: 0, axis: A::axis(), left_room, right_room, }), }; candidates.push(new_candidate); } current_abstract_candidate = Some(abstract_candidate); *candidates.last_mut().unwrap().length_mut() += 1; continue; } } current_abstract_candidate = None; } } } #[derive(Default)] struct Candidates { door: Vec<DoorCandidate>, window: Vec<WindowCandidate>, } fn classify_connection_candidates(grid: &Grid<ClassifiedCell>) -> Candidates { let mut all_candidatces = Vec::new(); classify_connection_candidates_in_axis::<static_axis::X>(grid, &mut all_candidatces); classify_connection_candidates_in_axis::<static_axis::Y>(grid, &mut all_candidatces); let mut candidates = Candidates::default(); for candidate in all_candidatces { match candidate { Candidate::Door(door) => candidates.door.push(door), Candidate::Window(window) => candidates.window.push(window), } } candidates } #[derive(Debug)] struct RoomEdge { to_room: RoomId, via: DoorCandidateId, } #[derive(Default, Debug)] struct RoomNode { edges: Vec<RoomEdge>, } type DoorCandidateGraph = HashMap<RoomId, RoomNode>; fn make_door_candidate_graph(door_candidates: &[DoorCandidate]) -> DoorCandidateGraph { let mut graph: DoorCandidateGraph = HashMap::new(); for (door_candidate_id, door_candidate) in door_candidates.into_iter().enumerate() { graph.entry(door_candidate.left_room).or_default().edges.push(RoomEdge { to_room: door_candidate.right_room, via: door_candidate_id, }); graph .entry(door_candidate.right_room) .or_default() .edges .push(RoomEdge { to_room: door_candidate.left_room, via: door_candidate_id, }); } graph } 
fn make_random_door_candidate_graph_minimum_spanning_tree<R: Rng>( door_candidate_graph: &DoorCandidateGraph, door_candidates: &[DoorCandidate], rng: &mut R, ) -> HashSet<DoorCandidateId> { let mut mst = HashSet::new(); let mut visited_room_ids = HashSet::new(); let mut to_visit = vec![rng.gen_range(0, door_candidates.len())]; while !to_visit.is_empty() { let door_candidate_id = to_visit.swap_remove(rng.gen_range(0, to_visit.len())); let door_candidate = &door_candidates[door_candidate_id]; let new_left = visited_room_ids.insert(door_candidate.left_room); let new_right = visited_room_ids.insert(door_candidate.right_room); if !(new_left || new_right) { continue; } mst.insert(door_candidate_id); for edge in door_candidate_graph[&door_candidate.left_room] .edges .iter() .chain(door_candidate_graph[&door_candidate.right_room].edges.iter()) { if !visited_room_ids.contains(&edge.to_room) { to_visit.push(edge.via); } } } mst } fn choose_door_candidates<R: Rng>( door_candidate_graph: &DoorCandidateGraph, door_candidates: &[DoorCandidate], rng: &mut R, ) -> Vec<DoorCandidateId> { let mut chosen_door_candidates = make_random_door_candidate_graph_minimum_spanning_tree(&door_candidate_graph, door_candidates, rng); let mut extrta_door_candidates = (0..door_candidates.len()) .filter(|id| !chosen_door_candidates.contains(id)) .collect::<Vec<_>>(); extrta_door_candidates.shuffle(rng); let num_extra_door_candidates_to_choose = extrta_door_candidates.len() / 2; chosen_door_candidates.extend(extrta_door_candidates.iter().take(num_extra_door_candidates_to_choose)); let mut chosen_door_candidates = chosen_door_candidates.into_iter().collect::<Vec<_>>(); chosen_door_candidates.sort(); chosen_door_candidates.shuffle(rng); chosen_door_candidates } fn trim_non_dividing_walls(grid: &Grid<HullCell>) -> Grid<HullCell> { let mut grid = grid.clone(); loop { let mut to_clear = Vec::new(); for (coord, cell) in grid.enumerate() { if let HullCell::Wall = cell { let mut wall_neighbour_count = 0; for 
direction in CardinalDirection::all() { let neighbour_coord = coord + direction.coord(); if let Some(HullCell::Wall) = grid.get(neighbour_coord) { wall_neighbour_count += 1; } } if wall_neighbour_count <= 1 { to_clear.push(coord); } } } if to_clear.is_empty() { break; } for coord in to_clear { *grid.get_checked_mut(coord) = HullCell::Floor; } } grid } fn place_door<R: Rng>(candidate: &DoorCandidate, grid: &mut Grid<ConnectedCell>, rng: &mut R) { let coord = candidate.choose(rng); *grid.get_checked_mut(coord) = ConnectedCell::Door; } fn place_window<R: Rng>(candidate: &WindowCandidate, grid: &mut Grid<ConnectedCell>, rng: &mut R) { if rng.gen_range(0, 3) > 0 { for coord in candidate.choose(rng) { *grid.get_checked_mut(coord) = ConnectedCell::Window; } } } pub fn add_connections<R: Rng>(grid: &Grid<HullCell>, rng: &mut R) -> Grid<ConnectedCell> { let classified = classify_cells(grid); let candidates = classify_connection_candidates(&classified); let door_candidate_graph = make_door_candidate_graph(&candidates.door); let chosen_door_candidates = choose_door_candidates(&door_candidate_graph, &candidates.door, rng); let mut grid = grid.clone(); for &door_candidate_id in chosen_door_candidates.iter() { if rng.gen_range(0, 10) == 0 { let candidate = &candidates.door[door_candidate_id]; for coord in candidate.all() { *grid.get_checked_mut(coord) = HullCell::Floor; } } } let grid = trim_non_dividing_walls(&grid); let classified = classify_cells(&grid); let candidates = classify_connection_candidates(&classified); let door_candidate_graph = make_door_candidate_graph(&candidates.door); let chosen_door_candidates = choose_door_candidates(&door_candidate_graph, &candidates.door, rng); let mut grid = Grid::new_grid_map_ref(&grid, |cell| match cell { HullCell::Floor => ConnectedCell::Floor, HullCell::Wall => ConnectedCell::Wall, HullCell::Space => ConnectedCell::Space, }); for &door_candidate_id in chosen_door_candidates.iter() { place_door(&candidates.door[door_candidate_id], 
&mut grid, rng); } for window_candidate in candidates.window.iter() { place_window(window_candidate, &mut grid, rng); } grid }
use crate::hull::HullCell; use direction::{CardinalDirection, Direction}; use grid_2d::{ coord_2d::{static_axis, Axis, StaticAxis}, Coord, Grid, }; use rand::{seq::SliceRandom, Rng}; use std::collections::{HashMap, HashSet, VecDeque}; type RoomId = usize; type DoorCandidateId = usize; pub enum ConnectedCell { Floor, Wall, Space, Door, Window, } #[derive(Clone, Copy)] enum ClassifiedCell { Space, Wall, Room(RoomId), } fn classify_cells(grid: &Grid<HullCell>) -> Grid<ClassifiedCell> { let mut intermediate: Grid<Option<ClassifiedCell>> = Grid::new_fn(grid.size(), |_| None); let mut room_count = 0; let mut flood_fill_buffer = VecDeque::new(); for coord in grid.coord_iter() { if intermediate.get_checked(coord).is_some() { continue; } let classified_cell = match grid.get_checked(coord) { HullCell::Wall => ClassifiedCell::Wall, HullCell::Space => ClassifiedCell::Space, HullCell::Floor => { let classified_cell = ClassifiedCell::Room(room_count); flood_fill_buffer.push_back(coord); while let Some(coord) = flood_fill_buffer.pop_front() { for direction in Direction::all() { let neighbour_coord = coord + direction.coord(); if let Some(HullCell::Floor) = grid.get(neighbour_coord) { let cell = intermediate.get_checked_mut(neighbour_coord); if cell.is_none() { *cell = Some(classified_cell); flood_fill_buffer.push_back(neighbour_coord); } } } } room_count += 1; classified_cell } }; *intermediate.get_checked_mut(coord) = Some(classified_cell); } Grid::new_grid_map(intermediate, |maybe_cell| maybe_cell.unwrap()) } #[derive(Debug)] struct WindowCandidate { top_left: Coord, length: u32, axis: Axis, room: RoomId, } impl WindowCandidate { fn choose<'a, R: Rng>(&'a self, rng: &mut R) -> impl 'a + Iterator<Item = Coord> { let min_length = 1 + self.length / 8; let max_length = 1 + self.length / 3; let length = rng.gen_range(min_length, max_length + 1); let remaining_candidate_length = self.length - length; let min_offset = remaining_candidate_length / 4; let max_offset = 
remaining_candidate_length - min_offset; let offset = rng.gen_range(min_offset, max_offset + 1); (0..(length as i32)).map(move |i| self.top_left + Coord::new_axis(i + offset as i32, 0, self.axis.other())) } } #[derive(Debug, Clone)] struct DoorCandidate { top_left: Coord, length: u32, axis: Axis, left_room: RoomId, right_room: RoomId, } impl DoorCandidate { fn choose<R: Rng>(&self, rng: &mut R) -> Coord { let offset = rng.gen_range(0, self.length as i32); self.top_left + Coord::new_axis(offset, 0, self.axis.other()) } fn all<'a>(&'a self) -> impl 'a + Iterator<Item = Coord> { (0..(self.length as i32)).map(move |i| self.top_left + Coord::new_axis(i, 0, self.axis.other())) } } #[derive(Debug)] enum Candidate { Window(WindowCandidate), Door(DoorCandidate), } impl Candidate { fn length_mut(&mut self) -> &mut u32 { match self { Self::Door(DoorCandidate { ref mut length, .. }) => length, Self::Window(WindowCandidate { ref mut length, .. }) => length, } } } #[derive(Clone, Copy, PartialEq, Eq)] enum AbstractCandidate { Window { room: RoomId }, Door { left_room: RoomId, right_room: RoomId }, } fn classify_connection_candidates_in_axis<A: StaticAxis>(grid: &Grid<ClassifiedCell>, candidates: &mut Vec<Candidate>) { for i in 1..(grid.size().get_static::<A>().saturating_sub(1)) { let mut current_abstract_candidate = None; for j in 0..grid.size().get_static::<A::Other>() { let mid_coord = Coord::new_static_axis::<A>(i as i32, j as i32); if let ClassifiedCell::Wall = grid.get_checked(mid_coord) { let left_coord = mid_coord - Coord::new_static_axis::<A>(1, 0); let right_coord = mid_coord + Coord::new_static_axis::<A>(1, 0); let left = grid.get_checked(left_coord); let right = grid.get_checked(right_coord); if let Some(abstract_candidate) = match (*left, *right) { (ClassifiedCell::Space, ClassifiedCell::Room(room)) | (ClassifiedCell::Room(room), ClassifiedCell::Space) => Some(AbstractCandidate::Window { room }), (ClassifiedCell::Room(left_room), ClassifiedCell::Room(right_room)) => 
{ Some(AbstractCandidate::Door { left_room, right_room }) } _ => None, } { if current_abstract_candidate != Some(abstract_candidate) { let new_candidate = match abstract_candidate { AbstractCandidate::Window { room } => Candidate::Window(WindowCandidate { top_left: mid_coord, length: 0, axis: A::axis(), room, }), AbstractCandidate::Door { left_room, right_room } => Candidate::Door(DoorCandidate { top_left: mid_coord, length: 0, axis: A::axis(), left_room, right_room, }), }; candidates.push(new_candidate); } current_abstract_candidate = Some(abstract_candidate); *candidates.last_mut().unwrap().length_mut() += 1; continue; } } current_abstract_candidate = None; } } } #[derive(Default)] struct Candidates { door: Vec<DoorCandidate>, window: Vec<WindowCandidate>, } fn classify_connection_candidates(grid: &Grid<ClassifiedCell>) -> Candidates { let mut all_candidatces = Vec::new(); classify_connection_candidates_in_axis::<static_axis::X>(grid, &mut all_candidatces); classify_connection_candidates_in_axis::<static_axis::Y>(grid, &mut all_candidatces); let mut candidates = Candidates::default(); for candidate in all_candidatces { match candidate { Candidate::Door(door) => candidates.door.push(door), Candidate::Window(window) => candidates.window.push(window), } } candidates } #[derive(Debug)] struct RoomEdge { to_room: RoomId, via: DoorCandidateId, } #[derive(Default, Debug)] struct RoomNode { edges: Vec<RoomEdge>, } type DoorCandidateGraph = HashMap<RoomId, RoomNode>; fn make_door_candidate_graph(door_candidates: &[DoorCandidate]) -> DoorCandidateGraph { let mut graph: DoorCandidateGraph = HashMap::new(); for (door_candidate_id, door_candidate) in door_candidates.into_iter().enumerate() { graph.entry(door_candidate.left_room).or_default().edges.push(RoomEdge { to_room: door_candidate.right_room, via: door_candidate_id, }); graph .entry(door_candidate.right_room) .or_default() .edges .push(RoomEdge { to_room: door_candidate.left_room, via: door_candidate_id, }); } graph }
fn choose_door_candidates<R: Rng>( door_candidate_graph: &DoorCandidateGraph, door_candidates: &[DoorCandidate], rng: &mut R, ) -> Vec<DoorCandidateId> { let mut chosen_door_candidates = make_random_door_candidate_graph_minimum_spanning_tree(&door_candidate_graph, door_candidates, rng); let mut extrta_door_candidates = (0..door_candidates.len()) .filter(|id| !chosen_door_candidates.contains(id)) .collect::<Vec<_>>(); extrta_door_candidates.shuffle(rng); let num_extra_door_candidates_to_choose = extrta_door_candidates.len() / 2; chosen_door_candidates.extend(extrta_door_candidates.iter().take(num_extra_door_candidates_to_choose)); let mut chosen_door_candidates = chosen_door_candidates.into_iter().collect::<Vec<_>>(); chosen_door_candidates.sort(); chosen_door_candidates.shuffle(rng); chosen_door_candidates } fn trim_non_dividing_walls(grid: &Grid<HullCell>) -> Grid<HullCell> { let mut grid = grid.clone(); loop { let mut to_clear = Vec::new(); for (coord, cell) in grid.enumerate() { if let HullCell::Wall = cell { let mut wall_neighbour_count = 0; for direction in CardinalDirection::all() { let neighbour_coord = coord + direction.coord(); if let Some(HullCell::Wall) = grid.get(neighbour_coord) { wall_neighbour_count += 1; } } if wall_neighbour_count <= 1 { to_clear.push(coord); } } } if to_clear.is_empty() { break; } for coord in to_clear { *grid.get_checked_mut(coord) = HullCell::Floor; } } grid } fn place_door<R: Rng>(candidate: &DoorCandidate, grid: &mut Grid<ConnectedCell>, rng: &mut R) { let coord = candidate.choose(rng); *grid.get_checked_mut(coord) = ConnectedCell::Door; } fn place_window<R: Rng>(candidate: &WindowCandidate, grid: &mut Grid<ConnectedCell>, rng: &mut R) { if rng.gen_range(0, 3) > 0 { for coord in candidate.choose(rng) { *grid.get_checked_mut(coord) = ConnectedCell::Window; } } } pub fn add_connections<R: Rng>(grid: &Grid<HullCell>, rng: &mut R) -> Grid<ConnectedCell> { let classified = classify_cells(grid); let candidates = 
classify_connection_candidates(&classified); let door_candidate_graph = make_door_candidate_graph(&candidates.door); let chosen_door_candidates = choose_door_candidates(&door_candidate_graph, &candidates.door, rng); let mut grid = grid.clone(); for &door_candidate_id in chosen_door_candidates.iter() { if rng.gen_range(0, 10) == 0 { let candidate = &candidates.door[door_candidate_id]; for coord in candidate.all() { *grid.get_checked_mut(coord) = HullCell::Floor; } } } let grid = trim_non_dividing_walls(&grid); let classified = classify_cells(&grid); let candidates = classify_connection_candidates(&classified); let door_candidate_graph = make_door_candidate_graph(&candidates.door); let chosen_door_candidates = choose_door_candidates(&door_candidate_graph, &candidates.door, rng); let mut grid = Grid::new_grid_map_ref(&grid, |cell| match cell { HullCell::Floor => ConnectedCell::Floor, HullCell::Wall => ConnectedCell::Wall, HullCell::Space => ConnectedCell::Space, }); for &door_candidate_id in chosen_door_candidates.iter() { place_door(&candidates.door[door_candidate_id], &mut grid, rng); } for window_candidate in candidates.window.iter() { place_window(window_candidate, &mut grid, rng); } grid }
fn make_random_door_candidate_graph_minimum_spanning_tree<R: Rng>( door_candidate_graph: &DoorCandidateGraph, door_candidates: &[DoorCandidate], rng: &mut R, ) -> HashSet<DoorCandidateId> { let mut mst = HashSet::new(); let mut visited_room_ids = HashSet::new(); let mut to_visit = vec![rng.gen_range(0, door_candidates.len())]; while !to_visit.is_empty() { let door_candidate_id = to_visit.swap_remove(rng.gen_range(0, to_visit.len())); let door_candidate = &door_candidates[door_candidate_id]; let new_left = visited_room_ids.insert(door_candidate.left_room); let new_right = visited_room_ids.insert(door_candidate.right_room); if !(new_left || new_right) { continue; } mst.insert(door_candidate_id); for edge in door_candidate_graph[&door_candidate.left_room] .edges .iter() .chain(door_candidate_graph[&door_candidate.right_room].edges.iter()) { if !visited_room_ids.contains(&edge.to_room) { to_visit.push(edge.via); } } } mst }
function_block-full_function
[ { "content": "pub fn add_internal_walls<R: Rng>(grid: &Grid<HullCell>, rng: &mut R) -> Grid<HullCell> {\n\n let external_walls = ExternalWalls::classify(grid);\n\n let mut internal_walls = Vec::new();\n\n add_internal_walls_rec(\n\n &external_walls,\n\n Rect {\n\n coord: Coord::new(0, 0),\n\n size: grid.size(),\n\n },\n\n Size::new(4, 4),\n\n &mut internal_walls,\n\n rng,\n\n );\n\n let mut grid = grid.clone();\n\n for wall in internal_walls {\n\n wall.draw(&mut grid);\n\n }\n\n grid\n\n}\n", "file_path": "procgen/src/internal_walls.rs", "rank": 2, "score": 365676.1508357633 }, { "content": "pub fn generate_hull<R: Rng>(output_size: Size, space_width: u32, rng: &mut R) -> Grid<HullCell> {\n\n let input_grid = input_grid_from_strs(INPUT);\n\n let pattern_size = NonZeroU32::new(4).unwrap();\n\n generate_hull_internal(input_grid, output_size, space_width, pattern_size, rng)\n\n}\n", "file_path": "procgen/src/hull.rs", "rank": 3, "score": 351968.0371146966 }, { "content": "pub fn add_stars<R: Rng>(grid: &Grid<ConnectedCell>, rng: &mut R) -> Grid<StarCell> {\n\n Grid::new_grid_map_ref(grid, |cell| match cell {\n\n ConnectedCell::Door => StarCell::Door,\n\n ConnectedCell::Wall => StarCell::Wall,\n\n ConnectedCell::Floor => StarCell::Floor,\n\n ConnectedCell::Window => StarCell::Window,\n\n ConnectedCell::Space => {\n\n if rng.gen_range(0, 20) == 0 {\n\n StarCell::Star\n\n } else {\n\n StarCell::Space\n\n }\n\n }\n\n })\n\n}\n", "file_path": "procgen/src/stars.rs", "rank": 5, "score": 339082.7393256001 }, { "content": "pub fn choose_lights<R: Rng>(grid: &Grid<HullCell>, rng: &mut R) -> Vec<Light> {\n\n let mut lights = Vec::new();\n\n let mut flood_fill_buffer = VecDeque::new();\n\n let mut visited = HashSet::new();\n\n for (coord, cell) in grid.enumerate() {\n\n if let HullCell::Floor = cell {\n\n if visited.insert(coord) {\n\n flood_fill_buffer.push_back(coord);\n\n let mut total = Coord::new(0, 0);\n\n let mut count = 0;\n\n while let Some(coord) = 
flood_fill_buffer.pop_front() {\n\n total += coord;\n\n count += 1;\n\n for direction in CardinalDirection::all() {\n\n let neighbour_coord = coord + direction.coord();\n\n if let Some(HullCell::Floor) = grid.get(neighbour_coord) {\n\n if visited.insert(neighbour_coord) {\n\n flood_fill_buffer.push_back(neighbour_coord);\n\n }\n\n }\n", "file_path": "procgen/src/lights.rs", "rank": 6, "score": 322616.798343498 }, { "content": "pub fn all_room_means(grid: &Grid<HullCell>) -> Vec<Coord> {\n\n let mut means = Vec::new();\n\n let mut flood_fill_buffer = VecDeque::new();\n\n let mut visited = HashSet::new();\n\n for (coord, cell) in grid.enumerate() {\n\n if let HullCell::Floor = cell {\n\n if visited.insert(coord) {\n\n flood_fill_buffer.push_back(coord);\n\n let mut total = Coord::new(0, 0);\n\n let mut count = 0;\n\n while let Some(coord) = flood_fill_buffer.pop_front() {\n\n total += coord;\n\n count += 1;\n\n for direction in CardinalDirection::all() {\n\n let neighbour_coord = coord + direction.coord();\n\n if let Some(HullCell::Floor) = grid.get(neighbour_coord) {\n\n if visited.insert(neighbour_coord) {\n\n flood_fill_buffer.push_back(neighbour_coord);\n\n }\n\n }\n\n }\n\n }\n\n let mean = total / count;\n\n means.push(mean);\n\n }\n\n }\n\n }\n\n means\n\n}\n", "file_path": "procgen/src/spawns.rs", "rank": 7, "score": 268197.30166972044 }, { "content": "pub fn spaceship<R: Rng>(spec: SpaceshipSpec, player_data: EntityData, rng: &mut R) -> Terrain {\n\n let mut world = World::new(spec.size);\n\n let mut agents = ComponentTable::default();\n\n let spaceship = Spaceship::generate(spec, rng);\n\n let mut npc_candidates = Vec::new();\n\n for (coord, cell) in spaceship.map.enumerate() {\n\n match cell {\n\n SpaceshipCell::Wall => {\n\n world.spawn_wall(coord);\n\n }\n\n SpaceshipCell::Floor => {\n\n world.spawn_floor(coord);\n\n npc_candidates.push(coord);\n\n }\n\n SpaceshipCell::Space => {\n\n world.spawn_space(coord);\n\n }\n\n SpaceshipCell::Door => {\n\n 
world.spawn_floor(coord);\n\n world.spawn_door(coord);\n", "file_path": "game/src/terrain.rs", "rank": 11, "score": 235593.92068553314 }, { "content": "fn surround_by_space(grid: &Grid<HullCell>, width: u32) -> Grid<HullCell> {\n\n let offset = Size::new(width, width);\n\n Grid::new_fn(grid.size() + offset * 2, |coord| {\n\n if let Some(&cell) = grid.get(coord - offset.to_coord().unwrap()) {\n\n cell\n\n } else {\n\n HullCell::Space\n\n }\n\n })\n\n}\n\n\n", "file_path": "procgen/src/hull.rs", "rank": 12, "score": 229470.63246777919 }, { "content": "fn split_rect_with_wall<R: Rng, A: StaticAxis>(\n\n split_candidates: &InternalWallCandidatesInAxis<A>,\n\n rect: Rect,\n\n rng: &mut R,\n\n) -> Split {\n\n let &split_position = split_candidates\n\n .candidates\n\n .choose(rng)\n\n .expect(\"split_candidates should not be empty\");\n\n let left = Rect {\n\n coord: rect.coord,\n\n size: rect\n\n .size\n\n .set_static::<A::Other>(split_position - rect.coord.get_static::<A::Other>() as u32),\n\n };\n\n let right = Rect {\n\n coord: rect.coord + left.size.to_coord().unwrap().set_static::<A>(0) + Coord::new_static_axis::<A>(0, 1),\n\n size: rect.size - left.size.set_static::<A>(0) - Size::new_static_axis::<A>(0, 1),\n\n };\n\n let internal_wall = InternalWall {\n", "file_path": "procgen/src/internal_walls.rs", "rank": 14, "score": 222715.1734818688 }, { "content": "fn classify_walls<A: StaticAxis>(grid: &Grid<HullCell>) -> ExternalWallsInAxis<A> {\n\n let mut walls = Vec::new();\n\n for i in 0..grid.size().get_static::<A::Other>() {\n\n let mut consecutive_count = 0;\n\n for j in 0..grid.size().get_static::<A>() {\n\n let coord = Coord::new_static_axis::<A>(j as i32, i as i32);\n\n let high_coord = coord + Coord::new_static_axis::<A>(0, 1);\n\n let low_coord = coord - Coord::new_static_axis::<A>(0, 1);\n\n if let Some(HullCell::Wall) = grid.get(high_coord) {\n\n consecutive_count = 0;\n\n continue;\n\n }\n\n if let Some(HullCell::Wall) = grid.get(low_coord) {\n\n 
consecutive_count = 0;\n\n continue;\n\n }\n\n if *grid.get_checked(coord) == HullCell::Wall {\n\n if consecutive_count == 0 {\n\n walls.push(ExternalWall {\n\n top_left: coord,\n", "file_path": "procgen/src/internal_walls.rs", "rank": 15, "score": 212601.11409726355 }, { "content": "fn add_internal_walls_rec<R: Rng>(\n\n external_walls: &ExternalWalls,\n\n rect: Rect,\n\n min_rect_size: Size,\n\n internal_walls: &mut Vec<InternalWall>,\n\n rng: &mut R,\n\n) {\n\n assert!(rect.size.width() >= min_rect_size.width());\n\n assert!(rect.size.height() >= min_rect_size.height());\n\n let internal_wall_candidates = external_walls.internal_wall_candidates(&rect, min_rect_size);\n\n let candidates_x = &internal_wall_candidates.y_coord_of_horizontal_wall_candidates;\n\n let candidates_y = &internal_wall_candidates.x_coord_of_vertical_wall_candidates;\n\n let Split {\n\n left,\n\n right,\n\n internal_wall,\n\n } = if rect.size.width() < min_rect_size.width() * 2 + 1 {\n\n if rect.size.height() < min_rect_size.height() * 2 + 1 {\n\n return;\n\n } else {\n", "file_path": "procgen/src/internal_walls.rs", "rank": 16, "score": 205994.52204575966 }, { "content": "fn strip_walls_from_outside(grid: &Grid<GenerationCell>) -> Grid<HullCell> {\n\n Grid::new_grid_map_ref_with_coord(grid, |coord, cell| match cell {\n\n GenerationCell::Open => HullCell::Floor,\n\n GenerationCell::Closed => {\n\n for direction in Direction::all() {\n\n let neighbour_coord = coord + direction.coord();\n\n if let Some(GenerationCell::Open) = grid.get(neighbour_coord) {\n\n return HullCell::Wall;\n\n }\n\n }\n\n HullCell::Space\n\n }\n\n })\n\n}\n\n\n", "file_path": "procgen/src/hull.rs", "rank": 19, "score": 189376.0236149996 }, { "content": "pub fn explode(world: &mut World, coord: Coord, explosion: spec::Explosion, external_events: &mut Vec<ExternalEvent>) {\n\n world.spawn_explosion_emitter(coord, &explosion.particle_emitter);\n\n apply_mechanics(world, coord, &explosion.mechanics);\n\n 
external_events.push(ExternalEvent::Explosion(coord));\n\n}\n", "file_path": "game/src/world/explosion.rs", "rank": 22, "score": 185938.32999899326 }, { "content": "fn grow_enclosed_areas(grid: &Grid<GenerationCell>, by: usize) -> Grid<GenerationCell> {\n\n let mut grid = grid.clone();\n\n let mut coords_to_grow = grid\n\n .enumerate()\n\n .filter_map(|(coord, cell)| {\n\n if let GenerationCell::Open = cell {\n\n Some((coord, by))\n\n } else {\n\n None\n\n }\n\n })\n\n .collect::<Vec<_>>();\n\n while let Some((coord, remaining)) = coords_to_grow.pop() {\n\n if remaining == 0 {\n\n continue;\n\n }\n\n for direction in Direction::all() {\n\n let next_coord = coord + direction.coord();\n\n match grid.get_mut(next_coord) {\n\n None | Some(GenerationCell::Open) => continue,\n\n Some(cell @ GenerationCell::Closed) => {\n\n *cell = GenerationCell::Open;\n\n coords_to_grow.push((next_coord, remaining - 1));\n\n }\n\n }\n\n }\n\n }\n\n grid\n\n}\n\n\n", "file_path": "procgen/src/hull.rs", "rank": 23, "score": 182828.04629923383 }, { "content": "fn apply_direct_hit(world: &mut World, explosion_coord: Coord, mechanics: &spec::Mechanics, character_entity: Entity) {\n\n let mut solid_neighbour_vector = Coord::new(0, 0);\n\n for direction in Direction::all() {\n\n let neighbour_coord = explosion_coord + direction.coord();\n\n if let Some(spatial_cell) = world.spatial.layers_at(neighbour_coord) {\n\n if spatial_cell.feature.is_some() || spatial_cell.character.is_some() {\n\n solid_neighbour_vector += direction.coord();\n\n }\n\n }\n\n }\n\n let CharacterEffect { push_back, damage } = character_effect_direct_hit(mechanics);\n\n if solid_neighbour_vector.is_zero() {\n\n log::warn!(\"Direct hit with no solid neighbours shouldn't be possible.\");\n\n } else {\n\n let travel_vector = -solid_neighbour_vector;\n\n world.components.realtime.insert(character_entity, ());\n\n world.realtime_components.movement.insert(\n\n character_entity,\n\n ScheduledRealtimePeriodicState {\n\n state: 
movement::spec::Movement {\n", "file_path": "game/src/world/explosion.rs", "rank": 24, "score": 179011.5053493383 }, { "content": "fn remove_small_closed_areas(grid: &Grid<GenerationCell>, min_count: usize) -> Grid<GenerationCell> {\n\n let mut grid = grid.clone();\n\n let mut visited_ids: Grid<Option<usize>> = Grid::new_clone(grid.size(), None);\n\n let mut flood_fill_buffer = Vec::new();\n\n let mut current_id = 0usize;\n\n let mut counts_by_id = Vec::new();\n\n let mut ids_to_remove = HashSet::new();\n\n for (coord, cell) in grid.enumerate() {\n\n if let GenerationCell::Open = cell {\n\n if visited_ids.get_checked(coord).is_none() {\n\n flood_fill_buffer.push(coord);\n\n *visited_ids.get_checked_mut(coord) = Some(current_id);\n\n let mut count = 0usize;\n\n while let Some(coord) = flood_fill_buffer.pop() {\n\n count += 1;\n\n for direction in Direction::all() {\n\n let next_coord = coord + direction.coord();\n\n match grid.get(next_coord) {\n\n None | Some(GenerationCell::Open) => continue,\n\n Some(GenerationCell::Closed) => (),\n", "file_path": "procgen/src/hull.rs", "rank": 25, "score": 175654.16542748065 }, { "content": "fn wfc_map<R: Rng>(\n\n input_grid: Grid<GenerationCell>,\n\n output_size: Size,\n\n pattern_size: NonZeroU32,\n\n rng: &mut R,\n\n) -> Grid<GenerationCell> {\n\n let mut output_grid = Grid::new_clone(output_size, GenerationCell::Open);\n\n let overlapping_patterns = OverlappingPatterns::new_all_orientations(input_grid, pattern_size);\n\n let global_stats = overlapping_patterns.global_stats();\n\n let run = RunOwn::new_wrap_forbid(output_size, &global_stats, wrap::WrapXY, ForbidNothing, rng);\n\n let wave = run.collapse_retrying(retry::Forever, rng);\n\n for (coord, wave_cell) in wave.grid().enumerate() {\n\n let pattern_id = wave_cell.chosen_pattern_id().expect(\"unexpected contradiction\");\n\n let cell = overlapping_patterns.pattern_top_left_value(pattern_id);\n\n *output_grid.get_checked_mut(coord) = *cell;\n\n }\n\n 
output_grid\n\n}\n\n\n", "file_path": "procgen/src/hull.rs", "rank": 26, "score": 173978.2838227672 }, { "content": "fn generate_hull_internal<R: Rng>(\n\n input_grid: Grid<GenerationCell>,\n\n output_size: Size,\n\n space_width: u32,\n\n pattern_size: NonZeroU32,\n\n rng: &mut R,\n\n) -> Grid<HullCell> {\n\n let output_grid = wfc_map(\n\n input_grid,\n\n output_size - Size::new(space_width + 1, space_width + 1) * 2,\n\n pattern_size,\n\n rng,\n\n );\n\n let output_grid = keep_largest_enclosed_area(&output_grid);\n\n let output_grid = grow_enclosed_areas(&output_grid, 1);\n\n let output_grid = remove_small_closed_areas(&output_grid, 40);\n\n let output_grid = wrap_in_closed_area(&output_grid);\n\n let output_grid = strip_walls_from_outside(&output_grid);\n\n let output_grid = surround_by_space(&output_grid, space_width);\n\n output_grid\n\n}\n\n\n", "file_path": "procgen/src/hull.rs", "rank": 27, "score": 171090.4703629908 }, { "content": "struct InternalWallCandidatesInAxis<A: StaticAxis> {\n\n axis_aligned_with_walls: PhantomData<A>,\n\n candidates: Vec<u32>,\n\n}\n\n\n", "file_path": "procgen/src/internal_walls.rs", "rank": 29, "score": 165600.35211101128 }, { "content": "fn wrap_in_closed_area(grid: &Grid<GenerationCell>) -> Grid<GenerationCell> {\n\n Grid::new_fn(grid.size() + Size::new(2, 2), |coord| {\n\n if let Some(cell) = grid.get(coord - Coord::new(1, 1)) {\n\n *cell\n\n } else {\n\n GenerationCell::Closed\n\n }\n\n })\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq, Eq)]\n\npub enum HullCell {\n\n Wall,\n\n Floor,\n\n Space,\n\n}\n\n\n", "file_path": "procgen/src/hull.rs", "rank": 30, "score": 161237.43102862348 }, { "content": "fn keep_largest_enclosed_area(grid: &Grid<GenerationCell>) -> Grid<GenerationCell> {\n\n let mut visited_ids: Grid<Option<usize>> = Grid::new_clone(grid.size(), None);\n\n let mut flood_fill_buffer = Vec::new();\n\n let mut current_id = 0usize;\n\n let mut counts_by_id = Vec::new();\n\n for (coord, cell) in grid.enumerate() {\n\n if 
let GenerationCell::Open = cell {\n\n if visited_ids.get_checked(coord).is_none() {\n\n flood_fill_buffer.push(coord);\n\n *visited_ids.get_checked_mut(coord) = Some(current_id);\n\n let mut count = 0usize;\n\n while let Some(coord) = flood_fill_buffer.pop() {\n\n count += 1;\n\n for direction in Direction::all() {\n\n let next_coord = coord + direction.coord();\n\n match grid.get(next_coord) {\n\n None | Some(GenerationCell::Closed) => continue,\n\n Some(GenerationCell::Open) => (),\n\n }\n\n let maybe_visited_id = visited_ids.get_checked_mut(next_coord);\n", "file_path": "procgen/src/hull.rs", "rank": 32, "score": 159352.28588707448 }, { "content": "fn find_internal_wall_candidates<A: StaticAxis>(\n\n walls: &ExternalWallsInAxis<A>,\n\n rect: &Rect,\n\n min_rect_size: Size,\n\n) -> InternalWallCandidatesInAxis<A> {\n\n let min_distance_from_wall = min_rect_size.get_static::<A::Other>();\n\n let top_left = rect.coord.get_static::<A::Other>() as u32;\n\n let low = top_left + min_distance_from_wall;\n\n let high = (top_left + rect.size.get_static::<A::Other>()).saturating_sub(min_distance_from_wall);\n\n let mut candidates = (low..high).collect::<BTreeSet<_>>();\n\n for wall in &walls.walls {\n\n if !wall_intersects_rect::<A>(wall, rect) {\n\n continue;\n\n }\n\n let position = wall.top_left.get_static::<A::Other>() as u32;\n\n let range = match wall.inside {\n\n Side::Low => ((position - min_distance_from_wall)..=(position - 1)),\n\n Side::High => ((position + 1)..=(position + min_distance_from_wall)),\n\n };\n\n for index in range {\n", "file_path": "procgen/src/internal_walls.rs", "rank": 33, "score": 154562.16474339124 }, { "content": "pub fn period_per_frame(num_per_frame: u32) -> Duration {\n\n FRAME_DURATION / num_per_frame\n\n}\n\n\n\ncrate::realtime_periodic! 
{\n\n realtime_periodic {\n\n movement: MovementState,\n\n particle_emitter: ParticleEmitterState,\n\n fade: FadeState,\n\n light_colour_fade: LightColourFadeState,\n\n flicker: FlickerState,\n\n }\n\n}\n\n\n\npub use realtime_periodic::RealtimeComponents;\n\n\n\n#[derive(Debug, Clone, Copy, Serialize, Deserialize)]\n\npub enum FadeProgress {\n\n Fading(u8),\n\n Complete,\n", "file_path": "game/src/world/realtime_periodic/data.rs", "rank": 34, "score": 145331.44050038277 }, { "content": "fn apply_mechanics(world: &mut World, explosion_coord: Coord, mechanics: &spec::Mechanics) {\n\n for character_entity in world.components.character.entities().collect::<Vec<_>>() {\n\n if let Some(character_coord) = world.spatial.coord_of(character_entity) {\n\n if character_coord == explosion_coord {\n\n apply_direct_hit(world, explosion_coord, mechanics, character_entity);\n\n } else {\n\n if !is_in_explosion_range(explosion_coord, mechanics, character_coord) {\n\n continue;\n\n }\n\n let explosion_to_character = LineSegment::new(explosion_coord, character_coord);\n\n if !world.is_solid_feature_in_line_segment(explosion_to_character) {\n\n apply_indirect_hit(world, mechanics, character_entity, explosion_to_character);\n\n } else {\n\n continue;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "game/src/world/explosion.rs", "rank": 35, "score": 141619.87599428542 }, { "content": "fn input_grid_from_strs(input: &[&str]) -> Grid<GenerationCell> {\n\n let width = input[0].len();\n\n let height = input.len();\n\n let size = Size::new(width as u32, height as u32);\n\n let mut grid = Grid::new_clone(size, GenerationCell::Open);\n\n for (y, row) in input.iter().enumerate() {\n\n for (x, ch) in row.chars().enumerate() {\n\n let coord = Coord::new(x as i32, y as i32);\n\n let cell = match ch {\n\n '.' 
=> GenerationCell::Open,\n\n '#' => GenerationCell::Closed,\n\n ch => panic!(\"unexpected char: {}\", ch),\n\n };\n\n *grid.get_checked_mut(coord) = cell;\n\n }\n\n }\n\n grid\n\n}\n\n\n", "file_path": "procgen/src/hull.rs", "rank": 36, "score": 140496.32894896588 }, { "content": "struct ExternalWallsInAxis<A: StaticAxis> {\n\n axis: PhantomData<A>,\n\n walls: Vec<ExternalWall>,\n\n}\n\n\n", "file_path": "procgen/src/internal_walls.rs", "rank": 37, "score": 137400.61457254642 }, { "content": "struct InternalWallCandidates {\n\n y_coord_of_horizontal_wall_candidates: InternalWallCandidatesInAxis<static_axis::X>,\n\n x_coord_of_vertical_wall_candidates: InternalWallCandidatesInAxis<static_axis::Y>,\n\n}\n\n\n", "file_path": "procgen/src/internal_walls.rs", "rank": 38, "score": 137295.3243672871 }, { "content": "fn wall_intersects_rect<A: StaticAxis>(wall: &ExternalWall, rect: &Rect) -> bool {\n\n // variables are named as if wall_axis is X\n\n let top = rect.coord.get_static::<A::Other>();\n\n let bottom = top + rect.size.get_static::<A::Other>() as i32 - 1;\n\n let left = rect.coord.get_static::<A>();\n\n let right = left + rect.size.get_static::<A>() as i32 - 1;\n\n let wall_y = wall.top_left.get_static::<A::Other>();\n\n if wall_y < top || wall_y > bottom {\n\n return false;\n\n }\n\n let wall_start_x = wall.top_left.get_static::<A>();\n\n let wall_end_x = wall_start_x + wall.length as i32 - 1;\n\n if wall_start_x > right || wall_end_x < left {\n\n return false;\n\n }\n\n true\n\n}\n\n\n", "file_path": "procgen/src/internal_walls.rs", "rank": 42, "score": 134419.93206090998 }, { "content": "#[derive(Clone, Copy)]\n\nstruct GameCoord(Coord);\n\n\n", "file_path": "app/src/game.rs", "rank": 43, "score": 131900.16110535926 }, { "content": "#[derive(Clone, Copy)]\n\nstruct PlayerCoord(Coord);\n\n\n\nimpl GameCoord {\n\n fn of_player(player_info: &CharacterInfo) -> Self {\n\n Self(player_info.coord)\n\n }\n\n}\n\n\n", "file_path": "app/src/game.rs", "rank": 44, "score": 
131900.16110535926 }, { "content": "pub fn app(\n\n game_config: GameConfig,\n\n frontend: Frontend,\n\n controls: Controls,\n\n storage: StaticStorage,\n\n save_key: String,\n\n audio_player: AppAudioPlayer,\n\n rng_seed: RngSeed,\n\n auto_play: Option<AutoPlay>,\n\n fullscreen: Option<Fullscreen>,\n\n env: Box<dyn Env>,\n\n) -> impl app::App {\n\n let app_data = AppData::new(\n\n game_config,\n\n frontend,\n\n controls,\n\n storage,\n\n save_key,\n\n audio_player,\n\n rng_seed,\n\n fullscreen,\n\n env,\n\n );\n\n let app_view = AppView::new();\n\n event_routine(auto_play).app_one_shot_ignore_return(app_data, app_view)\n\n}\n", "file_path": "app/src/app.rs", "rank": 45, "score": 123172.93963161245 }, { "content": "struct Wander<'a, R> {\n\n world: &'a World,\n\n last_seen_grid: &'a LastSeenGrid,\n\n min_last_seen_coord: Option<Coord>,\n\n min_last_seen_count: u64,\n\n entity: Entity,\n\n avoid: bool,\n\n rng: &'a mut R,\n\n}\n\n\n\nimpl<'a, R: Rng> BestSearch for Wander<'a, R> {\n\n fn is_at_max_depth(&self, _depth: Depth) -> bool {\n\n false\n\n }\n\n fn can_enter_initial_updating_best(&mut self, coord: Coord) -> bool {\n\n if self.world.can_npc_traverse_feature_at_coord(coord) {\n\n if let Some(entity) = self.world.get_character_at_coord(coord) {\n\n if entity != self.entity {\n\n let my_coord = self.world.entity_coord(self.entity).unwrap();\n\n if my_coord.manhattan_distance(coord) < 4 {\n", "file_path": "game/src/behaviour.rs", "rank": 46, "score": 120346.97974259558 }, { "content": "fn is_in_explosion_range(explosion_coord: Coord, mechanics: &spec::Mechanics, coord: Coord) -> bool {\n\n explosion_coord.distance2(coord) <= mechanics.range.pow(2)\n\n}\n\n\n", "file_path": "game/src/world/explosion.rs", "rank": 47, "score": 118721.26337487143 }, { "content": "fn options_menu() -> impl EventRoutine<\n\n Return = Result<OrBack<OptionsMenuEntry>, menu::Escape>,\n\n Data = AppData,\n\n View = AppView,\n\n Event = CommonEvent,\n\n> {\n\n 
SideEffectThen::new_with_view(|data: &mut AppData, _: &_| {\n\n let config = data.game.config();\n\n let fullscreen = data.env.fullscreen();\n\n let fullscreen_requires_restart = data.env.fullscreen_requires_restart();\n\n let menu_entry_string = MenuEntryStringFn::new(\n\n move |entry: MenuEntryToRender<OrBack<OptionsMenuEntry>>, buf: &mut String| {\n\n use std::fmt::Write;\n\n use OptionsMenuEntry::*;\n\n use OrBack::*;\n\n match entry.entry {\n\n Back => write!(buf, \"back\").unwrap(),\n\n Selection(entry) => match entry {\n\n ToggleMusic => {\n\n write!(buf, \"(m) Music enabled [{}]\", if config.music { '*' } else { ' ' }).unwrap()\n", "file_path": "app/src/app.rs", "rank": 48, "score": 113405.59275388453 }, { "content": "pub fn make_player() -> EntityData {\n\n EntityData {\n\n tile: Some(Tile::Player),\n\n light: Some(Light {\n\n colour: Rgb24::new(187, 187, 187),\n\n vision_distance: Circle::new_squared(70),\n\n diminish: Rational {\n\n numerator: 1,\n\n denominator: 1,\n\n },\n\n }),\n\n character: Some(()),\n\n hit_points: Some(HitPoints::new_full(100)),\n\n player: Some(()),\n\n ..Default::default()\n\n }\n\n}\n\n\n\nimpl World {\n\n pub fn insert_entity_data(&mut self, location: Location, entity_data: EntityData) -> Entity {\n", "file_path": "game/src/world/spawn.rs", "rank": 49, "score": 111152.30938782483 }, { "content": "#[wasm_bindgen(start)]\n\npub fn run() -> Result<(), JsValue> {\n\n wasm_logger::init(wasm_logger::Config::new(log::Level::Info));\n\n console_error_panic_hook::set_once();\n\n let audio_player = Some(StaticAudioPlayer::new(WebAudioPlayer::new_with_mime(\"video/ogg\")));\n\n let storage = StaticStorage::new(LocalStorage::new());\n\n let context = Context::new(Size::new(60, 40), \"content\");\n\n let app = app(\n\n GameConfig { omniscient: None },\n\n Frontend::Web,\n\n Controls::default(),\n\n storage,\n\n SAVE_KEY.to_string(),\n\n audio_player,\n\n RngSeed::Random,\n\n Some(AutoPlay),\n\n None,\n\n Box::new(EnvNull),\n\n );\n\n 
context.run_app(app);\n\n Ok(())\n\n}\n", "file_path": "web/src/lib.rs", "rank": 50, "score": 109456.76749823596 }, { "content": "#[derive(Debug)]\n\nstruct ExternalWall {\n\n top_left: Coord,\n\n length: usize,\n\n inside: Side,\n\n}\n\n\n", "file_path": "procgen/src/internal_walls.rs", "rank": 51, "score": 105855.73536588896 }, { "content": "struct InternalWall {\n\n top_left: Coord,\n\n length: u32,\n\n axis: Axis,\n\n}\n\n\n\nimpl InternalWall {\n\n fn draw(&self, grid: &mut Grid<HullCell>) {\n\n let step = Coord::new_axis(1, 0, self.axis);\n\n for i in 0..self.length {\n\n let coord = self.top_left + step * i as i32;\n\n let cell = grid.get_checked_mut(coord);\n\n if let HullCell::Floor = cell {\n\n *cell = HullCell::Wall;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "procgen/src/internal_walls.rs", "rank": 52, "score": 105851.64767289636 }, { "content": "struct ExternalWalls {\n\n horizontal: ExternalWallsInAxis<static_axis::X>,\n\n vertical: ExternalWallsInAxis<static_axis::Y>,\n\n}\n\n\n", "file_path": "procgen/src/internal_walls.rs", "rank": 53, "score": 105851.64767289636 }, { "content": "fn character_effect_direct_hit(mechanics: &spec::Mechanics) -> CharacterEffect {\n\n let push_back = mechanics.range / 3;\n\n CharacterEffect {\n\n push_back,\n\n damage: mechanics.range * 10,\n\n }\n\n}\n\n\n", "file_path": "game/src/world/explosion.rs", "rank": 54, "score": 104560.94736574715 }, { "content": "struct GameCoordToScreenCoord {\n\n game_coord: GameCoord,\n\n player_coord: GameCoord,\n\n}\n\n\n\nimpl GameCoordToScreenCoord {\n\n fn compute(self) -> ScreenCoord {\n\n ScreenCoord(self.game_coord.0 - self.player_coord.0 + PLAYER_OFFSET)\n\n }\n\n}\n\n\n", "file_path": "app/src/game.rs", "rank": 55, "score": 104235.1481734675 }, { "content": "struct ScreenCoordToGameCoord {\n\n screen_coord: ScreenCoord,\n\n player_coord: GameCoord,\n\n}\n\n\n\nimpl ScreenCoordToGameCoord {\n\n fn compute(self) -> GameCoord {\n\n GameCoord(self.screen_coord.0 + 
self.player_coord.0 - PLAYER_OFFSET)\n\n }\n\n}\n\n\n", "file_path": "app/src/game.rs", "rank": 56, "score": 104235.1481734675 }, { "content": "#[derive(Debug)]\n\nenum Side {\n\n High,\n\n Low,\n\n}\n\n\n", "file_path": "procgen/src/internal_walls.rs", "rank": 60, "score": 101192.89706499118 }, { "content": "#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]\n\nenum GenerationCell {\n\n Closed,\n\n Open,\n\n}\n\n\n", "file_path": "procgen/src/hull.rs", "rank": 61, "score": 101188.39430209636 }, { "content": "fn aim() -> impl EventRoutine<Return = Option<ScreenCoord>, Data = AppData, View = AppView, Event = CommonEvent> {\n\n make_either!(Ei = A | B);\n\n SideEffectThen::new_with_view(|data: &mut AppData, view: &AppView| {\n\n let game_relative_mouse_coord = view\n\n .game\n\n .absolute_coord_to_game_relative_screen_coord(data.last_mouse_coord);\n\n if let Ok(initial_aim_coord) = data.game.initial_aim_coord(game_relative_mouse_coord) {\n\n Ei::A(\n\n AimEventRoutine::new(initial_aim_coord)\n\n .select(SelectGame::new())\n\n .decorated(DecorateGame::new()),\n\n )\n\n } else {\n\n Ei::B(Value::new(None))\n\n }\n\n })\n\n}\n\n\n", "file_path": "app/src/app.rs", "rank": 63, "score": 100637.59591785108 }, { "content": "fn has_line_of_sight(eye: Coord, dest: Coord, world: &World, vision_distance: vision_distance::Circle) -> bool {\n\n for coord in LineSegment::new(eye, dest).iter() {\n\n let eye_to_coord = coord - eye;\n\n if !vision_distance.in_range(eye_to_coord) {\n\n return false;\n\n }\n\n if !world.can_npc_traverse_feature_at_coord(coord) {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "game/src/behaviour.rs", "rank": 64, "score": 99531.43634936266 }, { "content": "#[derive(Clone, Copy)]\n\nenum MainMenuType {\n\n Init,\n\n Pause,\n\n}\n\n\n", "file_path": "app/src/app.rs", "rank": 65, "score": 99301.86524167992 }, { "content": "#[derive(Debug)]\n\nstruct Rect {\n\n coord: Coord,\n\n size: Size,\n\n}\n\n\n", "file_path": 
"procgen/src/internal_walls.rs", "rank": 67, "score": 98902.45732493942 }, { "content": "struct Split {\n\n left: Rect,\n\n right: Rect,\n\n internal_wall: InternalWall,\n\n}\n\n\n", "file_path": "procgen/src/internal_walls.rs", "rank": 68, "score": 98898.36963194681 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct VisibilityCell {\n\n last_seen: u64,\n\n last_seen_next: u64,\n\n last_lit: u64,\n\n visible_directions: DirectionBitmap,\n\n light_colour: Rgb24,\n\n}\n\n\n\nimpl Default for VisibilityCell {\n\n fn default() -> Self {\n\n Self {\n\n last_seen: 0,\n\n last_seen_next: 0,\n\n last_lit: 0,\n\n visible_directions: DirectionBitmap::empty(),\n\n light_colour: Rgb24::new(0, 0, 0),\n\n }\n\n }\n\n}\n\n\n", "file_path": "game/src/visibility.rs", "rank": 69, "score": 98882.14191485534 }, { "content": "struct RngSeedSource {\n\n rng: Isaac64Rng,\n\n next: u64,\n\n}\n\n\n\nimpl RngSeedSource {\n\n fn new(rng_seed: RngSeed) -> Self {\n\n let mut rng = Isaac64Rng::from_entropy();\n\n let next = match rng_seed {\n\n RngSeed::Random => rng.gen(),\n\n RngSeed::U64(seed) => seed,\n\n };\n\n Self { rng, next }\n\n }\n\n fn next_seed(&mut self) -> u64 {\n\n let seed = self.next;\n\n self.next = self.rng.gen();\n\n seed\n\n }\n\n}\n", "file_path": "app/src/game.rs", "rank": 70, "score": 96851.55091019742 }, { "content": "#[derive(Clone, Debug, Serialize, Deserialize)]\n\nstruct LastSeenGrid {\n\n count: u64,\n\n last_seen: Grid<LastSeenCell>,\n\n}\n\n\n", "file_path": "game/src/behaviour.rs", "rank": 71, "score": 96766.29990625521 }, { "content": "#[derive(Clone, Debug, Serialize, Deserialize)]\n\nstruct LastSeenCell {\n\n count: u64,\n\n avoid_until: u64,\n\n}\n\n\n", "file_path": "game/src/behaviour.rs", "rank": 72, "score": 96719.24513479223 }, { "content": "#[allow(dead_code)]\n\npub fn from_str(s: &str, player_data: EntityData) -> Terrain {\n\n let rows = s.split('\\n').filter(|s| !s.is_empty()).collect::<Vec<_>>();\n\n let size = 
Size::new_u16(rows[0].len() as u16, rows.len() as u16);\n\n let mut world = World::new(size);\n\n let mut agents = ComponentTable::default();\n\n let mut player_data = Some(player_data);\n\n let mut player = None;\n\n for (y, row) in rows.iter().enumerate() {\n\n for (x, ch) in row.chars().enumerate() {\n\n if ch.is_control() {\n\n continue;\n\n }\n\n let coord = Coord::new(x as i32, y as i32);\n\n match ch {\n\n '.' => {\n\n world.spawn_floor(coord);\n\n }\n\n '*' => {\n\n world.spawn_floor(coord);\n\n world.spawn_light(coord, Rgb24::new(187, 187, 187));\n", "file_path": "game/src/terrain.rs", "rank": 73, "score": 96281.62049377941 }, { "content": "fn map() -> impl EventRoutine<Return = (), Data = AppData, View = AppView, Event = CommonEvent> {\n\n make_either!(Ei = A | B);\n\n SideEffectThen::new_with_view(|data: &mut AppData, _: &_| {\n\n if let Some(instance) = data.game.instance() {\n\n Ei::A(\n\n MapEventRoutine::new_centred_on_player(instance)\n\n .select(SelectGame::new())\n\n .decorated(DecorateGame::new()),\n\n )\n\n } else {\n\n Ei::B(Value::new(()))\n\n }\n\n })\n\n}\n\n\n", "file_path": "app/src/app.rs", "rank": 74, "score": 82820.57573848925 }, { "content": "fn game_over() -> impl EventRoutine<Return = (), Data = AppData, View = AppView, Event = CommonEvent> {\n\n GameOverEventRoutine::new()\n\n .select(SelectGame::new())\n\n .decorated(DecorateGame::new())\n\n}\n\n\n", "file_path": "app/src/app.rs", "rank": 75, "score": 82820.57573848925 }, { "content": "fn game_loop() -> impl EventRoutine<Return = (), Data = AppData, View = AppView, Event = CommonEvent> {\n\n make_either!(Ei = A | B | C);\n\n SideEffect::new_with_view(|data: &mut AppData, _: &_| data.game.pre_game_loop())\n\n .then(|| {\n\n Ei::A(game())\n\n .repeat(|game_return| match game_return {\n\n GameReturn::Pause => Handled::Return(GameLoopBreak::Pause),\n\n GameReturn::GameOver => Handled::Return(GameLoopBreak::GameOver),\n\n GameReturn::Map => Handled::Continue(Ei::C(map().then(|| 
game()))),\n\n GameReturn::Aim => Handled::Continue(Ei::B(aim().and_then(|maybe_screen_coord| {\n\n make_either!(Ei = A | B);\n\n if let Some(screen_coord) = maybe_screen_coord {\n\n Ei::A(game_injecting_inputs(vec![InjectedInput::Fire(screen_coord)]))\n\n } else {\n\n Ei::B(game())\n\n }\n\n }))),\n\n })\n\n .and_then(|game_loop_break| {\n\n make_either!(Ei = A | B);\n", "file_path": "app/src/app.rs", "rank": 76, "score": 81675.61026342666 }, { "content": "fn options_menu_cycle() -> impl EventRoutine<Return = (), Data = AppData, View = AppView, Event = CommonEvent> {\n\n make_either!(Ei = A | B);\n\n use OptionsMenuEntry::*;\n\n use OrBack::*;\n\n Ei::A(options_menu()).repeat(|choice| match choice {\n\n Ok(Back) | Err(menu::Escape) => Handled::Return(()),\n\n Ok(Selection(selection)) => Handled::Continue(Ei::B(SideEffectThen::new_with_view(\n\n move |data: &mut AppData, _: &_| {\n\n let mut config = data.game.config();\n\n match selection {\n\n ToggleMusic => config.music = !config.music,\n\n ToggleSfx => config.sfx = !config.sfx,\n\n ToggleFullscreen => {\n\n data.env.set_fullscreen(!data.env.fullscreen());\n\n config.fullscreen = data.env.fullscreen();\n\n }\n\n }\n\n data.game.set_config(config);\n\n options_menu()\n\n },\n\n ))),\n\n })\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub struct AutoPlay;\n\n\n", "file_path": "app/src/app.rs", "rank": 77, "score": 80581.1326524654 }, { "content": "fn game() -> impl EventRoutine<Return = GameReturn, Data = AppData, View = AppView, Event = CommonEvent> {\n\n GameEventRoutine::new()\n\n .select(SelectGame::new())\n\n .decorated(DecorateGame::new())\n\n}\n\n\n", "file_path": "app/src/app.rs", "rank": 78, "score": 79053.95304487992 }, { "content": "#[derive(Clone, Debug, Serialize, Deserialize)]\n\nenum Behaviour {\n\n Wander {\n\n avoid: bool,\n\n },\n\n Chase {\n\n last_seen_player_coord: Coord,\n\n accurate: bool,\n\n },\n\n Flee,\n\n}\n\n\n", "file_path": "game/src/behaviour.rs", "rank": 79, "score": 67046.55677950395 }, 
{ "content": "#[derive(PartialEq, Eq, Clone, Copy)]\n\nenum GameStatus {\n\n Playing,\n\n Over,\n\n}\n\n\n\npub struct GameToRender<'a> {\n\n game: &'a Game,\n\n screen_shake: Option<ScreenShake>,\n\n status: GameStatus,\n\n camera_mode: CameraMode,\n\n}\n\n\n", "file_path": "app/src/game.rs", "rank": 80, "score": 65883.44799592301 }, { "content": "#[allow(dead_code)]\n\n#[derive(Clone, Copy)]\n\nenum CameraMode {\n\n Fixed,\n\n FollowPlayer,\n\n}\n\n\n", "file_path": "app/src/game.rs", "rank": 81, "score": 65878.80148337955 }, { "content": "#[derive(Clone, Copy, Debug, PartialOrd, Ord, PartialEq, Eq)]\n\nenum OptionsMenuEntry {\n\n ToggleMusic,\n\n ToggleSfx,\n\n ToggleFullscreen,\n\n}\n\n\n\nimpl OptionsMenuEntry {\n\n fn instance(env: &Box<dyn Env>) -> menu::MenuInstanceChooseOrEscape<OrBack<OptionsMenuEntry>> {\n\n use OptionsMenuEntry::*;\n\n use OrBack::*;\n\n menu::MenuInstanceBuilder {\n\n items: if env.fullscreen_supported() {\n\n vec![\n\n Selection(ToggleMusic),\n\n Selection(ToggleSfx),\n\n Selection(ToggleFullscreen),\n\n Back,\n\n ]\n\n } else {\n\n vec![Selection(ToggleMusic), Selection(ToggleSfx), Back]\n", "file_path": "app/src/app.rs", "rank": 82, "score": 64791.04711479518 }, { "content": "#[derive(Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]\n\nenum MainMenuEntry {\n\n NewGame,\n\n Resume,\n\n Quit,\n\n Save,\n\n SaveQuit,\n\n Clear,\n\n Options,\n\n}\n\n\n\nimpl MainMenuEntry {\n\n fn init(frontend: Frontend) -> menu::MenuInstance<Self> {\n\n use MainMenuEntry::*;\n\n let (items, hotkeys) = match frontend {\n\n Frontend::Graphical | Frontend::AnsiTerminal => (\n\n vec![NewGame, Options, Quit],\n\n hashmap!['n' => NewGame, 'o' => Options, 'q' => Quit],\n\n ),\n\n Frontend::Web => (vec![NewGame, Options], hashmap!['n' => NewGame, 'o' => Options]),\n\n };\n", "file_path": "app/src/app.rs", "rank": 83, "score": 64787.393127039395 }, { "content": "enum GameLoopBreak {\n\n GameOver,\n\n Pause,\n\n}\n\n\n", "file_path": "app/src/app.rs", 
"rank": 84, "score": 64775.60537269942 }, { "content": "struct Args {\n\n native_common: NativeCommon,\n\n fullscreen: Option<Fullscreen>,\n\n}\n\n\n\nimpl Args {\n\n fn parser() -> impl meap::Parser<Item = Self> {\n\n meap::let_map! {\n\n let {\n\n native_common = NativeCommon::parser();\n\n fullscreen = flag('f').name(\"fullscreen\").desc(\"start in fullscreen\");\n\n } in {{\n\n let fullscreen = if fullscreen {\n\n Some(Fullscreen)\n\n } else {\n\n None\n\n };\n\n Self { native_common, fullscreen }\n\n }}\n\n }\n\n }\n\n}\n\n\n", "file_path": "graphical/src/main.rs", "rank": 85, "score": 64731.43934485181 }, { "content": "struct Quit;\n\n\n", "file_path": "app/src/app.rs", "rank": 86, "score": 64731.43934485181 }, { "content": "struct Blink {\n\n cycle_length: Duration,\n\n min_alpha: u8,\n\n max_alpha: u8,\n\n}\n\n\n\nimpl Blink {\n\n fn intensity(&self, duration: Duration) -> u8 {\n\n let cycle_length_micros = self.cycle_length.as_micros();\n\n let duration_micros = duration.as_micros();\n\n let progress_through_cycle_micros = duration_micros % cycle_length_micros;\n\n let scaled_progress = (progress_through_cycle_micros * 512) / cycle_length_micros;\n\n if scaled_progress < 256 {\n\n scaled_progress as u8\n\n } else {\n\n (511 - scaled_progress) as u8\n\n }\n\n }\n\n fn alpha(&self, duration: Duration) -> u8 {\n\n let intensity = self.intensity(duration);\n", "file_path": "app/src/game.rs", "rank": 87, "score": 64731.43934485181 }, { "content": "#[derive(Debug, Clone, Serialize, Deserialize)]\n\nenum Path {\n\n Forever(InfiniteStepIter),\n\n Once(StepIter),\n\n Steps {\n\n infinite_step_iter: InfiniteStepIter,\n\n remaining_steps: usize,\n\n },\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct MovementState {\n\n path: Path,\n\n cardinal_step_duration: Duration,\n\n ordinal_step_duration: Duration,\n\n}\n\n\n", "file_path": "game/src/world/realtime_periodic/movement.rs", "rank": 88, "score": 63753.56930963487 }, { "content": 
"#[derive(Clone)]\n\nenum ColEncodeChoice {\n\n TrueColour,\n\n Rgb,\n\n Greyscale,\n\n Ansi,\n\n}\n\n\n\nimpl ColEncodeChoice {\n\n fn parser() -> impl meap::Parser<Item = Self> {\n\n use meap::Parser;\n\n use ColEncodeChoice::*;\n\n meap::choose_at_most_one!(\n\n flag(\"true-colour\").some_if(TrueColour),\n\n flag(\"rgb\").some_if(Rgb),\n\n flag(\"greyscale\").some_if(Greyscale),\n\n flag(\"ansi\").some_if(Ansi),\n\n )\n\n .with_default_general(TrueColour)\n\n }\n\n}\n\n\n", "file_path": "ansi-terminal/src/main.rs", "rank": 89, "score": 63749.82779340692 }, { "content": "#[derive(Serialize, Deserialize, Clone, Copy)]\n\nstruct ScreenShake {\n\n remaining_frames: u8,\n\n direction: Direction,\n\n}\n\n\n\nimpl ScreenShake {\n\n fn coord(&self) -> Coord {\n\n self.direction.coord()\n\n }\n\n fn next(self) -> Option<Self> {\n\n self.remaining_frames.checked_sub(1).map(|remaining_frames| Self {\n\n remaining_frames,\n\n direction: self.direction,\n\n })\n\n }\n\n}\n\n\n", "file_path": "app/src/game.rs", "rank": 90, "score": 63588.47108139562 }, { "content": "struct StorageWrapper {\n\n storage: StaticStorage,\n\n save_key: String,\n\n}\n\n\n\nimpl StorageWrapper {\n\n pub fn save_instance(&mut self, instance: &GameInstance) {\n\n self.storage\n\n .store(&self.save_key, instance, STORAGE_FORMAT)\n\n .expect(\"failed to save instance\");\n\n }\n\n pub fn clear_instance(&mut self) {\n\n let _ = self.storage.remove(&self.save_key);\n\n }\n\n}\n\n\n", "file_path": "app/src/game.rs", "rank": 91, "score": 63580.664300725926 }, { "content": "struct AppData {\n\n frontend: Frontend,\n\n game: GameData,\n\n main_menu: menu::MenuInstanceChooseOrEscape<MainMenuEntry>,\n\n main_menu_type: MainMenuType,\n\n options_menu: menu::MenuInstanceChooseOrEscape<OrBack<OptionsMenuEntry>>,\n\n last_mouse_coord: Coord,\n\n env: Box<dyn Env>,\n\n}\n\n\n", "file_path": "app/src/app.rs", "rank": 92, "score": 63580.664300725926 }, { "content": "struct SelectGame {}\n\nimpl SelectGame {\n\n fn 
new() -> Self {\n\n Self {}\n\n }\n\n}\n\nimpl DataSelector for SelectGame {\n\n type DataInput = AppData;\n\n type DataOutput = GameData;\n\n fn data<'a>(&self, input: &'a Self::DataInput) -> &'a Self::DataOutput {\n\n &input.game\n\n }\n\n fn data_mut<'a>(&self, input: &'a mut Self::DataInput) -> &'a mut Self::DataOutput {\n\n &mut input.game\n\n }\n\n}\n\nimpl ViewSelector for SelectGame {\n\n type ViewInput = AppView;\n\n type ViewOutput = GameView;\n\n fn view<'a>(&self, input: &'a Self::ViewInput) -> &'a Self::ViewOutput {\n\n &input.game\n\n }\n\n fn view_mut<'a>(&self, input: &'a mut Self::ViewInput) -> &'a mut Self::ViewOutput {\n\n &mut input.game\n\n }\n\n}\n\nimpl Selector for SelectGame {}\n\n\n", "file_path": "app/src/app.rs", "rank": 93, "score": 63580.664300725926 }, { "content": "struct Args {\n\n native_common: NativeCommon,\n\n col_encode_choice: ColEncodeChoice,\n\n}\n\n\n\nimpl Args {\n\n fn parser() -> impl meap::Parser<Item = Self> {\n\n meap::let_map! {\n\n let {\n\n native_common = NativeCommon::parser();\n\n col_encode_choice = ColEncodeChoice::parser();\n\n } in {\n\n Self { native_common, col_encode_choice }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "ansi-terminal/src/main.rs", "rank": 94, "score": 63580.664300725926 }, { "content": "struct AppView {\n\n game: GameView,\n\n main_menu: FadeMenuInstanceView,\n\n options_menu: FadeMenuInstanceView,\n\n}\n\n\n\nimpl AppData {\n\n fn new(\n\n game_config: GameConfig,\n\n frontend: Frontend,\n\n controls: Controls,\n\n storage: StaticStorage,\n\n save_key: String,\n\n audio_player: AppAudioPlayer,\n\n rng_seed: RngSeed,\n\n fullscreen: Option<Fullscreen>,\n\n env: Box<dyn Env>,\n\n ) -> Self {\n\n let mut game_data = GameData::new(\n\n game_config,\n", "file_path": "app/src/app.rs", "rank": 95, "score": 63580.664300725926 }, { "content": "struct DecorateGame {}\n\nimpl DecorateGame {\n\n fn new() -> Self {\n\n Self {}\n\n }\n\n}\n\n\n\nimpl Decorate for DecorateGame {\n\n type View = 
AppView;\n\n type Data = AppData;\n\n fn view<E, F, C>(\n\n &self,\n\n data: &Self::Data,\n\n mut event_routine_view: EventRoutineView<E>,\n\n context: ViewContext<C>,\n\n frame: &mut F,\n\n ) where\n\n E: EventRoutine<Data = Self::Data, View = Self::View>,\n\n F: Frame,\n\n C: ColModify,\n\n {\n\n event_routine_view.view(data, context, frame);\n\n }\n\n}\n\n\n", "file_path": "app/src/app.rs", "rank": 96, "score": 63580.664300725926 }, { "content": "#[derive(Clone, Copy, Debug, PartialOrd, Ord, PartialEq, Eq)]\n\nenum OrBack<T> {\n\n Selection(T),\n\n Back,\n\n}\n\n\n", "file_path": "app/src/app.rs", "rank": 97, "score": 63346.082208262895 }, { "content": "pub trait Env {\n\n fn fullscreen(&self) -> bool;\n\n fn fullscreen_requires_restart(&self) -> bool;\n\n fn fullscreen_supported(&self) -> bool;\n\n // hack to get around fact that changing fullscreen mid-game on windows crashes\n\n fn set_fullscreen_init(&self, fullscreen: bool);\n\n fn set_fullscreen(&self, fullscreen: bool);\n\n}\n\npub struct EnvNull;\n\nimpl Env for EnvNull {\n\n fn fullscreen(&self) -> bool {\n\n false\n\n }\n\n fn fullscreen_requires_restart(&self) -> bool {\n\n false\n\n }\n\n fn fullscreen_supported(&self) -> bool {\n\n false\n\n }\n\n fn set_fullscreen(&self, _fullscreen: bool) {}\n\n fn set_fullscreen_init(&self, _fullscreen: bool) {}\n\n}\n\n\n\npub struct Fullscreen;\n\n\n", "file_path": "app/src/app.rs", "rank": 98, "score": 63330.64046616713 }, { "content": "fn main() {\n\n use meap::Parser;\n\n env_logger::init();\n\n let Args {\n\n native_common:\n\n NativeCommon {\n\n rng_seed,\n\n file_storage,\n\n controls,\n\n save_file,\n\n audio_player,\n\n game_config,\n\n },\n\n fullscreen,\n\n } = Args::parser().with_help_default().parse_env_or_exit();\n\n let context = Context::new(ContextDescriptor {\n\n font_bytes: FontBytes {\n\n normal: include_bytes!(\"./fonts/PxPlus_IBM_CGAthin-with-quadrant-blocks.ttf\").to_vec(),\n\n bold: 
include_bytes!(\"./fonts/PxPlus_IBM_CGA-with-quadrant-blocks.ttf\").to_vec(),\n\n },\n", "file_path": "graphical/src/main.rs", "rank": 99, "score": 63278.924373689166 } ]
Rust
alacritty/src/renderer/graphics/draw.rs
SonaliBendre/alacritty-sixel
4b143ad2c623f54610a71dba1687d63a2b0f751a
use std::collections::BTreeMap; use std::mem::{self, MaybeUninit}; use crate::display::content::RenderableCell; use crate::gl::{self, types::*}; use crate::renderer::graphics::{shader, GraphicsRenderer}; use alacritty_terminal::graphics::GraphicId; use alacritty_terminal::index::{Column, Line}; use alacritty_terminal::term::SizeInfo; use log::trace; struct RenderPosition { column: Column, line: Line, offset_x: u16, offset_y: u16, padding_y: u16, } #[derive(Default)] pub struct RenderList { items: BTreeMap<GraphicId, RenderPosition>, } impl RenderList { #[inline] pub fn update(&mut self, cell: &RenderableCell, size_info: &SizeInfo) { if let Some(graphic) = &cell.graphic { let graphic_id = graphic.graphic_id(); if self.items.contains_key(&graphic_id) { return; } let render_item = RenderPosition { column: cell.point.column, line: Line(cell.point.line as i32), offset_x: graphic.offset_x, offset_y: graphic.offset_y, padding_y: size_info.padding_y() as u16, }; self.items.insert(graphic_id, render_item); } } #[inline] pub fn is_empty(&self) -> bool { self.items.is_empty() } pub fn build_vertices(self, renderer: &GraphicsRenderer) -> Vec<shader::Vertex> { use shader::VertexSide::{BottomLeft, BottomRight, TopLeft, TopRight}; let mut vertices = Vec::new(); for (graphics_id, render_item) in self.items { let graphic_texture = match renderer.graphic_textures.get(&graphics_id) { Some(tex) => tex, None => continue, }; vertices.reserve(6); let vertex = shader::Vertex { texture_id: graphic_texture.texture.0, sides: TopLeft, column: render_item.column.0 as GLuint, line: render_item.line.0 as GLuint, height: graphic_texture.height, width: graphic_texture.width, offset_x: render_item.offset_x, offset_y: render_item.offset_y, base_cell_height: graphic_texture.cell_height, padding_y: render_item.padding_y, }; vertices.push(vertex); for &sides in &[TopRight, BottomLeft, TopRight, BottomRight, BottomLeft] { vertices.push(shader::Vertex { sides, ..vertex }); } } vertices } pub fn 
draw(self, renderer: &GraphicsRenderer, size_info: &SizeInfo) { let vertices = self.build_vertices(renderer); unsafe { gl::BindBuffer(gl::ARRAY_BUFFER, renderer.program.vbo); gl::BindVertexArray(renderer.program.vao); gl::UseProgram(renderer.program.id()); gl::Uniform2f( renderer.program.u_cell_dimensions, size_info.cell_width(), size_info.cell_height(), ); gl::Uniform2f( renderer.program.u_view_dimensions, size_info.width(), size_info.height(), ); gl::BlendFuncSeparate(gl::SRC_ALPHA, gl::ONE_MINUS_SRC_ALPHA, gl::SRC_ALPHA, gl::ONE); } let mut batch = [MaybeUninit::uninit(); shader::TEXTURES_ARRAY_SIZE * 6]; let mut batch_size = 0; macro_rules! send_batch { () => { #[allow(unused_assignments)] if batch_size > 0 { trace!("Call glDrawArrays with {} items", batch_size); unsafe { gl::BufferData( gl::ARRAY_BUFFER, (batch_size * mem::size_of::<shader::Vertex>()) as isize, batch.as_ptr().cast(), gl::STREAM_DRAW, ); gl::DrawArrays(gl::TRIANGLES, 0, batch_size as GLint); } batch_size = 0; } }; } let tex_slots_generator = (gl::TEXTURE0..=gl::TEXTURE31) .zip(renderer.program.u_textures.iter()) .zip(0_u32..) 
.map(|((tex_enum, &u_texture), index)| (tex_enum, u_texture, index)); let mut tex_slots = tex_slots_generator.clone(); let mut last_tex_slot = (0, 0); for mut vertex in vertices { if last_tex_slot.0 != vertex.texture_id { last_tex_slot = loop { match tex_slots.next() { None => { send_batch!(); tex_slots = tex_slots_generator.clone(); }, Some((tex_enum, u_texture, index)) => { unsafe { gl::ActiveTexture(tex_enum); gl::BindTexture(gl::TEXTURE_2D, vertex.texture_id); gl::Uniform1i(u_texture, index as GLint); } break (vertex.texture_id, index); }, } }; } vertex.texture_id = last_tex_slot.1; batch[batch_size] = MaybeUninit::new(vertex); batch_size += 1; if batch_size == batch.len() { send_batch!(); } } send_batch!(); unsafe { gl::BlendFunc(gl::SRC1_COLOR, gl::ONE_MINUS_SRC1_COLOR); gl::ActiveTexture(gl::TEXTURE0); gl::BindTexture(gl::TEXTURE_2D, 0); gl::UseProgram(0); gl::BindVertexArray(0); gl::BindBuffer(gl::ARRAY_BUFFER, 0); } } }
use std::collections::BTreeMap; use std::mem::{self, MaybeUninit}; use crate::display::content::RenderableCell; use crate::gl::{self, types::*}; use crate::renderer::graphics::{shader, GraphicsRenderer}; use alacritty_terminal::graphics::GraphicId; use alacritty_terminal::index::{Column, Line}; use alacritty_terminal::term::SizeInfo; use log::trace; struct RenderPosition { column: Column, line: Line, offset_x: u16, offset_y: u16, padding_y: u16, } #[derive(Default)] pub struct RenderList { items: BTreeMap<GraphicId, RenderPosition>, } impl RenderList { #[inline] pub fn update(&mut self, cell: &RenderableCell, size_info: &SizeInfo) { if let Some(graphic) = &cell.graphic { let graphic_id = graphic.graphic_id(); if self.items.contains_key(&graphic_id) { return; } let render_item = RenderPosition { column: cell.point.column, line: Line(cell.point.line as i32), offset_x: graphic.offset_x, offset_y: graphic.offset_
#[inline] pub fn is_empty(&self) -> bool { self.items.is_empty() } pub fn build_vertices(self, renderer: &GraphicsRenderer) -> Vec<shader::Vertex> { use shader::VertexSide::{BottomLeft, BottomRight, TopLeft, TopRight}; let mut vertices = Vec::new(); for (graphics_id, render_item) in self.items { let graphic_texture = match renderer.graphic_textures.get(&graphics_id) { Some(tex) => tex, None => continue, }; vertices.reserve(6); let vertex = shader::Vertex { texture_id: graphic_texture.texture.0, sides: TopLeft, column: render_item.column.0 as GLuint, line: render_item.line.0 as GLuint, height: graphic_texture.height, width: graphic_texture.width, offset_x: render_item.offset_x, offset_y: render_item.offset_y, base_cell_height: graphic_texture.cell_height, padding_y: render_item.padding_y, }; vertices.push(vertex); for &sides in &[TopRight, BottomLeft, TopRight, BottomRight, BottomLeft] { vertices.push(shader::Vertex { sides, ..vertex }); } } vertices } pub fn draw(self, renderer: &GraphicsRenderer, size_info: &SizeInfo) { let vertices = self.build_vertices(renderer); unsafe { gl::BindBuffer(gl::ARRAY_BUFFER, renderer.program.vbo); gl::BindVertexArray(renderer.program.vao); gl::UseProgram(renderer.program.id()); gl::Uniform2f( renderer.program.u_cell_dimensions, size_info.cell_width(), size_info.cell_height(), ); gl::Uniform2f( renderer.program.u_view_dimensions, size_info.width(), size_info.height(), ); gl::BlendFuncSeparate(gl::SRC_ALPHA, gl::ONE_MINUS_SRC_ALPHA, gl::SRC_ALPHA, gl::ONE); } let mut batch = [MaybeUninit::uninit(); shader::TEXTURES_ARRAY_SIZE * 6]; let mut batch_size = 0; macro_rules! 
send_batch { () => { #[allow(unused_assignments)] if batch_size > 0 { trace!("Call glDrawArrays with {} items", batch_size); unsafe { gl::BufferData( gl::ARRAY_BUFFER, (batch_size * mem::size_of::<shader::Vertex>()) as isize, batch.as_ptr().cast(), gl::STREAM_DRAW, ); gl::DrawArrays(gl::TRIANGLES, 0, batch_size as GLint); } batch_size = 0; } }; } let tex_slots_generator = (gl::TEXTURE0..=gl::TEXTURE31) .zip(renderer.program.u_textures.iter()) .zip(0_u32..) .map(|((tex_enum, &u_texture), index)| (tex_enum, u_texture, index)); let mut tex_slots = tex_slots_generator.clone(); let mut last_tex_slot = (0, 0); for mut vertex in vertices { if last_tex_slot.0 != vertex.texture_id { last_tex_slot = loop { match tex_slots.next() { None => { send_batch!(); tex_slots = tex_slots_generator.clone(); }, Some((tex_enum, u_texture, index)) => { unsafe { gl::ActiveTexture(tex_enum); gl::BindTexture(gl::TEXTURE_2D, vertex.texture_id); gl::Uniform1i(u_texture, index as GLint); } break (vertex.texture_id, index); }, } }; } vertex.texture_id = last_tex_slot.1; batch[batch_size] = MaybeUninit::new(vertex); batch_size += 1; if batch_size == batch.len() { send_batch!(); } } send_batch!(); unsafe { gl::BlendFunc(gl::SRC1_COLOR, gl::ONE_MINUS_SRC1_COLOR); gl::ActiveTexture(gl::TEXTURE0); gl::BindTexture(gl::TEXTURE_2D, 0); gl::UseProgram(0); gl::BindVertexArray(0); gl::BindBuffer(gl::ARRAY_BUFFER, 0); } } }
y, padding_y: size_info.padding_y() as u16, }; self.items.insert(graphic_id, render_item); } }
function_block-function_prefixed
[ { "content": "pub fn derive_deserialize<T>(\n\n ident: Ident,\n\n generics: Generics,\n\n fields: Punctuated<Field, T>,\n\n) -> TokenStream {\n\n // Create all necessary tokens for the implementation.\n\n let GenericsStreams { unconstrained, constrained, phantoms } =\n\n generics_streams(generics.params);\n\n let FieldStreams { flatten, match_assignments } = fields_deserializer(&fields);\n\n let visitor = format_ident!(\"{}Visitor\", ident);\n\n\n\n // Generate deserialization impl.\n\n let tokens = quote! {\n\n #[derive(Default)]\n\n #[allow(non_snake_case)]\n\n struct #visitor < #unconstrained > {\n\n #phantoms\n\n }\n\n\n\n impl<'de, #constrained> serde::de::Visitor<'de> for #visitor < #unconstrained > {\n", "file_path": "alacritty_config_derive/src/de_struct.rs", "rank": 0, "score": 176763.3933975818 }, { "content": "/// Parse a color specifier from list of attributes.\n\nfn parse_sgr_color(params: &mut dyn Iterator<Item = u16>) -> Option<Color> {\n\n match params.next() {\n\n Some(2) => Some(Color::Spec(Rgb {\n\n r: u8::try_from(params.next()?).ok()?,\n\n g: u8::try_from(params.next()?).ok()?,\n\n b: u8::try_from(params.next()?).ok()?,\n\n })),\n\n Some(5) => Some(Color::Indexed(u8::try_from(params.next()?).ok()?)),\n\n _ => None,\n\n }\n\n}\n\n\n\n/// C0 set of 7-bit control characters (from ANSI X3.4-1977).\n\n#[allow(non_snake_case)]\n\npub mod C0 {\n\n /// Null filler, terminal should ignore this character.\n\n pub const NUL: u8 = 0x00;\n\n /// Start of Header.\n\n pub const SOH: u8 = 0x01;\n\n /// Start of Text, implied end of header.\n", "file_path": "alacritty_terminal/src/ansi.rs", "rank": 1, "score": 163183.22765541886 }, { "content": "/// Get the length of occupied cells in a line.\n\npub trait LineLength {\n\n /// Calculate the occupied line length.\n\n fn line_length(&self) -> Column;\n\n}\n\n\n\nimpl LineLength for grid::Row<Cell> {\n\n fn line_length(&self) -> Column {\n\n let mut length = Column(0);\n\n\n\n if self[Column(self.len() - 
1)].flags.contains(Flags::WRAPLINE) {\n\n return Column(self.len());\n\n }\n\n\n\n for (index, cell) in self[..].iter().rev().enumerate() {\n\n if cell.c != ' '\n\n || cell.extra.as_ref().map(|extra| extra.zerowidth.is_empty()) == Some(false)\n\n {\n\n length = Column(self.len() - index);\n\n break;\n\n }\n", "file_path": "alacritty_terminal/src/term/cell.rs", "rank": 2, "score": 160521.32505113145 }, { "content": "#[allow(clippy::many_single_char_names)]\n\nfn hls_to_rgb(h: u16, l: u16, s: u16) -> (u16, u16, u16) {\n\n if s == 0 {\n\n return (l, l, l);\n\n }\n\n\n\n let hs = ((h + 240) / 60) % 6;\n\n let lv = l as f64 / 100.0;\n\n\n\n let c2 = f64::abs((2.0 * lv as f64) - 1.0);\n\n let c = (1.0 - c2) * (s as f64 / 100.0);\n\n let x = if hs & 1 == 1 { c } else { 0.0 };\n\n\n\n let rgb = match hs {\n\n 0 => (c, x, 0.),\n\n 1 => (x, c, 0.),\n\n 2 => (0., c, x),\n\n 3 => (0., x, c),\n\n 4 => (x, 0., c),\n\n _ => (c, 0., c),\n\n };\n", "file_path": "alacritty_terminal/src/graphics/sixel.rs", "rank": 3, "score": 158833.08643169687 }, { "content": "#[allow(clippy::all)]\n\nfn cell(c: char) -> Cell {\n\n let mut cell = Cell::default();\n\n cell.c = c;\n\n cell\n\n}\n\n\n", "file_path": "alacritty_terminal/src/grid/tests.rs", "rank": 4, "score": 156098.55137580683 }, { "content": "#[test]\n\nfn shrink_reflow_empty_cell_inside_line() {\n\n let mut grid = Grid::<Cell>::new(1, 5, 3);\n\n grid[Line(0)][Column(0)] = cell('1');\n\n grid[Line(0)][Column(1)] = Cell::default();\n\n grid[Line(0)][Column(2)] = cell('3');\n\n grid[Line(0)][Column(3)] = cell('4');\n\n grid[Line(0)][Column(4)] = Cell::default();\n\n\n\n grid.resize(true, 1, 2);\n\n\n\n assert_eq!(grid.total_lines(), 2);\n\n\n\n assert_eq!(grid[Line(-1)].len(), 2);\n\n assert_eq!(grid[Line(-1)][Column(0)], cell('1'));\n\n assert_eq!(grid[Line(-1)][Column(1)], wrap_cell(' '));\n\n\n\n assert_eq!(grid[Line(0)].len(), 2);\n\n assert_eq!(grid[Line(0)][Column(0)], cell('3'));\n\n assert_eq!(grid[Line(0)][Column(1)], 
cell('4'));\n\n\n", "file_path": "alacritty_terminal/src/grid/tests.rs", "rank": 5, "score": 153874.81797304307 }, { "content": "/// Converts the string slice into a Windows-standard representation for \"W\"-\n\n/// suffixed function variants, which accept UTF-16 encoded string values.\n\npub fn win32_string<S: AsRef<OsStr> + ?Sized>(value: &S) -> Vec<u16> {\n\n OsStr::new(value).encode_wide().chain(once(0)).collect()\n\n}\n", "file_path": "alacritty_terminal/src/tty/windows/mod.rs", "rank": 6, "score": 153457.09750734002 }, { "content": "pub fn initialize(\n\n options: &Options,\n\n event_proxy: EventLoopProxy<Event>,\n\n) -> Result<Option<PathBuf>, log::SetLoggerError> {\n\n log::set_max_level(options.log_level());\n\n\n\n let logger = Logger::new(event_proxy);\n\n let path = logger.file_path();\n\n log::set_boxed_logger(Box::new(logger))?;\n\n\n\n Ok(path)\n\n}\n\n\n\npub struct Logger {\n\n logfile: Mutex<OnDemandLogFile>,\n\n stdout: Mutex<LineWriter<Stdout>>,\n\n event_proxy: Mutex<EventLoopProxy<Event>>,\n\n start: Instant,\n\n}\n\n\n", "file_path": "alacritty/src/logging.rs", "rank": 7, "score": 150928.94135228737 }, { "content": "// Install a panic handler that renders the panic in a classical Windows error\n\n// dialog box as well as writes the panic to STDERR.\n\npub fn attach_handler() {\n\n panic::set_hook(Box::new(|panic_info| {\n\n let _ = writeln!(io::stderr(), \"{}\", panic_info);\n\n let msg = format!(\"{}\\n\\nPress Ctrl-C to Copy\", panic_info);\n\n unsafe {\n\n winuser::MessageBoxW(\n\n ptr::null_mut(),\n\n win32_string(&msg).as_ptr(),\n\n win32_string(\"Alacritty: Runtime Error\").as_ptr(),\n\n winuser::MB_ICONERROR\n\n | winuser::MB_OK\n\n | winuser::MB_SETFOREGROUND\n\n | winuser::MB_TASKMODAL,\n\n );\n\n }\n\n }));\n\n}\n", "file_path": "alacritty/src/panic.rs", "rank": 8, "score": 148716.85325673147 }, { "content": "#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)]\n\nstruct ColorRegister(u16);\n\n\n\n/// Number of color 
registers.\n\npub const MAX_COLOR_REGISTERS: usize = 1024;\n\n\n\n/// Color register for transparent pixels.\n\nconst REG_TRANSPARENT: ColorRegister = ColorRegister(u16::MAX);\n\n\n\n/// Number of parameters allowed in a single Sixel command.\n\nconst MAX_COMMAND_PARAMS: usize = 5;\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n /// Image dimensions are too big.\n\n TooBigImage { width: usize, height: usize },\n\n\n\n /// A component in a color introducer is not valid.\n\n InvalidColorComponent { register: u16, component_value: u16 },\n\n\n\n /// The coordinate system to define the color register is not valid.\n", "file_path": "alacritty_terminal/src/graphics/sixel.rs", "rank": 9, "score": 147247.77647077036 }, { "content": "#[cfg(not(windows))]\n\npub fn foreground_process_path(\n\n master_fd: RawFd,\n\n shell_pid: u32,\n\n) -> Result<PathBuf, Box<dyn Error>> {\n\n let mut pid = unsafe { libc::tcgetpgrp(master_fd) };\n\n if pid < 0 {\n\n pid = shell_pid as pid_t;\n\n }\n\n\n\n #[cfg(not(any(target_os = \"macos\", target_os = \"freebsd\")))]\n\n let link_path = format!(\"/proc/{}/cwd\", pid);\n\n #[cfg(target_os = \"freebsd\")]\n\n let link_path = format!(\"/compat/linux/proc/{}/cwd\", pid);\n\n\n\n #[cfg(not(target_os = \"macos\"))]\n\n let cwd = fs::read_link(link_path)?;\n\n\n\n #[cfg(target_os = \"macos\")]\n\n let cwd = macos::proc::cwd(pid)?;\n\n\n\n Ok(cwd)\n\n}\n", "file_path": "alacritty/src/daemon.rs", "rank": 10, "score": 146607.46598626208 }, { "content": "/// Returns the rasterized glyph if the character is part of the built-in font.\n\npub fn builtin_glyph(\n\n character: char,\n\n metrics: &Metrics,\n\n offset: &Delta<i8>,\n\n glyph_offset: &Delta<i8>,\n\n) -> Option<RasterizedGlyph> {\n\n let mut glyph = match character {\n\n // Box drawing characters and block elements.\n\n '\\u{2500}'..='\\u{259f}' => box_drawing(character, metrics, offset),\n\n _ => return None,\n\n };\n\n\n\n // Since we want to ignore `glyph_offset` for the built-in font, 
subtract it to compensate its\n\n // addition when loading glyphs in the renderer.\n\n glyph.left -= glyph_offset.x as i32;\n\n glyph.top -= glyph_offset.y as i32;\n\n\n\n Some(glyph)\n\n}\n\n\n", "file_path": "alacritty/src/renderer/builtin_font.rs", "rank": 11, "score": 144599.0916073835 }, { "content": "pub fn set_locale_environment() {\n\n let env_locale_c = CString::new(\"\").unwrap();\n\n let env_locale_ptr = unsafe { setlocale(LC_ALL, env_locale_c.as_ptr()) };\n\n if !env_locale_ptr.is_null() {\n\n let env_locale = unsafe { CStr::from_ptr(env_locale_ptr).to_string_lossy() };\n\n\n\n // Assume `C` locale means unchanged, since it is the default anyways.\n\n if env_locale != \"C\" {\n\n debug!(\"Using environment locale: {}\", env_locale);\n\n return;\n\n }\n\n }\n\n\n\n let system_locale = system_locale();\n\n\n\n // Set locale to system locale.\n\n let system_locale_c = CString::new(system_locale.clone()).expect(\"nul byte in system locale\");\n\n let lc_all = unsafe { setlocale(LC_ALL, system_locale_c.as_ptr()) };\n\n\n\n // Check if system locale was valid or not.\n", "file_path": "alacritty/src/macos/locale.rs", "rank": 12, "score": 144593.78964954644 }, { "content": "fn wrap_cell(c: char) -> Cell {\n\n let mut cell = cell(c);\n\n cell.flags.insert(Flags::WRAPLINE);\n\n cell\n\n}\n", "file_path": "alacritty_terminal/src/grid/tests.rs", "rank": 13, "score": 144320.72727268437 }, { "content": "/// Check if there is a hint highlighted at the specified point.\n\npub fn highlighted_at<T>(\n\n term: &Term<T>,\n\n config: &UiConfig,\n\n point: Point,\n\n mouse_mods: ModifiersState,\n\n) -> Option<HintMatch> {\n\n let mouse_mode = term.mode().intersects(TermMode::MOUSE_MODE);\n\n\n\n config.hints.enabled.iter().find_map(|hint| {\n\n // Check if all required modifiers are pressed.\n\n let highlight = hint.mouse.map_or(false, |mouse| {\n\n mouse.enabled\n\n && mouse_mods.contains(mouse.mods.0)\n\n && (!mouse_mode || mouse_mods.contains(ModifiersState::SHIFT))\n\n 
});\n\n if !highlight {\n\n return None;\n\n }\n\n\n\n hint.regex.with_compiled(|regex| {\n", "file_path": "alacritty/src/display/hint.rs", "rank": 14, "score": 140929.48152762375 }, { "content": "#[cfg(not(windows))]\n\npub fn spawn_daemon<I, S>(\n\n program: &str,\n\n args: I,\n\n master_fd: RawFd,\n\n shell_pid: u32,\n\n) -> io::Result<()>\n\nwhere\n\n I: IntoIterator<Item = S> + Copy,\n\n S: AsRef<OsStr>,\n\n{\n\n let mut command = Command::new(program);\n\n command.args(args).stdin(Stdio::null()).stdout(Stdio::null()).stderr(Stdio::null());\n\n if let Ok(cwd) = foreground_process_path(master_fd, shell_pid) {\n\n command.current_dir(cwd);\n\n }\n\n unsafe {\n\n command\n\n .pre_exec(|| {\n\n match libc::fork() {\n\n -1 => return Err(io::Error::last_os_error()),\n", "file_path": "alacritty/src/daemon.rs", "rank": 15, "score": 136266.49846639915 }, { "content": "/// Find last non-empty cell in line.\n\nfn last_occupied_in_line<T>(term: &Term<T>, line: Line) -> Option<Point> {\n\n (0..term.columns())\n\n .map(|col| Point::new(line, Column(col)))\n\n .rfind(|&point| !is_space(term, point))\n\n}\n\n\n", "file_path": "alacritty_terminal/src/vi_mode.rs", "rank": 16, "score": 134518.09029123338 }, { "content": "/// Find first non-empty cell in line.\n\nfn first_occupied_in_line<T>(term: &Term<T>, line: Line) -> Option<Point> {\n\n (0..term.columns())\n\n .map(|col| Point::new(line, Column(col)))\n\n .find(|&point| !is_space(term, point))\n\n}\n\n\n", "file_path": "alacritty_terminal/src/vi_mode.rs", "rank": 17, "score": 134518.09029123338 }, { "content": "#[derive(Serialize, Deserialize, Default, Debug, Clone, Eq, PartialEq)]\n\nstruct CellExtra {\n\n zerowidth: Vec<char>,\n\n\n\n #[serde(skip)]\n\n graphic: Option<Box<GraphicCell>>,\n\n}\n\n\n\n/// Content and attributes of a single cell in the terminal grid.\n\n#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]\n\npub struct Cell {\n\n pub c: char,\n\n pub fg: Color,\n\n pub bg: Color,\n\n pub flags: 
Flags,\n\n #[serde(default)]\n\n extra: Option<Box<CellExtra>>,\n\n}\n\n\n\nimpl Default for Cell {\n\n #[inline]\n", "file_path": "alacritty_terminal/src/term/cell.rs", "rank": 18, "score": 133382.84682963826 }, { "content": "/// Setup environment variables.\n\npub fn setup_env(config: &Config) {\n\n // Default to 'alacritty' terminfo if it is available, otherwise\n\n // default to 'xterm-256color'. May be overridden by user's config\n\n // below.\n\n let terminfo = if terminfo_exists(\"alacritty\") { \"alacritty\" } else { \"xterm-256color\" };\n\n env::set_var(\"TERM\", terminfo);\n\n\n\n // Advertise 24-bit color support.\n\n env::set_var(\"COLORTERM\", \"truecolor\");\n\n\n\n // Prevent child processes from inheriting startup notification env.\n\n env::remove_var(\"DESKTOP_STARTUP_ID\");\n\n\n\n // Set env vars from config.\n\n for (key, value) in config.env.iter() {\n\n env::set_var(key, value);\n\n }\n\n}\n\n\n", "file_path": "alacritty_terminal/src/tty/mod.rs", "rank": 19, "score": 130487.67661160126 }, { "content": "pub fn default_key_bindings() -> Vec<KeyBinding> {\n\n let mut bindings = bindings!(\n\n KeyBinding;\n\n Copy; Action::Copy;\n\n Copy, +BindingMode::VI; Action::ClearSelection;\n\n Paste, ~BindingMode::VI; Action::Paste;\n\n L, ModifiersState::CTRL; Action::ClearLogNotice;\n\n L, ModifiersState::CTRL, ~BindingMode::VI, ~BindingMode::SEARCH;\n\n Action::Esc(\"\\x0c\".into());\n\n Tab, ModifiersState::SHIFT, ~BindingMode::VI, ~BindingMode::SEARCH;\n\n Action::Esc(\"\\x1b[Z\".into());\n\n Back, ModifiersState::ALT, ~BindingMode::VI, ~BindingMode::SEARCH;\n\n Action::Esc(\"\\x1b\\x7f\".into());\n\n Back, ModifiersState::SHIFT, ~BindingMode::VI, ~BindingMode::SEARCH;\n\n Action::Esc(\"\\x7f\".into());\n\n Home, ModifiersState::SHIFT, ~BindingMode::ALT_SCREEN; Action::ScrollToTop;\n\n End, ModifiersState::SHIFT, ~BindingMode::ALT_SCREEN; Action::ScrollToBottom;\n\n PageUp, ModifiersState::SHIFT, ~BindingMode::ALT_SCREEN; Action::ScrollPageUp;\n\n 
PageDown, ModifiersState::SHIFT, ~BindingMode::ALT_SCREEN; Action::ScrollPageDown;\n\n Home, ModifiersState::SHIFT, +BindingMode::ALT_SCREEN,\n", "file_path": "alacritty/src/config/bindings.rs", "rank": 20, "score": 128725.06823461014 }, { "content": "#[cfg(test)]\n\npub fn platform_key_bindings() -> Vec<KeyBinding> {\n\n vec![]\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]\n\npub enum Key {\n\n Scancode(u32),\n\n Keycode(VirtualKeyCode),\n\n}\n\n\n\nimpl<'a> Deserialize<'a> for Key {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'a>,\n\n {\n\n let value = SerdeValue::deserialize(deserializer)?;\n\n match u32::deserialize(value.clone()) {\n\n Ok(scancode) => Ok(Key::Scancode(scancode)),\n\n Err(_) => {\n\n let keycode = VirtualKeyCode::deserialize(value).map_err(D::Error::custom)?;\n", "file_path": "alacritty/src/config/bindings.rs", "rank": 21, "score": 128725.06823461014 }, { "content": "pub fn default_mouse_bindings() -> Vec<MouseBinding> {\n\n bindings!(\n\n MouseBinding;\n\n MouseButton::Right; MouseAction::ExpandSelection;\n\n MouseButton::Right, ModifiersState::CTRL; MouseAction::ExpandSelection;\n\n MouseButton::Middle, ~BindingMode::VI; Action::PasteSelection;\n\n )\n\n}\n\n\n", "file_path": "alacritty/src/config/bindings.rs", "rank": 22, "score": 128725.06823461014 }, { "content": "pub fn new(config: &PtyConfig, size: &SizeInfo) -> Option<Pty> {\n\n let mut pty_handle = 0 as HPCON;\n\n\n\n // Passing 0 as the size parameter allows the \"system default\" buffer\n\n // size to be used. 
There may be small performance and memory advantages\n\n // to be gained by tuning this in the future, but it's likely a reasonable\n\n // start point.\n\n let (conout, conout_pty_handle) = miow::pipe::anonymous(0).unwrap();\n\n let (conin_pty_handle, conin) = miow::pipe::anonymous(0).unwrap();\n\n\n\n let coord =\n\n coord_from_sizeinfo(size).expect(\"Overflow when creating initial size on pseudoconsole\");\n\n\n\n // Create the Pseudo Console, using the pipes.\n\n let result = unsafe {\n\n CreatePseudoConsole(\n\n coord,\n\n conin_pty_handle.into_raw_handle(),\n\n conout_pty_handle.into_raw_handle(),\n\n 0,\n", "file_path": "alacritty_terminal/src/tty/windows/conpty.rs", "rank": 23, "score": 126767.08866666342 }, { "content": "/// Load the configuration file.\n\npub fn load(options: &Options) -> UiConfig {\n\n let config_options = options.config_options.clone();\n\n let config_path = options.config_file.clone().or_else(installed_config);\n\n\n\n // Load the config using the following fallback behavior:\n\n // - Config path + CLI overrides\n\n // - CLI overrides\n\n // - Default\n\n let mut config = config_path\n\n .as_ref()\n\n .and_then(|config_path| load_from(config_path, config_options.clone()).ok())\n\n .unwrap_or_else(|| {\n\n let mut config = UiConfig::deserialize(config_options).unwrap_or_default();\n\n match config_path {\n\n Some(config_path) => config.config_paths.push(config_path),\n\n None => info!(target: LOG_TARGET_CONFIG, \"No config file found; using default\"),\n\n }\n\n config\n\n });\n\n\n\n after_loading(&mut config, options);\n\n\n\n config\n\n}\n\n\n", "file_path": "alacritty/src/config/mod.rs", "rank": 24, "score": 126589.86139135664 }, { "content": "/// Helper to build a COORD from a SizeInfo, returning None in overflow cases.\n\nfn coord_from_sizeinfo(size: &SizeInfo) -> Option<COORD> {\n\n let lines = size.screen_lines();\n\n let columns = size.columns();\n\n\n\n if columns <= i16::MAX as usize && lines <= i16::MAX as usize {\n\n 
Some(COORD { X: columns as i16, Y: lines as i16 })\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "alacritty_terminal/src/tty/windows/conpty.rs", "rank": 25, "score": 122198.97475342007 }, { "content": "#[proc_macro_derive(ConfigDeserialize, attributes(config))]\n\npub fn derive_config_deserialize(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n\n\n match input.data {\n\n Data::Struct(DataStruct { fields: Fields::Named(fields), .. }) => {\n\n de_struct::derive_deserialize(input.ident, input.generics, fields.named)\n\n },\n\n Data::Enum(data_enum) => de_enum::derive_deserialize(input.ident, data_enum),\n\n _ => Error::new(input.ident.span(), UNSUPPORTED_ERROR).to_compile_error().into(),\n\n }\n\n}\n\n\n\n/// Verify that a token path ends with a specific segment.\n\npub(crate) fn path_ends_with(path: &Path, segment: &str) -> bool {\n\n let segments = path.segments.iter();\n\n segments.last().map_or(false, |s| s.ident == segment)\n\n}\n", "file_path": "alacritty_config_derive/src/lib.rs", "rank": 26, "score": 119961.79253352123 }, { "content": "/// Merge two serde structures.\n\n///\n\n/// This will take all values from `replacement` and use `base` whenever a value isn't present in\n\n/// `replacement`.\n\npub fn merge(base: Value, replacement: Value) -> Value {\n\n match (base, replacement) {\n\n (Value::Sequence(mut base), Value::Sequence(mut replacement)) => {\n\n base.append(&mut replacement);\n\n Value::Sequence(base)\n\n },\n\n (Value::Mapping(base), Value::Mapping(replacement)) => {\n\n Value::Mapping(merge_mapping(base, replacement))\n\n },\n\n (value, Value::Null) => value,\n\n (_, value) => value,\n\n }\n\n}\n\n\n", "file_path": "alacritty/src/config/serde_utils.rs", "rank": 27, "score": 116995.55211279354 }, { "content": "pub fn cwd(pid: c_int) -> Result<PathBuf, Error> {\n\n let mut info = MaybeUninit::<sys::proc_vnodepathinfo>::uninit();\n\n let info_ptr = info.as_mut_ptr() as *mut c_void;\n\n let size 
= mem::size_of::<sys::proc_vnodepathinfo>() as c_int;\n\n\n\n let c_str = unsafe {\n\n let pidinfo_size = sys::proc_pidinfo(pid, sys::PROC_PIDVNODEPATHINFO, 0, info_ptr, size);\n\n match pidinfo_size {\n\n c if c < 0 => return Err(io::Error::last_os_error().into()),\n\n s if s != size => return Err(Error::InvalidSize),\n\n _ => CStr::from_ptr(info.assume_init().pvi_cdir.vip_path.as_ptr()),\n\n }\n\n };\n\n\n\n Ok(CString::from(c_str).into_string().map(PathBuf::from)?)\n\n}\n\n\n\n/// Bindings for libproc.\n\n#[allow(non_camel_case_types)]\n\nmod sys {\n", "file_path": "alacritty/src/macos/proc.rs", "rank": 28, "score": 116990.67851640368 }, { "content": "/// Create a new TTY and return a handle to interact with it.\n\npub fn new(config: &PtyConfig, size: &SizeInfo, window_id: Option<usize>) -> Result<Pty> {\n\n let (master, slave) = make_pty(size.to_winsize());\n\n\n\n #[cfg(any(target_os = \"linux\", target_os = \"macos\"))]\n\n if let Ok(mut termios) = termios::tcgetattr(master) {\n\n // Set character encoding to UTF-8.\n\n termios.input_flags.set(InputFlags::IUTF8, true);\n\n let _ = termios::tcsetattr(master, SetArg::TCSANOW, &termios);\n\n }\n\n\n\n let mut buf = [0; 1024];\n\n let pw = get_pw_entry(&mut buf);\n\n\n\n let shell = match config.shell.as_ref() {\n\n Some(shell) => Cow::Borrowed(shell),\n\n None => Cow::Owned(default_shell(&pw)),\n\n };\n\n\n\n let mut builder = Command::new(shell.program());\n\n for arg in shell.args() {\n", "file_path": "alacritty_terminal/src/tty/unix.rs", "rank": 29, "score": 113974.21874317012 }, { "content": "pub fn new(config: &PtyConfig, size: &SizeInfo, _window_id: Option<usize>) -> Result<Pty> {\n\n conpty::new(config, size).ok_or_else(|| Error::new(ErrorKind::Other, \"failed to spawn conpty\"))\n\n}\n\n\n\nimpl Pty {\n\n fn new(\n\n backend: impl Into<Backend>,\n\n conout: impl Into<ReadPipe>,\n\n conin: impl Into<WritePipe>,\n\n child_watcher: ChildExitWatcher,\n\n ) -> Self {\n\n Self {\n\n backend: 
backend.into(),\n\n conout: conout.into(),\n\n conin: conin.into(),\n\n read_token: 0.into(),\n\n write_token: 0.into(),\n\n child_event_token: 0.into(),\n\n child_watcher,\n\n }\n", "file_path": "alacritty_terminal/src/tty/windows/mod.rs", "rank": 30, "score": 112730.93870255782 }, { "content": "/// Trait for determining if a reset should be performed.\n\npub trait ResetDiscriminant<T> {\n\n /// Value based on which equality for the reset will be determined.\n\n fn discriminant(&self) -> T;\n\n}\n\n\n\nimpl<T: Copy> ResetDiscriminant<T> for T {\n\n fn discriminant(&self) -> T {\n\n *self\n\n }\n\n}\n\n\n\nimpl ResetDiscriminant<Color> for Cell {\n\n fn discriminant(&self) -> Color {\n\n self.bg\n\n }\n\n}\n\n\n\n/// Dynamically allocated cell content.\n\n///\n\n/// This storage is reserved for cell attributes which are rarely set. This allows reducing the\n\n/// allocation required ahead of time for every cell, with some additional overhead when the extra\n\n/// storage is actually required.\n", "file_path": "alacritty_terminal/src/term/cell.rs", "rank": 31, "score": 111974.61360704279 }, { "content": "pub trait GridCell: Sized {\n\n /// Check if the cell contains any content.\n\n fn is_empty(&self) -> bool;\n\n\n\n /// Perform an opinionated cell reset based on a template cell.\n\n fn reset(&mut self, template: &Self);\n\n\n\n fn flags(&self) -> &Flags;\n\n fn flags_mut(&mut self) -> &mut Flags;\n\n}\n\n\n\n#[derive(Debug, Default, Clone, PartialEq, Eq)]\n\npub struct Cursor<T> {\n\n /// The location of this cursor.\n\n pub point: Point,\n\n\n\n /// Template cell when using this cursor.\n\n pub template: T,\n\n\n\n /// Currently configured graphic character sets.\n", "file_path": "alacritty_terminal/src/grid/mod.rs", "rank": 32, "score": 111974.61360704279 }, { "content": "#[inline]\n\npub fn viewport_to_point(display_offset: usize, point: Point<usize>) -> Point {\n\n let line = Line(point.line as i32) - display_offset;\n\n Point::new(line, 
point.column)\n\n}\n\n\n\n/// Calculate the cell dimensions based on font metrics.\n\n///\n\n/// This will return a tuple of the cell width and height.\n", "file_path": "alacritty/src/display/mod.rs", "rank": 33, "score": 111353.45275966905 }, { "content": "/// Attempt to reload the configuration file.\n\npub fn reload(config_path: &Path, options: &Options) -> Result<UiConfig> {\n\n // Load config, propagating errors.\n\n let config_options = options.config_options.clone();\n\n let mut config = load_from(config_path, config_options)?;\n\n\n\n after_loading(&mut config, options);\n\n\n\n Ok(config)\n\n}\n\n\n", "file_path": "alacritty/src/config/mod.rs", "rank": 34, "score": 111347.974094768 }, { "content": "#[cfg(windows)]\n\npub fn spawn_daemon<I, S>(program: &str, args: I) -> io::Result<()>\n\nwhere\n\n I: IntoIterator<Item = S> + Copy,\n\n S: AsRef<OsStr>,\n\n{\n\n // Setting all the I/O handles to null and setting the\n\n // CREATE_NEW_PROCESS_GROUP and CREATE_NO_WINDOW has the effect\n\n // that console applications will run without opening a new\n\n // console window.\n\n Command::new(program)\n\n .args(args)\n\n .stdin(Stdio::null())\n\n .stdout(Stdio::null())\n\n .stderr(Stdio::null())\n\n .creation_flags(CREATE_NEW_PROCESS_GROUP | CREATE_NO_WINDOW)\n\n .spawn()\n\n .map(|_| ())\n\n}\n\n\n\n/// Start a new process in the background.\n", "file_path": "alacritty/src/daemon.rs", "rank": 35, "score": 110452.10036120069 }, { "content": "/// Send a message to the active Alacritty socket.\n\npub fn send_message(socket: Option<PathBuf>, message: SocketMessage) -> IoResult<()> {\n\n let mut socket = find_socket(socket)?;\n\n\n\n let message = serde_json::to_string(&message)?;\n\n socket.write_all(message[..].as_bytes())?;\n\n let _ = socket.flush();\n\n\n\n Ok(())\n\n}\n\n\n\n/// Directory for the IPC socket file.\n", "file_path": "alacritty/src/ipc.rs", "rank": 36, "score": 109853.16054167066 }, { "content": "pub fn derive_deserialize(ident: Ident, data_enum: 
DataEnum) -> TokenStream {\n\n let visitor = format_ident!(\"{}Visitor\", ident);\n\n\n\n // Create match arm streams and get a list with all available values.\n\n let mut match_arms_stream = TokenStream2::new();\n\n let mut available_values = String::from(\"one of \");\n\n for variant in data_enum.variants.iter().filter(|variant| {\n\n // Skip deserialization for `#[config(skip)]` fields.\n\n variant.attrs.iter().all(|attr| {\n\n !crate::path_ends_with(&attr.path, \"config\") || attr.tokens.to_string() != \"(skip)\"\n\n })\n\n }) {\n\n let variant_ident = &variant.ident;\n\n let variant_str = variant_ident.to_string();\n\n available_values = format!(\"{}`{}`, \", available_values, variant_str);\n\n\n\n let literal = variant_str.to_lowercase();\n\n\n\n match_arms_stream.extend(quote! {\n\n #literal => Ok(#ident :: #variant_ident),\n", "file_path": "alacritty_config_derive/src/de_enum.rs", "rank": 37, "score": 109499.0378968891 }, { "content": "#[inline]\n\npub fn point_to_viewport(display_offset: usize, point: Point) -> Option<Point<usize>> {\n\n let viewport_line = point.line.0 + display_offset as i32;\n\n usize::try_from(viewport_line).ok().map(|line| Point::new(line, point.column))\n\n}\n\n\n\n/// Convert a viewport relative point to a terminal point.\n", "file_path": "alacritty/src/display/mod.rs", "rank": 38, "score": 107696.84243072779 }, { "content": "pub fn watch(mut paths: Vec<PathBuf>, event_proxy: EventLoopProxy<Event>) {\n\n // Don't monitor config if there is no path to watch.\n\n if paths.is_empty() {\n\n return;\n\n }\n\n\n\n // Canonicalize paths, keeping the base paths for symlinks.\n\n for i in 0..paths.len() {\n\n if let Ok(canonical_path) = paths[i].canonicalize() {\n\n match paths[i].symlink_metadata() {\n\n Ok(metadata) if metadata.file_type().is_symlink() => paths.push(canonical_path),\n\n _ => paths[i] = canonical_path,\n\n }\n\n }\n\n }\n\n\n\n // The Duration argument is a debouncing period.\n\n let (tx, rx) = mpsc::channel();\n\n let mut 
watcher = match watcher(tx, DEBOUNCE_DELAY) {\n\n Ok(watcher) => watcher,\n", "file_path": "alacritty/src/config/monitor.rs", "rank": 39, "score": 104870.71122311024 }, { "content": "/// Create an IPC socket.\n\npub fn spawn_ipc_socket(options: &Options, event_proxy: EventLoopProxy<Event>) -> Option<PathBuf> {\n\n // Create the IPC socket and export its path as env variable if necessary.\n\n let socket_path = options.socket.clone().unwrap_or_else(|| {\n\n let mut path = socket_dir();\n\n path.push(format!(\"{}-{}.sock\", socket_prefix(), process::id()));\n\n path\n\n });\n\n env::set_var(ALACRITTY_SOCKET_ENV, socket_path.as_os_str());\n\n\n\n let listener = match UnixListener::bind(&socket_path) {\n\n Ok(listener) => listener,\n\n Err(err) => {\n\n warn!(\"Unable to create socket: {:?}\", err);\n\n return None;\n\n },\n\n };\n\n\n\n // Spawn a thread to listen on the IPC socket.\n\n thread::spawn_named(\"socket listener\", move || {\n\n let mut data = String::new();\n", "file_path": "alacritty/src/ipc.rs", "rank": 40, "score": 103538.62297688841 }, { "content": "/// Like `thread::spawn`, but with a `name` argument.\n\npub fn spawn_named<F, T, S>(name: S, f: F) -> JoinHandle<T>\n\nwhere\n\n F: FnOnce() -> T + Send + 'static,\n\n T: Send + 'static,\n\n S: Into<String>,\n\n{\n\n Builder::new().name(name.into()).spawn(f).expect(\"thread spawn works\")\n\n}\n", "file_path": "alacritty_terminal/src/thread.rs", "rank": 41, "score": 102643.50837354192 }, { "content": "/// Create the deserializers for match arms and flattened fields.\n\nfn fields_deserializer<T>(fields: &Punctuated<Field, T>) -> FieldStreams {\n\n let mut field_streams = FieldStreams::default();\n\n\n\n // Create the deserialization stream for each field.\n\n for field in fields.iter() {\n\n if let Err(err) = field_deserializer(&mut field_streams, field) {\n\n field_streams.flatten = err.to_compile_error();\n\n return field_streams;\n\n }\n\n }\n\n\n\n field_streams\n\n}\n\n\n", "file_path": 
"alacritty_config_derive/src/de_struct.rs", "rank": 42, "score": 93096.5515183445 }, { "content": "#[inline]\n\nfn compute_cell_size(config: &UiConfig, metrics: &crossfont::Metrics) -> (f32, f32) {\n\n let offset_x = f64::from(config.font.offset.x);\n\n let offset_y = f64::from(config.font.offset.y);\n\n (\n\n (metrics.average_advance + offset_x).floor().max(1.) as f32,\n\n (metrics.line_height + offset_y).floor().max(1.) as f32,\n\n )\n\n}\n\n\n", "file_path": "alacritty/src/display/mod.rs", "rank": 43, "score": 92021.44103302553 }, { "content": "/// Create the necessary generics annotations.\n\n///\n\n/// This will create three different token streams, which might look like this:\n\n/// - unconstrained: `T`\n\n/// - constrained: `T: Default + Deserialize<'de>`\n\n/// - phantoms: `T: PhantomData<T>,`\n\nfn generics_streams<T>(params: Punctuated<GenericParam, T>) -> GenericsStreams {\n\n let mut generics = GenericsStreams::default();\n\n\n\n for generic in params {\n\n // NOTE: Lifetimes and const params are not supported.\n\n if let GenericParam::Type(TypeParam { ident, .. }) = generic {\n\n generics.unconstrained.extend(quote!( #ident , ));\n\n generics.constrained.extend(quote! {\n\n #ident : Default + serde::Deserialize<'de> ,\n\n });\n\n generics.phantoms.extend(quote! {\n\n #ident : std::marker::PhantomData < #ident >,\n\n });\n\n }\n\n }\n\n\n\n generics\n\n}\n", "file_path": "alacritty_config_derive/src/de_struct.rs", "rank": 44, "score": 91659.53731340518 }, { "content": "/// Append a single field deserializer to the stream.\n\nfn field_deserializer(field_streams: &mut FieldStreams, field: &Field) -> Result<(), Error> {\n\n let ident = field.ident.as_ref().expect(\"unreachable tuple struct\");\n\n let literal = ident.to_string();\n\n let mut literals = vec![literal.clone()];\n\n\n\n // Create default stream for deserializing fields.\n\n let mut match_assignment_stream = quote! 
{\n\n match serde::Deserialize::deserialize(value) {\n\n Ok(value) => config.#ident = value,\n\n Err(err) => {\n\n log::error!(target: #LOG_TARGET, \"Config error: {}: {}\", #literal, err);\n\n },\n\n }\n\n };\n\n\n\n // Iterate over all #[config(...)] attributes.\n\n for attr in field.attrs.iter().filter(|attr| crate::path_ends_with(&attr.path, \"config\")) {\n\n let parsed = match attr.parse_args::<Attr>() {\n\n Ok(parsed) => parsed,\n\n Err(_) => continue,\n", "file_path": "alacritty_config_derive/src/de_struct.rs", "rank": 45, "score": 89074.4612599176 }, { "content": "/// Field attribute.\n\nstruct Attr {\n\n ident: String,\n\n param: Option<LitStr>,\n\n}\n\n\n\nimpl Parse for Attr {\n\n fn parse(input: ParseStream<'_>) -> parse::Result<Self> {\n\n let ident = input.parse::<Ident>()?.to_string();\n\n let param = input.parse::<Token![=]>().and_then(|_| input.parse()).ok();\n\n Ok(Self { ident, param })\n\n }\n\n}\n\n\n\n/// Storage for all necessary generics information.\n", "file_path": "alacritty_config_derive/src/de_struct.rs", "rank": 46, "score": 88692.87371026236 }, { "content": "#[derive(Default)]\n\nstruct GenericsStreams {\n\n unconstrained: TokenStream2,\n\n constrained: TokenStream2,\n\n phantoms: TokenStream2,\n\n}\n\n\n", "file_path": "alacritty_config_derive/src/de_struct.rs", "rank": 47, "score": 87905.53954707077 }, { "content": "#[derive(Default)]\n\nstruct FieldStreams {\n\n match_assignments: TokenStream2,\n\n flatten: TokenStream2,\n\n}\n\n\n", "file_path": "alacritty_config_derive/src/de_struct.rs", "rank": 48, "score": 87905.53954707077 }, { "content": "fn main() {\n\n let mut version = String::from(env!(\"CARGO_PKG_VERSION\"));\n\n if let Some(commit_hash) = commit_hash() {\n\n version = format!(\"{} ({})\", version, commit_hash);\n\n }\n\n println!(\"cargo:rustc-env=VERSION={}\", version);\n\n\n\n let dest = env::var(\"OUT_DIR\").unwrap();\n\n let mut file = File::create(&Path::new(&dest).join(\"gl_bindings.rs\")).unwrap();\n\n\n\n 
Registry::new(Api::Gl, (3, 3), Profile::Core, Fallbacks::All, [\"GL_ARB_blend_func_extended\"])\n\n .write_bindings(GlobalGenerator, &mut file)\n\n .unwrap();\n\n\n\n #[cfg(windows)]\n\n embed_resource::compile(\"./windows/windows.rc\");\n\n}\n\n\n", "file_path": "alacritty/build.rs", "rank": 49, "score": 80390.37393021921 }, { "content": "#[derive(Copy, Clone)]\n\nstruct Mock;\n\n\n\nimpl EventListener for Mock {\n\n fn send_event(&self, _event: Event) {}\n\n}\n\n\n", "file_path": "alacritty_terminal/tests/ref.rs", "rank": 50, "score": 80027.07953859086 }, { "content": "/// Temporary files stored for Alacritty.\n\n///\n\n/// This stores temporary files to automate their destruction through its `Drop` implementation.\n\nstruct TemporaryFiles {\n\n #[cfg(unix)]\n\n socket_path: Option<PathBuf>,\n\n log_file: Option<PathBuf>,\n\n}\n\n\n\nimpl Drop for TemporaryFiles {\n\n fn drop(&mut self) {\n\n // Clean up the IPC socket file.\n\n #[cfg(unix)]\n\n if let Some(socket_path) = &self.socket_path {\n\n let _ = fs::remove_file(socket_path);\n\n }\n\n\n\n // Clean up logfile.\n\n if let Some(log_file) = &self.log_file {\n\n if fs::remove_file(log_file).is_ok() {\n\n let _ = writeln!(io::stdout(), \"Deleted log file at \\\"{}\\\"\", log_file.display());\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "alacritty/src/main.rs", "rank": 51, "score": 80027.07953859086 }, { "content": "#[repr(C)]\n\n#[derive(Debug, Clone, Copy)]\n\nstruct Vertex {\n\n // Normalized screen coordinates.\n\n x: f32,\n\n y: f32,\n\n\n\n // Color.\n\n r: u8,\n\n g: u8,\n\n b: u8,\n\n a: u8,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct RectRenderer {\n\n // GL buffer objects.\n\n vao: GLuint,\n\n vbo: GLuint,\n\n\n\n program: RectShaderProgram,\n\n\n", "file_path": "alacritty/src/renderer/rects.rs", "rank": 52, "score": 80027.07953859086 }, { "content": "#[derive(Debug, Copy, Clone, PartialEq)]\n\nstruct Anchor {\n\n point: Point,\n\n side: Side,\n\n}\n\n\n\nimpl Anchor {\n\n fn new(point: Point, side: 
Side) -> Anchor {\n\n Anchor { point, side }\n\n }\n\n}\n\n\n\n/// Represents a range of selected cells.\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\npub struct SelectionRange {\n\n /// Start point, top left of the selection.\n\n pub start: Point,\n\n /// End point, bottom right of the selection.\n\n pub end: Point,\n\n /// Whether this selection is a block selection.\n\n pub is_block: bool,\n", "file_path": "alacritty_terminal/src/selection.rs", "rank": 53, "score": 80027.07953859086 }, { "content": "#[derive(Debug)]\n\nstruct Atlas {\n\n /// Texture id for this atlas.\n\n id: GLuint,\n\n\n\n /// Width of atlas.\n\n width: i32,\n\n\n\n /// Height of atlas.\n\n height: i32,\n\n\n\n /// Left-most free pixel in a row.\n\n ///\n\n /// This is called the extent because it is the upper bound of used pixels\n\n /// in a row.\n\n row_extent: i32,\n\n\n\n /// Baseline for glyphs in the current row.\n\n row_baseline: i32,\n\n\n\n /// Tallest glyph in current row.\n\n ///\n\n /// This is used as the advance when end of row is reached.\n\n row_tallest: i32,\n\n}\n\n\n", "file_path": "alacritty/src/renderer/mod.rs", "rank": 54, "score": 80027.07953859086 }, { "content": "fn main() {\n\n #[cfg(windows)]\n\n panic::attach_handler();\n\n\n\n // When linked with the windows subsystem windows won't automatically attach\n\n // to the console of the parent process, so we do it explicitly. 
This fails\n\n // silently if the parent has no console.\n\n #[cfg(windows)]\n\n unsafe {\n\n AttachConsole(ATTACH_PARENT_PROCESS);\n\n }\n\n\n\n // Load command line options.\n\n let options = Options::new();\n\n\n\n #[cfg(unix)]\n\n let result = match options.subcommands {\n\n Some(Subcommands::Msg(options)) => msg(options),\n\n None => alacritty(options),\n\n };\n", "file_path": "alacritty/src/main.rs", "rank": 55, "score": 79173.97392426725 }, { "content": "/// Canvas which is used for simple line drawing operations.\n\n///\n\n/// The coordinate system is the following:\n\n///\n\n/// 0 x\n\n/// --------------→\n\n/// |\n\n/// |\n\n/// |\n\n/// |\n\n/// |\n\n/// |\n\n/// y↓\n\nstruct Canvas {\n\n /// Canvas width.\n\n width: usize,\n\n\n\n /// Canvas height.\n\n height: usize,\n\n\n\n /// Canvas buffer we draw on.\n\n buffer: Vec<Pixel>,\n\n}\n\n\n\nimpl Canvas {\n\n /// Builds new `Canvas` for line drawing with the given `width` and `height` with default color.\n\n fn new(width: usize, height: usize) -> Self {\n\n let buffer = vec![Pixel::default(); width * height];\n\n Self { width, height, buffer }\n\n }\n\n\n\n /// Vertical center of the `Canvas`.\n\n fn y_center(&self) -> f32 {\n", "file_path": "alacritty/src/renderer/builtin_font.rs", "rank": 56, "score": 78927.75550908002 }, { "content": "/// Helper type which tracks how much of a buffer has been written.\n\nstruct Writing {\n\n source: Cow<'static, [u8]>,\n\n written: usize,\n\n}\n\n\n\npub struct Notifier(pub Sender<Msg>);\n\n\n\nimpl event::Notify for Notifier {\n\n fn notify<B>(&self, bytes: B)\n\n where\n\n B: Into<Cow<'static, [u8]>>,\n\n {\n\n let bytes = bytes.into();\n\n // terminal hangs if we send 0 bytes through.\n\n if bytes.len() == 0 {\n\n return;\n\n }\n\n\n\n let _ = self.0.send(Msg::Input(bytes));\n\n }\n", "file_path": "alacritty_terminal/src/event_loop.rs", "rank": 57, "score": 78923.06386783917 }, { "content": "#[derive(ConfigDeserialize)]\n\nstruct Test {\n\n #[config(alias = 
\"noalias\")]\n\n #[config(deprecated = \"use field2 instead\")]\n\n field1: usize,\n\n #[config(deprecated = \"shouldn't be hit\")]\n\n field2: String,\n\n field3: Option<u8>,\n\n #[doc(hidden)]\n\n nesting: Test2<usize>,\n\n #[config(flatten)]\n\n flatten: Test3,\n\n enom_small: TestEnum,\n\n enom_big: TestEnum,\n\n #[config(deprecated)]\n\n enom_error: TestEnum,\n\n}\n\n\n\nimpl Default for Test {\n\n fn default() -> Self {\n\n Self {\n", "file_path": "alacritty_config_derive/tests/config.rs", "rank": 58, "score": 78917.77157019608 }, { "content": "#[derive(PartialEq, Eq)]\n\nstruct RawBinding {\n\n key: Option<Key>,\n\n mouse: Option<MouseButton>,\n\n mods: ModifiersState,\n\n mode: BindingMode,\n\n notmode: BindingMode,\n\n action: Action,\n\n}\n\n\n\nimpl RawBinding {\n\n fn into_mouse_binding(self) -> Result<MouseBinding, Self> {\n\n if let Some(mouse) = self.mouse {\n\n Ok(Binding {\n\n trigger: mouse,\n\n mods: self.mods,\n\n action: self.action,\n\n mode: self.mode,\n\n notmode: self.notmode,\n\n })\n\n } else {\n", "file_path": "alacritty/src/config/bindings.rs", "rank": 59, "score": 78917.77157019608 }, { "content": "#[derive(Default)]\n\nstruct Logger {\n\n error_logs: Arc<Mutex<Vec<String>>>,\n\n warn_logs: Arc<Mutex<Vec<String>>>,\n\n}\n\n\n\nimpl Log for Logger {\n\n fn log(&self, record: &Record<'_>) {\n\n assert_eq!(record.target(), env!(\"CARGO_PKG_NAME\"));\n\n\n\n match record.level() {\n\n Level::Error => {\n\n let mut error_logs = self.error_logs.lock().unwrap();\n\n error_logs.push(record.args().to_string());\n\n },\n\n Level::Warn => {\n\n let mut warn_logs = self.warn_logs.lock().unwrap();\n\n warn_logs.push(record.args().to_string());\n\n },\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n fn enabled(&self, _metadata: &Metadata<'_>) -> bool {\n\n true\n\n }\n\n\n\n fn flush(&self) {}\n\n}\n", "file_path": "alacritty_config_derive/tests/config.rs", "rank": 60, "score": 78917.77157019608 }, { "content": "struct OnDemandLogFile {\n\n file: 
Option<LineWriter<File>>,\n\n created: Arc<AtomicBool>,\n\n path: PathBuf,\n\n}\n\n\n\nimpl OnDemandLogFile {\n\n fn new() -> Self {\n\n let mut path = env::temp_dir();\n\n path.push(format!(\"Alacritty-{}.log\", process::id()));\n\n\n\n // Set log path as an environment variable.\n\n env::set_var(ALACRITTY_LOG_ENV, path.as_os_str());\n\n\n\n OnDemandLogFile { path, file: None, created: Arc::new(AtomicBool::new(false)) }\n\n }\n\n\n\n fn file(&mut self) -> Result<&mut LineWriter<File>, io::Error> {\n\n // Allow to recreate the file if it has been deleted at runtime.\n\n if self.file.is_some() && !self.path.as_path().exists() {\n", "file_path": "alacritty/src/logging.rs", "rank": 61, "score": 78917.77157019608 }, { "content": "#[derive(Deserialize, Default)]\n\nstruct RefConfig {\n\n history_size: u32,\n\n}\n\n\n", "file_path": "alacritty_terminal/tests/ref.rs", "rank": 62, "score": 78917.77157019608 }, { "content": "#[repr(packed)]\n\n#[derive(Clone, Copy, Debug, Default)]\n\nstruct Pixel {\n\n _r: u8,\n\n _g: u8,\n\n _b: u8,\n\n}\n\n\n\nimpl Pixel {\n\n fn gray(color: u8) -> Self {\n\n Self { _r: color, _g: color, _b: color }\n\n }\n\n}\n\n\n\nimpl ops::Add for Pixel {\n\n type Output = Pixel;\n\n\n\n fn add(self, rhs: Pixel) -> Self::Output {\n\n let _r = self._r.saturating_add(rhs._r);\n\n let _g = self._g.saturating_add(rhs._g);\n\n let _b = self._b.saturating_add(rhs._b);\n\n Pixel { _r, _g, _b }\n", "file_path": "alacritty/src/renderer/builtin_font.rs", "rank": 63, "score": 78917.77157019608 }, { "content": "#[derive(Debug)]\n\n#[repr(C)]\n\nstruct InstanceData {\n\n // Coords.\n\n col: u16,\n\n row: u16,\n\n\n\n // Glyph offset.\n\n left: i16,\n\n top: i16,\n\n\n\n // Glyph size.\n\n width: i16,\n\n height: i16,\n\n\n\n // UV offset.\n\n uv_left: f32,\n\n uv_bot: f32,\n\n\n\n // uv scale.\n\n uv_width: f32,\n\n uv_height: f32,\n", "file_path": "alacritty/src/renderer/mod.rs", "rank": 64, "score": 78917.77157019608 }, { "content": "#[derive(ConfigDeserialize, 
Default)]\n\nstruct Test3 {\n\n flatty: usize,\n\n}\n\n\n", "file_path": "alacritty_config_derive/tests/config.rs", "rank": 65, "score": 78917.77157019608 }, { "content": "#[derive(Debug)]\n\nstruct SyncState {\n\n /// Expiration time of the synchronized update.\n\n timeout: Option<Instant>,\n\n\n\n /// Sync DCS waiting for termination sequence.\n\n pending_dcs: Option<Dcs>,\n\n\n\n /// Bytes read during the synchronized update.\n\n buffer: Vec<u8>,\n\n}\n\n\n\nimpl Default for SyncState {\n\n fn default() -> Self {\n\n Self { buffer: Vec::with_capacity(SYNC_BUFFER_SIZE), pending_dcs: None, timeout: None }\n\n }\n\n}\n\n\n\n/// Pending DCS sequence.\n", "file_path": "alacritty_terminal/src/ansi.rs", "rank": 66, "score": 78917.77157019608 }, { "content": "/// Generator for creating new hint labels.\n\nstruct HintLabels {\n\n /// Full character set available.\n\n alphabet: Vec<char>,\n\n\n\n /// Alphabet indices for the next label.\n\n indices: Vec<usize>,\n\n\n\n /// Point separating the alphabet's head and tail characters.\n\n ///\n\n /// To make identification of the tail character easy, part of the alphabet cannot be used for\n\n /// any other position.\n\n ///\n\n /// All characters in the alphabet before this index will be used for the last character, while\n\n /// the rest will be used for everything else.\n\n split_point: usize,\n\n}\n\n\n\nimpl HintLabels {\n\n /// Create a new label generator.\n\n ///\n", "file_path": "alacritty/src/display/hint.rs", "rank": 67, "score": 78917.77157019608 }, { "content": "#[derive(Debug, Default)]\n\nstruct ProcessorState {\n\n /// Last processed character for repetition.\n\n preceding_char: Option<char>,\n\n\n\n /// DCS sequence waiting for termination.\n\n dcs: Option<Dcs>,\n\n\n\n /// State for synchronized terminal updates.\n\n sync_state: SyncState,\n\n}\n\n\n", "file_path": "alacritty_terminal/src/ansi.rs", "rank": 68, "score": 78917.77157019608 }, { "content": "#[derive(Debug)]\n\nstruct CommandParser {\n\n /// 
Active command.\n\n command: SixelCommand,\n\n\n\n /// Parameter values.\n\n ///\n\n /// If a value is greater than `u16::MAX`, it will be kept as `u16::MAX`.\n\n ///\n\n /// Parameters after `MAX_COMMAND_PARAMS` are ignored.\n\n params: [u16; MAX_COMMAND_PARAMS],\n\n\n\n /// Current position.\n\n params_position: usize,\n\n}\n\n\n\nimpl CommandParser {\n\n fn new(command: SixelCommand) -> CommandParser {\n\n CommandParser { command, params: [0; MAX_COMMAND_PARAMS], params_position: 0 }\n\n }\n\n\n", "file_path": "alacritty_terminal/src/graphics/sixel.rs", "rank": 69, "score": 77859.96554869853 }, { "content": "struct TabStops {\n\n tabs: Vec<bool>,\n\n}\n\n\n\nimpl TabStops {\n\n #[inline]\n\n fn new(columns: usize) -> TabStops {\n\n TabStops { tabs: (0..columns).map(|i| i % INITIAL_TABSTOPS == 0).collect() }\n\n }\n\n\n\n /// Remove all tabstops.\n\n #[inline]\n\n fn clear_all(&mut self) {\n\n unsafe {\n\n ptr::write_bytes(self.tabs.as_mut_ptr(), 0, self.tabs.len());\n\n }\n\n }\n\n\n\n /// Increase tabstop capacity.\n\n #[inline]\n", "file_path": "alacritty_terminal/src/term/mod.rs", "rank": 70, "score": 77859.96554869853 }, { "content": "#[inline]\n\nfn load_glyph(\n\n active_tex: &mut GLuint,\n\n atlas: &mut Vec<Atlas>,\n\n current_atlas: &mut usize,\n\n rasterized: &RasterizedGlyph,\n\n) -> Glyph {\n\n // At least one atlas is guaranteed to be in the `self.atlas` list; thus\n\n // the unwrap.\n\n match atlas[*current_atlas].insert(rasterized, active_tex) {\n\n Ok(glyph) => glyph,\n\n Err(AtlasInsertError::Full) => {\n\n *current_atlas += 1;\n\n if *current_atlas == atlas.len() {\n\n let new = Atlas::new(ATLAS_SIZE);\n\n *active_tex = 0; // Atlas::new binds a texture. 
Ugh this is sloppy.\n\n atlas.push(new);\n\n }\n\n load_glyph(active_tex, atlas, current_atlas, rasterized)\n\n },\n\n Err(AtlasInsertError::GlyphTooLarge) => Glyph {\n", "file_path": "alacritty/src/renderer/mod.rs", "rank": 72, "score": 76920.10068051465 }, { "content": "/// Calculate the size of the window given padding, terminal dimensions and cell size.\n\nfn window_size(\n\n config: &UiConfig,\n\n dimensions: Dimensions,\n\n cell_width: f32,\n\n cell_height: f32,\n\n scale_factor: f64,\n\n) -> PhysicalSize<u32> {\n\n let padding = config.window.padding(scale_factor);\n\n\n\n let grid_width = cell_width * dimensions.columns.0.max(MIN_COLUMNS) as f32;\n\n let grid_height = cell_height * dimensions.lines.max(MIN_SCREEN_LINES) as f32;\n\n\n\n let width = (padding.0).mul_add(2., grid_width).floor();\n\n let height = (padding.1).mul_add(2., grid_height).floor();\n\n\n\n PhysicalSize::new(width as u32, height as u32)\n\n}\n", "file_path": "alacritty/src/display/mod.rs", "rank": 73, "score": 76919.72526463849 }, { "content": "#[test]\n\nfn scroll_up() {\n\n let mut grid = Grid::<usize>::new(10, 1, 0);\n\n for i in 0..10 {\n\n grid[Line(i as i32)][Column(0)] = i;\n\n }\n\n\n\n grid.scroll_up::<usize>(&(Line(0)..Line(10)), 2);\n\n\n\n assert_eq!(grid[Line(0)][Column(0)], 2);\n\n assert_eq!(grid[Line(0)].occ, 1);\n\n assert_eq!(grid[Line(1)][Column(0)], 3);\n\n assert_eq!(grid[Line(1)].occ, 1);\n\n assert_eq!(grid[Line(2)][Column(0)], 4);\n\n assert_eq!(grid[Line(2)].occ, 1);\n\n assert_eq!(grid[Line(3)][Column(0)], 5);\n\n assert_eq!(grid[Line(3)].occ, 1);\n\n assert_eq!(grid[Line(4)][Column(0)], 6);\n\n assert_eq!(grid[Line(4)].occ, 1);\n\n assert_eq!(grid[Line(5)][Column(0)], 7);\n\n assert_eq!(grid[Line(5)].occ, 1);\n", "file_path": "alacritty_terminal/src/grid/tests.rs", "rank": 74, "score": 76914.62201561358 }, { "content": "#[test]\n\nfn scroll_down() {\n\n let mut grid = Grid::<usize>::new(10, 1, 0);\n\n for i in 0..10 {\n\n grid[Line(i as i32)][Column(0)] = 
i;\n\n }\n\n\n\n grid.scroll_down::<usize>(&(Line(0)..Line(10)), 2);\n\n\n\n assert_eq!(grid[Line(0)][Column(0)], 0); // was 8.\n\n assert_eq!(grid[Line(0)].occ, 0);\n\n assert_eq!(grid[Line(1)][Column(0)], 0); // was 9.\n\n assert_eq!(grid[Line(1)].occ, 0);\n\n assert_eq!(grid[Line(2)][Column(0)], 0);\n\n assert_eq!(grid[Line(2)].occ, 1);\n\n assert_eq!(grid[Line(3)][Column(0)], 1);\n\n assert_eq!(grid[Line(3)].occ, 1);\n\n assert_eq!(grid[Line(4)][Column(0)], 2);\n\n assert_eq!(grid[Line(4)].occ, 1);\n\n assert_eq!(grid[Line(5)][Column(0)], 3);\n\n assert_eq!(grid[Line(5)].occ, 1);\n\n assert_eq!(grid[Line(6)][Column(0)], 4);\n\n assert_eq!(grid[Line(6)].occ, 1);\n\n assert_eq!(grid[Line(7)][Column(0)], 5);\n\n assert_eq!(grid[Line(7)].occ, 1);\n\n assert_eq!(grid[Line(8)][Column(0)], 6);\n\n assert_eq!(grid[Line(8)].occ, 1);\n\n assert_eq!(grid[Line(9)][Column(0)], 7);\n\n assert_eq!(grid[Line(9)].occ, 1);\n\n}\n\n\n", "file_path": "alacritty_terminal/src/grid/tests.rs", "rank": 75, "score": 76914.62201561358 }, { "content": "/// Deserialize all configuration files as generic Value.\n\nfn parse_config(\n\n path: &Path,\n\n config_paths: &mut Vec<PathBuf>,\n\n recursion_limit: usize,\n\n) -> Result<Value> {\n\n config_paths.push(path.to_owned());\n\n\n\n let mut contents = fs::read_to_string(path)?;\n\n\n\n // Remove UTF-8 BOM.\n\n if contents.starts_with('\\u{FEFF}') {\n\n contents = contents.split_off(3);\n\n }\n\n\n\n // Load configuration file as Value.\n\n let config: Value = match serde_yaml::from_str(&contents) {\n\n Ok(config) => config,\n\n Err(error) => {\n\n // Prevent parsing error with an empty string and commented out file.\n\n if error.to_string() == \"EOF while parsing a value\" {\n", "file_path": "alacritty/src/config/mod.rs", "rank": 76, "score": 76914.62201561358 }, { "content": "/// State of the terminal damage.\n\nstruct TermDamageState {\n\n /// Hint whether terminal should be damaged entirely regardless of the actual damage changes.\n\n 
is_fully_damaged: bool,\n\n\n\n /// Information about damage on terminal lines.\n\n lines: Vec<LineDamageBounds>,\n\n\n\n /// Old terminal cursor point.\n\n last_cursor: Point,\n\n\n\n /// Old selection range.\n\n last_selection: Option<SelectionRange>,\n\n}\n\n\n\nimpl TermDamageState {\n\n fn new(num_cols: usize, num_lines: usize) -> Self {\n\n let lines =\n\n (0..num_lines).map(|line| LineDamageBounds::undamaged(line, num_cols)).collect();\n\n\n\n Self {\n", "file_path": "alacritty_terminal/src/term/mod.rs", "rank": 77, "score": 76850.15621489564 }, { "content": "#[derive(Default)]\n\nstruct Regex<'a> {\n\n /// All visible matches.\n\n matches: Cow<'a, RegexMatches>,\n\n\n\n /// Index of the last match checked.\n\n index: usize,\n\n}\n\n\n\nimpl<'a> Regex<'a> {\n\n /// Create a new renderable regex iterator.\n\n fn new<T>(term: &Term<T>, dfas: &RegexSearch) -> Self {\n\n let matches = Cow::Owned(RegexMatches::new(term, dfas));\n\n Self { index: 0, matches }\n\n }\n\n\n\n /// Advance the regex tracker to the next point.\n\n ///\n\n /// This will return `true` if the point passed is part of a regex match.\n\n fn advance(&mut self, point: Point) -> bool {\n\n while let Some(regex_match) = self.matches.get(self.index) {\n", "file_path": "alacritty/src/display/content.rs", "rank": 78, "score": 76397.47378433013 }, { "content": "/// Regex hints for keyboard shortcuts.\n\nstruct Hint<'a> {\n\n /// Hint matches and position.\n\n regex: Regex<'a>,\n\n\n\n /// Last match checked against current cell position.\n\n labels: &'a Vec<Vec<char>>,\n\n}\n\n\n\nimpl<'a> Hint<'a> {\n\n /// Advance the hint iterator.\n\n ///\n\n /// If the point is within a hint, the keyboard shortcut character that should be displayed at\n\n /// this position will be returned.\n\n ///\n\n /// The tuple's [`bool`] will be `true` when the character is the first for this hint.\n\n fn advance(&mut self, viewport_start: Point, point: Point) -> Option<(char, bool)> {\n\n // Check if we're within a match 
at all.\n\n if !self.regex.advance(point) {\n\n return None;\n\n }\n", "file_path": "alacritty/src/display/content.rs", "rank": 79, "score": 76397.47378433013 }, { "content": "#[test]\n\nfn scroll_down_with_history() {\n\n let mut grid = Grid::<usize>::new(10, 1, 1);\n\n grid.increase_scroll_limit(1);\n\n for i in 0..10 {\n\n grid[Line(i as i32)][Column(0)] = i;\n\n }\n\n\n\n grid.scroll_down::<usize>(&(Line(0)..Line(10)), 2);\n\n\n\n assert_eq!(grid[Line(0)][Column(0)], 0); // was 8.\n\n assert_eq!(grid[Line(0)].occ, 0);\n\n assert_eq!(grid[Line(1)][Column(0)], 0); // was 9.\n\n assert_eq!(grid[Line(1)].occ, 0);\n\n assert_eq!(grid[Line(2)][Column(0)], 0);\n\n assert_eq!(grid[Line(2)].occ, 1);\n\n assert_eq!(grid[Line(3)][Column(0)], 1);\n\n assert_eq!(grid[Line(3)].occ, 1);\n\n assert_eq!(grid[Line(4)][Column(0)], 2);\n\n assert_eq!(grid[Line(4)].occ, 1);\n\n assert_eq!(grid[Line(5)][Column(0)], 3);\n", "file_path": "alacritty_terminal/src/grid/tests.rs", "rank": 80, "score": 75863.61742127207 }, { "content": "#[test]\n\nfn shrink_reflow() {\n\n let mut grid = Grid::<Cell>::new(1, 5, 2);\n\n grid[Line(0)][Column(0)] = cell('1');\n\n grid[Line(0)][Column(1)] = cell('2');\n\n grid[Line(0)][Column(2)] = cell('3');\n\n grid[Line(0)][Column(3)] = cell('4');\n\n grid[Line(0)][Column(4)] = cell('5');\n\n\n\n grid.resize(true, 1, 2);\n\n\n\n assert_eq!(grid.total_lines(), 3);\n\n\n\n assert_eq!(grid[Line(-2)].len(), 2);\n\n assert_eq!(grid[Line(-2)][Column(0)], cell('1'));\n\n assert_eq!(grid[Line(-2)][Column(1)], wrap_cell('2'));\n\n\n\n assert_eq!(grid[Line(-1)].len(), 2);\n\n assert_eq!(grid[Line(-1)][Column(0)], cell('3'));\n\n assert_eq!(grid[Line(-1)][Column(1)], wrap_cell('4'));\n\n\n\n assert_eq!(grid[Line(0)].len(), 2);\n\n assert_eq!(grid[Line(0)][Column(0)], cell('5'));\n\n assert_eq!(grid[Line(0)][Column(1)], Cell::default());\n\n}\n\n\n", "file_path": "alacritty_terminal/src/grid/tests.rs", "rank": 81, "score": 75863.61742127207 }, { "content": 
"#[test]\n\nfn test_iter() {\n\n let assert_indexed = |value: usize, indexed: Option<Indexed<&usize>>| {\n\n assert_eq!(Some(&value), indexed.map(|indexed| indexed.cell));\n\n };\n\n\n\n let mut grid = Grid::<usize>::new(5, 5, 0);\n\n for i in 0..5 {\n\n for j in 0..5 {\n\n grid[Line(i)][Column(j)] = i as usize * 5 + j;\n\n }\n\n }\n\n\n\n let mut iter = grid.iter_from(Point::new(Line(0), Column(0)));\n\n\n\n assert_eq!(None, iter.prev());\n\n assert_indexed(1, iter.next());\n\n assert_eq!(Column(1), iter.point().column);\n\n assert_eq!(0, iter.point().line);\n\n\n\n assert_indexed(2, iter.next());\n", "file_path": "alacritty_terminal/src/grid/tests.rs", "rank": 82, "score": 75863.61742127207 }, { "content": "#[test]\n\nfn grow_reflow() {\n\n let mut grid = Grid::<Cell>::new(2, 2, 0);\n\n grid[Line(0)][Column(0)] = cell('1');\n\n grid[Line(0)][Column(1)] = wrap_cell('2');\n\n grid[Line(1)][Column(0)] = cell('3');\n\n grid[Line(1)][Column(1)] = Cell::default();\n\n\n\n grid.resize(true, 2, 3);\n\n\n\n assert_eq!(grid.total_lines(), 2);\n\n\n\n assert_eq!(grid[Line(0)].len(), 3);\n\n assert_eq!(grid[Line(0)][Column(0)], cell('1'));\n\n assert_eq!(grid[Line(0)][Column(1)], cell('2'));\n\n assert_eq!(grid[Line(0)][Column(2)], cell('3'));\n\n\n\n // Make sure rest of grid is empty.\n\n assert_eq!(grid[Line(1)].len(), 3);\n\n assert_eq!(grid[Line(1)][Column(0)], Cell::default());\n\n assert_eq!(grid[Line(1)][Column(1)], Cell::default());\n\n assert_eq!(grid[Line(1)][Column(2)], Cell::default());\n\n}\n\n\n", "file_path": "alacritty_terminal/src/grid/tests.rs", "rank": 83, "score": 75863.61742127207 }, { "content": "#[test]\n\nfn config_deserialize() {\n\n let logger = unsafe {\n\n LOGGER = Some(Logger::default());\n\n LOGGER.as_mut().unwrap()\n\n };\n\n\n\n log::set_logger(logger).unwrap();\n\n log::set_max_level(log::LevelFilter::Warn);\n\n\n\n let test: Test = serde_yaml::from_str(\n\n r#\"\n\n field1: 3\n\n field3: 32\n\n nesting:\n\n field1: \"testing\"\n\n field2: 
None\n\n field3: 99\n\n aliased: 8\n\n flatty: 123\n\n enom_small: \"one\"\n", "file_path": "alacritty_config_derive/tests/config.rs", "rank": 84, "score": 75863.61742127207 }, { "content": "#[derive(Debug)]\n\nstruct Passwd<'a> {\n\n name: &'a str,\n\n dir: &'a str,\n\n shell: &'a str,\n\n}\n\n\n", "file_path": "alacritty_terminal/src/tty/unix.rs", "rank": 85, "score": 75288.16581593535 }, { "content": "/// Type that handles actions from the parser.\n\n///\n\n/// XXX Should probably not provide default impls for everything, but it makes\n\n/// writing specific handler impls for tests far easier.\n\npub trait Handler {\n\n /// OSC to set window title.\n\n fn set_title(&mut self, _: Option<String>) {}\n\n\n\n /// Set the cursor style.\n\n fn set_cursor_style(&mut self, _: Option<CursorStyle>) {}\n\n\n\n /// Set the cursor shape.\n\n fn set_cursor_shape(&mut self, _shape: CursorShape) {}\n\n\n\n /// A character to be displayed.\n\n fn input(&mut self, _c: char) {}\n\n\n\n /// Set cursor to position.\n\n fn goto(&mut self, _: Line, _: Column) {}\n\n\n\n /// Set cursor to specific row.\n\n fn goto_line(&mut self, _: Line) {}\n\n\n\n /// Set cursor to specific column.\n", "file_path": "alacritty_terminal/src/ansi.rs", "rank": 86, "score": 75256.23737093323 }, { "content": "/// Types that are interested in when the display is resized.\n\npub trait OnResize {\n\n fn on_resize(&mut self, size: &SizeInfo);\n\n}\n\n\n", "file_path": "alacritty_terminal/src/event.rs", "rank": 87, "score": 75251.09118389252 }, { "content": "/// Trait for conversion into the iterator.\n\npub trait IntoRects {\n\n /// Consume the cursor for an iterator of rects.\n\n fn rects(self, size_info: &SizeInfo, thickness: f32) -> CursorRects;\n\n}\n\n\n\nimpl IntoRects for RenderableCursor {\n\n fn rects(self, size_info: &SizeInfo, thickness: f32) -> CursorRects {\n\n let point = self.point();\n\n let x = point.column.0 as f32 * size_info.cell_width() + size_info.padding_x();\n\n let y = point.line as 
f32 * size_info.cell_height() + size_info.padding_y();\n\n\n\n let mut width = size_info.cell_width();\n\n let height = size_info.cell_height();\n\n\n\n let thickness = (thickness * width as f32).round().max(1.);\n\n\n\n if self.is_wide() {\n\n width *= 2.;\n\n }\n\n\n", "file_path": "alacritty/src/display/cursor.rs", "rank": 88, "score": 75245.71039059662 }, { "content": "/// Byte sequences are sent to a `Notify` in response to some events.\n\npub trait Notify {\n\n /// Notify that an escape sequence should be written to the PTY.\n\n ///\n\n /// TODO this needs to be able to error somehow.\n\n fn notify<B: Into<Cow<'static, [u8]>>>(&self, _: B);\n\n}\n\n\n", "file_path": "alacritty_terminal/src/event.rs", "rank": 89, "score": 75245.71039059662 }, { "content": "#[test]\n\nfn grow_reflow_disabled() {\n\n let mut grid = Grid::<Cell>::new(2, 2, 0);\n\n grid[Line(0)][Column(0)] = cell('1');\n\n grid[Line(0)][Column(1)] = wrap_cell('2');\n\n grid[Line(1)][Column(0)] = cell('3');\n\n grid[Line(1)][Column(1)] = Cell::default();\n\n\n\n grid.resize(false, 2, 3);\n\n\n\n assert_eq!(grid.total_lines(), 2);\n\n\n\n assert_eq!(grid[Line(0)].len(), 3);\n\n assert_eq!(grid[Line(0)][Column(0)], cell('1'));\n\n assert_eq!(grid[Line(0)][Column(1)], wrap_cell('2'));\n\n assert_eq!(grid[Line(0)][Column(2)], Cell::default());\n\n\n\n assert_eq!(grid[Line(1)].len(), 3);\n\n assert_eq!(grid[Line(1)][Column(0)], cell('3'));\n\n assert_eq!(grid[Line(1)][Column(1)], Cell::default());\n\n assert_eq!(grid[Line(1)][Column(2)], Cell::default());\n\n}\n\n\n", "file_path": "alacritty_terminal/src/grid/tests.rs", "rank": 90, "score": 74860.30090797294 }, { "content": "#[test]\n\nfn shrink_reflow_disabled() {\n\n let mut grid = Grid::<Cell>::new(1, 5, 2);\n\n grid[Line(0)][Column(0)] = cell('1');\n\n grid[Line(0)][Column(1)] = cell('2');\n\n grid[Line(0)][Column(2)] = cell('3');\n\n grid[Line(0)][Column(3)] = cell('4');\n\n grid[Line(0)][Column(4)] = cell('5');\n\n\n\n grid.resize(false, 1, 
2);\n\n\n\n assert_eq!(grid.total_lines(), 1);\n\n\n\n assert_eq!(grid[Line(0)].len(), 2);\n\n assert_eq!(grid[Line(0)][Column(0)], cell('1'));\n\n assert_eq!(grid[Line(0)][Column(1)], cell('2'));\n\n}\n\n\n\n// https://github.com/rust-lang/rust-clippy/pull/6375\n", "file_path": "alacritty_terminal/src/grid/tests.rs", "rank": 91, "score": 74860.30090797294 }, { "content": "#[test]\n\nfn shrink_reflow_twice() {\n\n let mut grid = Grid::<Cell>::new(1, 5, 2);\n\n grid[Line(0)][Column(0)] = cell('1');\n\n grid[Line(0)][Column(1)] = cell('2');\n\n grid[Line(0)][Column(2)] = cell('3');\n\n grid[Line(0)][Column(3)] = cell('4');\n\n grid[Line(0)][Column(4)] = cell('5');\n\n\n\n grid.resize(true, 1, 4);\n\n grid.resize(true, 1, 2);\n\n\n\n assert_eq!(grid.total_lines(), 3);\n\n\n\n assert_eq!(grid[Line(-2)].len(), 2);\n\n assert_eq!(grid[Line(-2)][Column(0)], cell('1'));\n\n assert_eq!(grid[Line(-2)][Column(1)], wrap_cell('2'));\n\n\n\n assert_eq!(grid[Line(-1)].len(), 2);\n\n assert_eq!(grid[Line(-1)][Column(0)], cell('3'));\n\n assert_eq!(grid[Line(-1)][Column(1)], wrap_cell('4'));\n\n\n\n assert_eq!(grid[Line(0)].len(), 2);\n\n assert_eq!(grid[Line(0)][Column(0)], cell('5'));\n\n assert_eq!(grid[Line(0)][Column(1)], Cell::default());\n\n}\n\n\n", "file_path": "alacritty_terminal/src/grid/tests.rs", "rank": 92, "score": 74860.30090797294 }, { "content": "#[test]\n\nfn grow_reflow_multiline() {\n\n let mut grid = Grid::<Cell>::new(3, 2, 0);\n\n grid[Line(0)][Column(0)] = cell('1');\n\n grid[Line(0)][Column(1)] = wrap_cell('2');\n\n grid[Line(1)][Column(0)] = cell('3');\n\n grid[Line(1)][Column(1)] = wrap_cell('4');\n\n grid[Line(2)][Column(0)] = cell('5');\n\n grid[Line(2)][Column(1)] = cell('6');\n\n\n\n grid.resize(true, 3, 6);\n\n\n\n assert_eq!(grid.total_lines(), 3);\n\n\n\n assert_eq!(grid[Line(0)].len(), 6);\n\n assert_eq!(grid[Line(0)][Column(0)], cell('1'));\n\n assert_eq!(grid[Line(0)][Column(1)], cell('2'));\n\n assert_eq!(grid[Line(0)][Column(2)], 
cell('3'));\n\n assert_eq!(grid[Line(0)][Column(3)], cell('4'));\n\n assert_eq!(grid[Line(0)][Column(4)], cell('5'));\n\n assert_eq!(grid[Line(0)][Column(5)], cell('6'));\n\n\n\n // Make sure rest of grid is empty.\n\n for r in (1..3).map(Line::from) {\n\n assert_eq!(grid[r].len(), 6);\n\n for c in 0..6 {\n\n assert_eq!(grid[r][Column(c)], Cell::default());\n\n }\n\n }\n\n}\n\n\n", "file_path": "alacritty_terminal/src/grid/tests.rs", "rank": 93, "score": 74860.30090797294 }, { "content": "/// A group of 6 vertical pixels.\n\nstruct Sixel(u8);\n\n\n\nimpl Sixel {\n\n /// Create a new sixel.\n\n ///\n\n /// It expects the byte value from the picture definition stream.\n\n #[inline]\n\n fn new(byte: u8) -> Sixel {\n\n debug_assert!((0x3F..=0x7E).contains(&byte));\n\n Sixel(byte - 0x3F)\n\n }\n\n\n\n /// Return how many rows are printed in the sixel.\n\n #[inline]\n\n fn height(&self) -> usize {\n\n 8 - self.0.leading_zeros() as usize\n\n }\n\n\n\n /// Return an iterator to get dots in the sixel.\n\n #[inline]\n", "file_path": "alacritty_terminal/src/graphics/sixel.rs", "rank": 94, "score": 74230.3597944378 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq)]\n\nstruct Size(FontSize);\n\n\n\nimpl Default for Size {\n\n fn default() -> Self {\n\n Self(FontSize::new(11.))\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for Size {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n struct NumVisitor;\n\n impl<'v> Visitor<'v> for NumVisitor {\n\n type Value = Size;\n\n\n\n fn expecting(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.write_str(\"f64 or u64\")\n\n }\n", "file_path": "alacritty/src/config/font.rs", "rank": 95, "score": 74230.3597944378 }, { "content": "#[derive(Copy, Clone, Default, Debug, PartialEq, Eq)]\n\nstruct ColorIndex(u8);\n\n\n\nimpl<'de> Deserialize<'de> for ColorIndex {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n 
let index = u8::deserialize(deserializer)?;\n\n\n\n if index < 16 {\n\n Err(SerdeError::custom(\n\n \"Config error: indexed_color's index is {}, but a value bigger than 15 was \\\n\n expected; ignoring setting\",\n\n ))\n\n } else {\n\n Ok(Self(index))\n\n }\n\n }\n\n}\n\n\n", "file_path": "alacritty/src/config/color.rs", "rank": 96, "score": 74230.3597944378 }, { "content": " /// Cell at the current iteratior position.\n\n pub fn cell(&self) -> &'a T {\n\n &self.grid[self.point]\n\n }\n\n}\n\n\n\nimpl<'a, T> Iterator for GridIterator<'a, T> {\n\n type Item = Indexed<&'a T>;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n // Stop once we've reached the end of the grid.\n\n if self.point >= self.end {\n\n return None;\n\n }\n\n\n\n match self.point {\n\n Point { column, .. } if column == self.grid.last_column() => {\n\n self.point.column = Column(0);\n\n self.point.line += 1;\n\n },\n\n _ => self.point.column += Column(1),\n\n }\n\n\n\n Some(Indexed { cell: &self.grid[self.point], point: self.point })\n\n }\n\n}\n\n\n", "file_path": "alacritty_terminal/src/grid/mod.rs", "rank": 98, "score": 32.04572644377897 }, { "content": " #[inline]\n\n pub fn display_offset(&self) -> usize {\n\n self.display_offset\n\n }\n\n\n\n #[inline]\n\n pub fn cursor_cell(&mut self) -> &mut T {\n\n let point = self.cursor.point;\n\n &mut self[point.line][point.column]\n\n }\n\n}\n\n\n\nimpl<T: PartialEq> PartialEq for Grid<T> {\n\n fn eq(&self, other: &Self) -> bool {\n\n // Compare struct fields and check result of grid comparison.\n\n self.raw.eq(&other.raw)\n\n && self.columns.eq(&other.columns)\n\n && self.lines.eq(&other.lines)\n\n && self.display_offset.eq(&other.display_offset)\n\n }\n", "file_path": "alacritty_terminal/src/grid/mod.rs", "rank": 99, "score": 31.960989833643133 } ]
Rust
xhcid/src/usb/setup.rs
redox-os/drivers
36b3af426260cb1bd740426f5fb8a07a5f37e4c5
use super::DescriptorKind; use crate::driver_interface::*; #[repr(packed)] #[derive(Clone, Copy, Debug, Default)] pub struct Setup { pub kind: u8, pub request: u8, pub value: u16, pub index: u16, pub length: u16, } #[repr(u8)] pub enum ReqDirection { HostToDevice = 0, DeviceToHost = 1, } impl From<PortReqDirection> for ReqDirection { fn from(d: PortReqDirection) -> Self { match d { PortReqDirection::DeviceToHost => Self::DeviceToHost, PortReqDirection::HostToDevice => Self::HostToDevice, } } } #[repr(u8)] pub enum ReqType { Standard = 0, Class = 1, Vendor = 2, Reserved = 3, } impl From<PortReqTy> for ReqType { fn from(d: PortReqTy) -> Self { match d { PortReqTy::Standard => Self::Standard, PortReqTy::Class => Self::Class, PortReqTy::Vendor => Self::Vendor, } } } #[repr(u8)] pub enum ReqRecipient { Device = 0, Interface = 1, Endpoint = 2, Other = 3, VendorSpecific = 31, } impl From<PortReqRecipient> for ReqRecipient { fn from(d: PortReqRecipient) -> Self { match d { PortReqRecipient::Device => Self::Device, PortReqRecipient::Interface => Self::Interface, PortReqRecipient::Endpoint => Self::Endpoint, PortReqRecipient::Other => Self::Other, PortReqRecipient::VendorSpecific => Self::VendorSpecific, } } } pub const USB_SETUP_DIR_BIT: u8 = 1 << 7; pub const USB_SETUP_DIR_SHIFT: u8 = 7; pub const USB_SETUP_REQ_TY_MASK: u8 = 0x60; pub const USB_SETUP_REQ_TY_SHIFT: u8 = 5; pub const USB_SETUP_RECIPIENT_MASK: u8 = 0x1F; pub const USB_SETUP_RECIPIENT_SHIFT: u8 = 0; impl Setup { pub fn direction(&self) -> ReqDirection { if self.kind & USB_SETUP_DIR_BIT == 0 { ReqDirection::HostToDevice } else { ReqDirection::DeviceToHost } } pub const fn req_ty(&self) -> u8 { (self.kind & USB_SETUP_REQ_TY_MASK) >> USB_SETUP_REQ_TY_SHIFT } pub const fn req_recipient(&self) -> u8 { (self.kind & USB_SETUP_RECIPIENT_MASK) >> USB_SETUP_RECIPIENT_SHIFT } pub fn is_allowed_from_api(&self) -> bool { self.req_ty() == ReqType::Class as u8 || self.req_ty() == ReqType::Vendor as u8 } pub const fn 
get_status() -> Self { Self { kind: 0b1000_0000, request: 0x00, value: 0, index: 0, length: 2, } } pub const fn clear_feature(feature: u16) -> Self { Self { kind: 0b0000_0000, request: 0x01, value: feature, index: 0, length: 0, } } pub const fn set_feature(feature: u16) -> Self { Self { kind: 0b0000_0000, request: 0x03, value: feature, index: 0, length: 0, } } pub const fn set_address(address: u16) -> Self { Self { kind: 0b0000_0000, request: 0x05, value: address, index: 0, length: 0, } } pub const fn get_descriptor( kind: DescriptorKind, index: u8, language: u16, length: u16, ) -> Self { Self { kind: 0b1000_0000, request: 0x06, value: ((kind as u16) << 8) | (index as u16), index: language, length: length, } } pub const fn set_descriptor(kind: u8, index: u8, language: u16, length: u16) -> Self { Self { kind: 0b0000_0000, request: 0x07, value: ((kind as u16) << 8) | (index as u16), index: language, length: length, } } pub const fn get_configuration() -> Self { Self { kind: 0b1000_0000, request: 0x08, value: 0, index: 0, length: 1, } } pub const fn set_configuration(value: u8) -> Self { Self { kind: 0b0000_0000, request: 0x09, value: value as u16, index: 0, length: 0, } } pub const fn set_interface(interface: u8, alternate_setting: u8) -> Self { Self { kind: 0b0000_0001, request: 0x09, value: alternate_setting as u16, index: interface as u16, length: 0, } } }
use super::DescriptorKind; use crate::driver_interface::*; #[repr(packed)] #[derive(Clone, Copy, Debug, Default)] pub struct Setup { pub kind: u8, pub request: u8, pub value: u16, pub index: u16, pub length: u16, } #[repr(u8)] pub enum ReqDirection { HostToDevice = 0, DeviceToHost = 1, } impl From<PortReqDirection> for ReqDirection { fn from(d: PortReqDirection) -> Self { match d { PortReqDirection::DeviceToHost => Self::DeviceToHost, PortReqDirection::HostToDevice => Self::HostToDevice, } } } #[repr(u8)] pub enum ReqType { Standard = 0, Class = 1, Vendor = 2, Reserved = 3, } impl From<PortReqTy> for ReqType { fn from(d: PortReqTy) -> Self { match d { PortReqTy::Standard => Self::Standard, PortReqTy::Class => Self::Class, PortReqTy::Vendor => Self::Vendor, } } } #[repr(u8)] pub enum ReqRecipient { Device = 0, Interface = 1, Endpoint = 2, Other = 3, VendorSpecific = 31, } impl From<PortReqRecipient> for ReqRecipient { fn from(d: PortReqRecipient) -> Self { match d { PortReqRecipient::Device => Self::Device, PortReqRecipient::Interface => Self::Interface, PortReqRecipient::Endpoint => Self::Endpoint, PortReqRecipient::Other => Self::Other, PortReqRecipient::VendorSpecific => Self::VendorSpecific, } } } pub const USB_SETUP_DIR_BIT: u8 = 1 << 7; pub const USB_SETUP_DIR_SHIFT: u8 = 7; pub const USB_SETUP_REQ_TY_MASK: u8 = 0x60; pub const USB_SETUP_REQ_TY_SHIFT: u8 = 5; pub const USB_SETUP_RECIPIENT_MASK: u8 = 0x1F; pub const USB_SETUP_RECIPIENT_SHIFT: u8 = 0; impl Setup { pub fn direction(&self) -> ReqDirection { if self.kind & USB_SETUP_DIR_BIT == 0 { ReqDirection::HostToDevice } else { ReqDirection::DeviceToHost } } pub const fn req_ty(&self) -> u8 { (self.kind & USB_SETUP_REQ_TY_MASK) >> USB_SETUP_REQ_TY_SHIFT } pub const fn req_recipient(&self) -> u8 { (self.kind & USB_SETUP_RECIPIENT_MASK) >> USB_SETUP_RECIPIENT_SHIFT } pub fn is_allowed_from_api(&self) -> bool { self.req_ty() == ReqType::Class as u8 || self.req_ty() == ReqType::Vendor as u8 } pub const fn 
get_status() -> Self { Self { kind: 0b1000_0000, request: 0x00, value: 0, index: 0, length: 2, } } pub const fn clear_feature(feature: u16) -> Self { Self { kind: 0b0000_0000, request: 0x01, value: feature, index: 0, length: 0, } } pub const fn set_feature(feature: u16) -> Self { Self { kind: 0b0000_0000, request: 0x03, value: feature, index: 0, length: 0, } } pub const fn set_address(address: u16) -> Self { Self { kind: 0b0000_0000, request: 0x05, value: address, index: 0, length: 0, } } pub const fn get_descriptor( kind: DescriptorKind, index: u8, language: u16, length: u16, ) -> Self { Self { kind: 0b1000_0000, request: 0x06, value: ((kind as u16) << 8) | (index as u16), index: language, length: length, } } pub const fn set_descriptor(kind: u8, index: u8, language: u16, length: u16) -> Self { Self { kind: 0b0000_0000, request: 0x07, value: ((kind as u16) << 8) | (index as u16), index: language, length: length, } } pub const fn get_configuration() -> Self { Self { kind: 0b1000_0000, request: 0x08, value: 0, index: 0, length: 1, } } pub const fn set_configuration(value: u8) -> Self { Self { kind: 0b0000_0000, request: 0x09, value: value as u16, index: 0, length: 0, } } pub const fn set_interfa
}
ce(interface: u8, alternate_setting: u8) -> Self { Self { kind: 0b0000_0001, request: 0x09, value: alternate_setting as u16, index: interface as u16, length: 0, } }
function_block-function_prefixed
[ { "content": "pub fn mode_page_iter(buffer: &[u8]) -> impl Iterator<Item = AnyModePage> {\n\n ModePageIter {\n\n raw: ModePageIterRaw { buffer },\n\n }\n\n}\n", "file_path": "usbscsid/src/scsi/cmds.rs", "rank": 0, "score": 236183.29528297443 }, { "content": "pub fn enable(relative: bool) -> bool {\n\n println!(\"ps2d: Enable vmmouse\");\n\n\n\n unsafe {\n\n let _ = cmd(ABSPOINTER_COMMAND, CMD_ENABLE);\n\n\n\n let (status, _, _, _, _, _) = cmd(ABSPOINTER_STATUS, 0);\n\n \tif (status & 0x0000ffff) == 0 {\n\n \tprintln!(\"ps2d: No vmmouse\");\n\n \t\treturn false;\n\n \t}\n\n\n\n let (version, _, _, _, _, _) = cmd(ABSPOINTER_DATA, 1);\n\n if version != VERSION {\n\n println!(\"ps2d: Invalid vmmouse version: {} instead of {}\", version, VERSION);\n\n let _ = cmd(ABSPOINTER_COMMAND, CMD_DISABLE);\n\n return false;\n\n }\n\n\n\n if relative {\n\n \tcmd(ABSPOINTER_COMMAND, CMD_REQUEST_RELATIVE);\n\n } else {\n\n \tcmd(ABSPOINTER_COMMAND, CMD_REQUEST_ABSOLUTE);\n\n }\n\n }\n\n\n\n return true;\n\n}\n", "file_path": "ps2d/src/vm.rs", "rank": 1, "score": 231351.87562714968 }, { "content": "pub fn setup<'a>(\n\n handle: &'a XhciClientHandle,\n\n protocol: u8,\n\n dev_desc: &DevDesc,\n\n conf_desc: &ConfDesc,\n\n if_desc: &IfDesc,\n\n) -> Option<Box<dyn Protocol + 'a>> {\n\n match protocol {\n\n 0x50 => Some(Box::new(\n\n BulkOnlyTransport::init(handle, conf_desc, if_desc).unwrap(),\n\n )),\n\n _ => None,\n\n }\n\n}\n", "file_path": "usbscsid/src/protocol/mod.rs", "rank": 2, "score": 227802.3137392507 }, { "content": "fn concat(hi: u8, lo: u8) -> u16 {\n\n (u16::from(hi) << 8) | u16::from(lo)\n\n}\n\n\n", "file_path": "usbhidd/src/reqs.rs", "rank": 3, "score": 221785.87222944148 }, { "content": "pub fn format_to_u16(sr: &SampleRate, bps: BitsPerSample, channels:u8) -> u16{\n\n\n\n\n\n\t// 3.3.41\n\n\n\n\tlet base:u16 = match sr.base {\n\n\t\tBaseRate::BR44_1 => { 1 << 14},\n\n\t\tBaseRate::BR48 => { 0 },\n\n\t};\n\n\n\n\tlet mult = ((sr.mult - 1) & 0x7) << 11;\n\n\n\n\tlet 
div = ((sr.div - 1) & 0x7) << 8;\n\n\n\n\tlet bits = (bps as u16) << 4;\n\n\n\n\tlet chan = ((channels - 1) & 0xF) as u16;\n\n\n\n\tlet val:u16 = base | mult | div | bits | chan;\n\n\n", "file_path": "ihdad/src/hda/stream.rs", "rank": 4, "score": 219724.8128920396 }, { "content": "pub fn get_max_lun(handle: &XhciClientHandle, if_num: u16) -> Result<u8, XhciClientHandleError> {\n\n let mut lun = 0u8;\n\n let buffer = slice::from_mut(&mut lun);\n\n handle.device_request(\n\n PortReqTy::Class,\n\n PortReqRecipient::Interface,\n\n 0xFE,\n\n 0,\n\n if_num,\n\n DeviceReqData::In(buffer),\n\n )?;\n\n Ok(lun)\n\n}\n", "file_path": "usbscsid/src/protocol/bot.rs", "rank": 5, "score": 217526.8349267232 }, { "content": "pub fn parse_pkg_length(data: &[u8]) -> Result<(usize, usize), AmlError> {\n\n let lead_byte = data[0];\n\n let count_bytes: usize = (lead_byte >> 6) as usize;\n\n\n\n if count_bytes == 0 {\n\n return Ok(((lead_byte & 0x3F) as usize, 1 as usize));\n\n }\n\n\n\n let upper_two = (lead_byte >> 4) & 0x03;\n\n if upper_two != 0 {\n\n return Err(AmlError::AmlParseError(\"Invalid package length\"));\n\n }\n\n\n\n let mut current_byte = 0;\n\n let mut pkg_len: usize = (lead_byte & 0x0F) as usize;\n\n\n\n while current_byte < count_bytes {\n\n pkg_len += (data[1 + current_byte] as u32 * 16 * (256 as u32).pow(current_byte as u32)) as usize;\n\n current_byte += 1;\n\n }\n\n\n\n Ok((pkg_len, count_bytes + 1))\n\n}\n", "file_path": "acpid/src/aml/pkglength.rs", "rank": 6, "score": 211212.05977885902 }, { "content": "pub fn parse_target(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n if data[0] == 0x00 {\n\n Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 1\n\n })\n\n } else {\n\n parse_super_name(data, ctx)\n\n }\n\n}\n", "file_path": "acpid/src/aml/namestring.rs", "rank": 7, "score": 208940.97961704002 }, 
{ "content": "pub fn parse_term_list(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n let mut current_offset: usize = 0;\n\n\n\n while current_offset < data.len() {\n\n let res = parse_term_obj(&data[current_offset..], ctx)?;\n\n\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: data.len()\n", "file_path": "acpid/src/aml/termlist.rs", "rank": 8, "score": 205878.58719715322 }, { "content": "pub fn parse_local_obj(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n match data[0] {\n\n 0x68 ..= 0x6E => Ok(AmlParseType {\n\n val: AmlValue::ObjectReference(ObjectReference::LocalObj(data[0] - 0x60)),\n\n len: 1 as usize\n\n }),\n\n _ => Err(AmlError::AmlInvalidOpCode)\n\n }\n\n}\n\n\n", "file_path": "acpid/src/aml/dataobj.rs", "rank": 9, "score": 205878.58719715322 }, { "content": "pub fn parse_term_arg(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_selector! 
{\n\n data, ctx,\n\n parse_local_obj,\n\n parse_data_obj,\n\n parse_arg_obj,\n\n parse_type2_opcode\n\n };\n\n\n\n Err(AmlError::AmlInvalidOpCode)\n\n}\n\n\n", "file_path": "acpid/src/aml/termlist.rs", "rank": 10, "score": 205878.58719715322 }, { "content": "pub fn parse_def_buffer(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x11);\n\n\n\n let (pkg_length, pkg_length_len) = parse_pkg_length(&data[1..])?;\n\n let buffer_size = parse_term_arg(&data[1 + pkg_length_len..], ctx)?;\n\n let mut byte_list = data[1 + pkg_length_len + buffer_size.len .. 1 + pkg_length].to_vec().clone();\n\n\n\n byte_list.truncate(buffer_size.val.get_as_integer(ctx.acpi_context())? as usize);\n\n\n\n Ok(AmlParseType {\n\n val: AmlValue::Buffer(byte_list),\n\n len: 1 + pkg_length\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 11, "score": 205878.58719715322 }, { "content": "pub fn parse_name_string(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n let mut characters: Vec<u8> = vec!();\n\n let mut starting_index: usize = 0;\n\n\n\n if data[0] == 0x5C {\n\n characters.push(data[0]);\n\n starting_index = 1;\n\n } else if data[0] == 0x5E {\n\n while data[starting_index] == 0x5E {\n\n characters.push(data[starting_index]);\n\n starting_index += 1;\n", "file_path": "acpid/src/aml/namestring.rs", "rank": 12, "score": 205878.58719715322 }, { "content": "pub fn parse_namespace_modifier(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_selector! 
{\n\n data, ctx,\n\n parse_alias_op,\n\n parse_scope_op,\n\n parse_name_op\n\n };\n\n\n\n Err(AmlError::AmlInvalidOpCode)\n\n}\n\n\n", "file_path": "acpid/src/aml/namespacemodifier.rs", "rank": 13, "score": 205878.58719715322 }, { "content": "pub fn parse_data_obj(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_selector! {\n\n data, ctx,\n\n parse_computational_data,\n\n parse_def_package,\n\n parse_def_var_package\n\n };\n\n\n\n Err(AmlError::AmlInvalidOpCode)\n\n}\n\n\n", "file_path": "acpid/src/aml/dataobj.rs", "rank": 14, "score": 205878.58719715322 }, { "content": "pub fn parse_type2_opcode(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_selector! 
{\n\n data, ctx,\n\n parse_def_increment,\n\n parse_def_acquire,\n\n parse_def_wait,\n\n parse_def_land,\n\n parse_def_lequal,\n\n parse_def_lgreater,\n\n parse_def_lless,\n\n parse_def_lnot,\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 15, "score": 205878.58719715322 }, { "content": "pub fn parse_method_invocation(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n let name = parse_name_string(data, ctx)?;\n\n let method = ctx.get(ctx.acpi_context(), name.val.clone())?;\n\n\n\n let method = match method {\n\n AmlValue::None => return Err(AmlError::AmlDeferredLoad),\n\n _ => method.get_as_method()?\n\n };\n\n\n\n let mut cur = 0;\n\n let mut params: Vec<AmlValue> = vec!();\n", "file_path": "acpid/src/aml/termlist.rs", "rank": 16, "score": 205878.58719715322 }, { "content": "pub fn parse_object_list(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n let mut current_offset: usize = 0;\n\n\n\n while current_offset < data.len() {\n\n let res = parse_object(&data[current_offset..], ctx)?;\n\n\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: data.len()\n", "file_path": "acpid/src/aml/termlist.rs", "rank": 17, "score": 205878.58719715322 }, { "content": "pub fn parse_type1_opcode(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_selector! 
{\n\n data, ctx,\n\n parse_def_break,\n\n parse_def_breakpoint,\n\n parse_def_continue,\n\n parse_def_noop,\n\n parse_def_fatal,\n\n parse_def_if_else,\n\n parse_def_load,\n\n parse_def_notify,\n", "file_path": "acpid/src/aml/type1opcode.rs", "rank": 18, "score": 205878.58719715325 }, { "content": "pub fn parse_super_name(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_selector! {\n\n data, ctx,\n\n parse_simple_name,\n\n parse_type6_opcode,\n\n parse_debug_obj\n\n };\n\n\n\n Err(AmlError::AmlInvalidOpCode)\n\n}\n\n\n", "file_path": "acpid/src/aml/namestring.rs", "rank": 19, "score": 205878.58719715322 }, { "content": "pub fn parse_named_obj(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_selector! {\n\n data, ctx,\n\n parse_def_bank_field,\n\n parse_def_create_bit_field,\n\n parse_def_create_byte_field,\n\n parse_def_create_word_field,\n\n parse_def_create_dword_field,\n\n parse_def_create_qword_field,\n\n parse_def_create_field,\n\n parse_def_data_region,\n", "file_path": "acpid/src/aml/namedobj.rs", "rank": 20, "score": 205878.58719715325 }, { "content": "pub fn parse_def_package(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n // TODO: Handle deferred loads in here\n\n parser_opcode!(data, 0x12);\n\n\n\n let (pkg_length, pkg_length_len) = parse_pkg_length(&data[1..])?;\n\n let numelements = data[1 + pkg_length_len] as usize;\n\n let mut elements = parse_package_elements_list(&data[2 + pkg_length_len .. 
1 + pkg_length], ctx)?.val.get_as_package()?;\n\n\n\n if elements.len() > numelements {\n\n elements = elements[0 .. numelements].to_vec();\n\n } else if numelements > elements.len() {\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 21, "score": 205878.58719715322 }, { "content": "pub fn parse_type6_opcode(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_selector! {\n\n data, ctx,\n\n parse_def_deref_of,\n\n parse_def_ref_of,\n\n parse_def_index,\n\n parse_method_invocation\n\n };\n\n\n\n Err(AmlError::AmlInvalidOpCode)\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 22, "score": 205878.58719715322 }, { "content": "pub fn parse_arg_obj(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n match data[0] {\n\n 0x68 ..= 0x6E => Ok(AmlParseType {\n\n val: AmlValue::ObjectReference(ObjectReference::ArgObj(data[0] - 0x68)),\n\n len: 1 as usize\n\n }),\n\n _ => Err(AmlError::AmlInvalidOpCode)\n\n }\n\n}\n\n\n", "file_path": "acpid/src/aml/dataobj.rs", "rank": 23, "score": 205878.58719715322 }, { "content": "pub fn parse_simple_name(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_selector! {\n\n data, ctx,\n\n parse_name_string,\n\n parse_arg_obj,\n\n parse_local_obj\n\n };\n\n\n\n Err(AmlError::AmlInvalidOpCode)\n\n}\n\n\n", "file_path": "acpid/src/aml/namestring.rs", "rank": 24, "score": 205878.58719715322 }, { "content": "/// Allocate at most `count` interrupt vectors, which can start at any offset. 
Unless MSI is used\n\n/// and an entire aligned range of vectors is needed, this function should be used.\n\npub fn allocate_interrupt_vectors(cpu_id: usize, count: u8) -> io::Result<Option<(u8, Vec<File>)>> {\n\n allocate_aligned_interrupt_vectors(cpu_id, NonZeroU8::new(1).unwrap(), count)\n\n}\n\n\n", "file_path": "pcid/src/driver_interface/irq_helpers.rs", "rank": 25, "score": 204843.7628801579 }, { "content": "pub fn parse_def_object_type(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x8E);\n\n parser_selector! {\n\n data, ctx,\n\n parse_super_name,\n\n parse_def_ref_of,\n\n parse_def_deref_of,\n\n parse_def_index\n\n }\n\n\n\n Err(AmlError::AmlInvalidOpCode)\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 26, "score": 202965.13090544913 }, { "content": "pub fn parse_data_ref_obj(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_selector! 
{\n\n data, ctx,\n\n parse_data_obj,\n\n parse_term_arg\n\n };\n\n\n\n match parse_super_name(data, ctx) {\n\n Ok(res) => match res.val {\n\n AmlValue::String(s) => Ok(AmlParseType {\n\n val: AmlValue::ObjectReference(ObjectReference::Object(s)),\n\n len: res.len\n\n }),\n\n _ => Ok(res)\n\n },\n\n Err(e) => Err(e)\n\n }\n\n}\n\n\n", "file_path": "acpid/src/aml/dataobj.rs", "rank": 27, "score": 202965.13090544913 }, { "content": "pub fn parse_def_var_package(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n // TODO: Handle deferred loads in here\n\n parser_opcode!(data, 0x13);\n\n\n\n let (pkg_length, pkg_length_len) = parse_pkg_length(&data[1..])?;\n\n let num_elements = parse_term_arg(&data[1 + pkg_length_len .. 1 + pkg_length], ctx)?;\n\n let mut elements = parse_package_elements_list(&data[1 + pkg_length_len + num_elements.len ..\n\n 1 + pkg_length], ctx)?.val.get_as_package()?;\n\n\n\n let numelements = num_elements.val.get_as_integer(ctx.acpi_context())? 
as usize;\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 28, "score": 202965.13090544913 }, { "content": "pub fn read<E>(offset: u64, blksize: u32, mut buf: &mut [u8], block_bytes: &mut [u8], mut read: impl FnMut(u64, &mut [u8]) -> Result<(), E>) -> Result<usize, E> {\n\n // TODO: Yield sometimes, perhaps after a few blocks or something.\n\n use std::ops::{Add, Div, Rem};\n\n\n\n fn div_round_up<T>(a: T, b: T) -> T\n\n where\n\n T: Add<Output = T> + Div<Output = T> + Rem<Output = T> + PartialEq + From<u8> + Copy\n\n {\n\n if a % b != T::from(0u8) {\n\n a / b + T::from(1u8)\n\n } else {\n\n a / b\n\n }\n\n }\n\n\n\n let orig_buf_len = buf.len();\n\n\n\n let start_block = offset / u64::from(blksize);\n\n let end_block = div_round_up(offset + buf.len() as u64, u64::from(blksize)); // The first block not in the range\n\n\n", "file_path": "block-io-wrapper/src/lib.rs", "rank": 29, "score": 196356.19075011832 }, { "content": "/// Allocate a single interrupt vector, returning both the vector number (starting from 32 up to\n\n/// 254), and its IRQ handle which is then reserved. Returns Ok(None) if allocation fails due to\n\n/// no available IRQs.\n\npub fn allocate_single_interrupt_vector(cpu_id: usize) -> io::Result<Option<(u8, File)>> {\n\n let (base, mut files) = match allocate_interrupt_vectors(cpu_id, 1) {\n\n Ok(Some((base, files))) => (base, files),\n\n Ok(None) => return Ok(None),\n\n Err(err) => return Err(err),\n\n };\n\n assert_eq!(files.len(), 1);\n\n Ok(Some((base, files.pop().unwrap())))\n\n}\n", "file_path": "pcid/src/driver_interface/irq_helpers.rs", "rank": 30, "score": 196175.07776531816 }, { "content": "/// Allocate multiple interrupt vectors, from the IDT of the specified processor, returning the\n\n/// start vector and the IRQ handles.\n\n///\n\n/// The alignment is a requirement for the allocation range. For example, with an alignment of 8,\n\n/// only ranges that begin with a multiple of eight are accepted. 
The IRQ handles returned will\n\n/// always correspond to the subsequent IRQ numbers beginning the first value in the return tuple.\n\n///\n\n/// This function is not actually guaranteed to allocate all of the IRQs specified in `count`,\n\n/// since another process might already have requested one vector in the range. The caller must\n\n/// check that the returned vector have the same length as `count`. In the future this function may\n\n/// perhaps lock the entire directory to prevent this from happening, or maybe find the smallest free\n\n/// range with the minimum alignment, to allow other drivers to obtain their necessary IRQs.\n\n///\n\n/// Note that this count/alignment restriction is only mandatory for MSI; MSI-X allows for\n\n/// individually allocated vectors that might be spread out, even on multiple CPUs. Thus, multiple\n\n/// invocations with alignment 1 and count 1 are totally acceptable, although allocating in bulk\n\n/// minimizes the initialization overhead.\n\npub fn allocate_aligned_interrupt_vectors(cpu_id: usize, alignment: NonZeroU8, count: u8) -> io::Result<Option<(u8, Vec<File>)>> {\n\n let cpu_id = u8::try_from(cpu_id).expect(\"usize cpu ids not implemented yet\");\n\n if count == 0 { return Ok(None) }\n\n\n\n let available_irqs = fs::read_dir(format!(\"irq:cpu-{:02x}\", cpu_id))?;\n\n let mut available_irq_numbers = available_irqs.filter_map(|entry| -> Option<io::Result<_>> {\n\n let entry = match entry {\n\n Ok(e) => e,\n\n Err(err) => return Some(Err(err)),\n\n };\n\n\n\n let path = entry.path();\n\n\n\n let file_name = match path.file_name() {\n\n Some(f) => f,\n\n None => return None,\n\n };\n\n\n\n let path_str = match file_name.to_str() {\n\n Some(s) => s,\n", "file_path": "pcid/src/driver_interface/irq_helpers.rs", "rank": 31, "score": 196174.75996417733 }, { "content": "// TODO: Perhaps read the MADT instead?\n\n/// Obtains an interator over all of the visible CPU ids, for use in IRQ allocation and MSI\n\n/// capability structs or 
MSI-X tables.\n\npub fn cpu_ids() -> io::Result<impl Iterator<Item = io::Result<usize>> + 'static> {\n\n Ok(fs::read_dir(\"irq:\")?\n\n .filter_map(|entry| -> Option<io::Result<_>> { match entry {\n\n Ok(e) => {\n\n let path = e.path();\n\n let file_name = path.file_name()?.to_str()?;\n\n // the file name should be in the format `cpu-<CPU ID>`\n\n if ! file_name.starts_with(\"cpu-\") {\n\n return None;\n\n }\n\n u8::from_str_radix(&file_name[4..], 16).map(usize::from).map(Ok).ok()\n\n }\n\n Err(e) => Some(Err(e)),\n\n } }))\n\n}\n\n\n", "file_path": "pcid/src/driver_interface/irq_helpers.rs", "rank": 32, "score": 195066.1769020532 }, { "content": "pub fn parse_name_seg(data: &[u8]) -> Result<(Vec<u8>, usize), AmlError> {\n\n match data[0] {\n\n 0x41 ..= 0x5A | 0x5F => (),\n\n _ => return Err(AmlError::AmlInvalidOpCode)\n\n }\n\n\n\n match data[1] {\n\n 0x30 ..= 0x39 | 0x41 ..= 0x5A | 0x5F => (),\n\n _ => return Err(AmlError::AmlInvalidOpCode)\n\n }\n\n\n\n match data[2] {\n\n 0x30 ..= 0x39 | 0x41 ..= 0x5A | 0x5F => (),\n\n _ => return Err(AmlError::AmlInvalidOpCode)\n\n }\n\n\n\n match data[3] {\n\n 0x30 ..= 0x39 | 0x41 ..= 0x5A | 0x5F => (),\n\n _ => return Err(AmlError::AmlInvalidOpCode)\n\n }\n\n\n\n let mut name_seg = vec!(data[0], data[1], data[2], data[3]);\n\n while *(name_seg.last().unwrap()) == 0x5F {\n\n name_seg.pop();\n\n }\n\n\n\n Ok((name_seg, 4))\n\n}\n\n\n", "file_path": "acpid/src/aml/namestring.rs", "rank": 33, "score": 191705.30176971268 }, { "content": "pub fn set_global_s_state(context: &AcpiContext, state: u8) {\n\n if state != 5 {\n\n return;\n\n }\n\n let fadt = match context.fadt() {\n\n Some(fadt) => fadt,\n\n None => {\n\n log::error!(\"Cannot set global S-state due to missing FADT.\");\n\n return;\n\n }\n\n };\n\n\n\n let port = fadt.pm1a_control_block as u16;\n\n let mut val = 1 << 13;\n\n\n\n let namespace_guard = context.namespace();\n\n\n\n let namespace = match &*namespace_guard {\n\n Some(namespace) => namespace,\n\n None => {\n", 
"file_path": "acpid/src/aml/mod.rs", "rank": 34, "score": 187921.8247786538 }, { "content": "fn parse_debug_obj(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode_extended!(data, 0x31);\n\n\n\n Ok(AmlParseType {\n\n val: AmlValue::DebugObject,\n\n len: 2\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/namestring.rs", "rank": 35, "score": 182848.25083282217 }, { "content": "fn parse_def_match(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x28);\n\n\n\n let search_pkg = parse_term_arg(&data[1..], ctx)?;\n\n\n\n let first_operation = match data[1 + search_pkg.len] {\n\n 0 => MatchOpcode::MTR,\n\n 1 => MatchOpcode::MEQ,\n\n 2 => MatchOpcode::MLE,\n\n 3 => MatchOpcode::MLT,\n\n 4 => MatchOpcode::MGE,\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 36, "score": 182848.25083282217 }, { "content": "fn parse_def_index(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x88);\n\n\n\n let obj = parse_term_arg(&data[1..], ctx)?;\n\n let idx = parse_term_arg(&data[1 + obj.len..], ctx)?;\n\n let target = parse_target(&data[1 + obj.len + idx.len..], ctx)?;\n\n\n\n let reference = AmlValue::ObjectReference(ObjectReference::Index(Box::new(obj.val), Box::new(idx.val)));\n\n let _ = ctx.modify(ctx.acpi_context(), target.val, reference.clone());\n\n\n\n Ok(AmlParseType {\n\n val: reference,\n\n len: 1 + obj.len + idx.len + target.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 37, "score": 
182838.30654852337 }, { "content": "fn parse_def_copy_object(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n // TODO: Compute the result\n\n // TODO: Store the result\n\n parser_opcode!(data, 0x9D);\n\n\n\n let source = parse_term_arg(&data[1..], ctx)?;\n\n let destination = parse_simple_name(&data[1 + source.len..], ctx)?;\n\n\n\n ctx.copy(ctx.acpi_context(), destination.val, source.val.clone())?;\n\n\n\n Ok(AmlParseType {\n\n val: source.val,\n\n len: 1 + source.len + destination.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 38, "score": 179779.54297136606 }, { "content": "fn parse_oem_table_id(hex: [u8; 16]) -> Option<[u8; 8]> {\n\n Some([\n\n parse_hex_2digit(&hex[0..2])?,\n\n parse_hex_2digit(&hex[2..4])?,\n\n parse_hex_2digit(&hex[4..6])?,\n\n parse_hex_2digit(&hex[6..8])?,\n\n parse_hex_2digit(&hex[8..10])?,\n\n parse_hex_2digit(&hex[10..12])?,\n\n parse_hex_2digit(&hex[12..14])?,\n\n parse_hex_2digit(&hex[14..16])?,\n\n ])\n\n}\n\n\n", "file_path": "acpid/src/scheme.rs", "rank": 39, "score": 172702.49338219274 }, { "content": "fn wrap_ring(index: usize, ring_size: usize) -> usize {\n\n (index + 1) & (ring_size - 1)\n\n}\n\n\n\nimpl SchemeBlockMut for Intel8259x {\n\n fn open(&mut self, _path: &str, flags: usize, uid: u32, _gid: u32) -> Result<Option<usize>> {\n\n if uid == 0 {\n\n self.next_id += 1;\n\n self.handles.insert(self.next_id, flags);\n\n Ok(Some(self.next_id))\n\n } else {\n\n Err(Error::new(EACCES))\n\n }\n\n }\n\n\n\n fn dup(&mut self, id: usize, buf: &[u8]) -> Result<Option<usize>> {\n\n if !buf.is_empty() {\n\n return Err(Error::new(EINVAL));\n\n }\n\n\n", "file_path": "ixgbed/src/device.rs", "rank": 40, "score": 168201.95692255083 }, { "content": "fn wrap_ring(index: usize, ring_size: usize) -> usize {\n\n (index + 1) & (ring_size - 
1)\n\n}\n\n\n\nimpl SchemeBlockMut for Intel8254x {\n\n fn open(&mut self, _path: &str, flags: usize, uid: u32, _gid: u32) -> Result<Option<usize>> {\n\n if uid == 0 {\n\n self.next_id += 1;\n\n self.handles.insert(self.next_id, flags);\n\n Ok(Some(self.next_id))\n\n } else {\n\n Err(Error::new(EACCES))\n\n }\n\n }\n\n\n\n fn dup(&mut self, id: usize, buf: &[u8]) -> Result<Option<usize>> {\n\n if !buf.is_empty() {\n\n return Err(Error::new(EINVAL));\n\n }\n\n\n", "file_path": "e1000d/src/device.rs", "rank": 41, "score": 168201.95692255083 }, { "content": "fn setup_logging(verbosity: u8) -> Option<&'static RedoxLogger> {\n\n let log_level = match verbosity {\n\n 0 => log::LevelFilter::Info,\n\n 1 => log::LevelFilter::Debug,\n\n _ => log::LevelFilter::Trace,\n\n };\n\n let mut logger = RedoxLogger::new()\n\n .with_output(\n\n OutputBuilder::stderr()\n\n .with_ansi_escape_codes()\n\n .with_filter(log_level)\n\n .flush_on_newline(true)\n\n .build()\n\n );\n\n\n\n match OutputBuilder::in_redox_logging_scheme(\"bus\", \"pci\", \"pcid.log\") {\n\n Ok(b) => logger = logger.with_output(\n\n b.with_filter(log::LevelFilter::Trace)\n\n .flush_on_newline(true)\n\n .build()\n", "file_path": "pcid/src/main.rs", "rank": 42, "score": 166797.40184285463 }, { "content": "struct Request {\n\n address: usize,\n\n total_sectors: usize,\n\n sector: usize,\n\n running_opt: Option<(u32, usize)>,\n\n}\n\n\n\npub struct DiskATA {\n\n id: usize,\n\n port: &'static mut HbaPort,\n\n size: u64,\n\n request_opt: Option<Request>,\n\n clb: Dma<[HbaCmdHeader; 32]>,\n\n ctbas: [Dma<HbaCmdTable>; 32],\n\n _fb: Dma<[u8; 256]>,\n\n buf: Dma<[u8; 256 * 512]>\n\n}\n\n\n\nimpl DiskATA {\n\n pub fn new(id: usize, port: &'static mut HbaPort) -> Result<Self> {\n", "file_path": "ahcid/src/ahci/disk_ata.rs", "rank": 43, "score": 166227.33536695468 }, { "content": "/// Read the local APIC ID of the bootstrap processor.\n\npub fn read_bsp_apic_id() -> io::Result<usize> {\n\n let mut buffer = [0u8; 8];\n\n\n\n let 
mut file = File::open(\"irq:bsp\")?;\n\n let bytes_read = file.read(&mut buffer)?;\n\n\n\n (if bytes_read == 8 {\n\n usize::try_from(u64::from_le_bytes(buffer))\n\n } else if bytes_read == 4 {\n\n usize::try_from(u32::from_le_bytes([buffer[0], buffer[1], buffer[2], buffer[3]]))\n\n } else {\n\n panic!(\"`irq:` scheme responded with {} bytes, expected {}\", bytes_read, std::mem::size_of::<usize>());\n\n }).or(Err(io::Error::new(io::ErrorKind::InvalidData, \"bad BSP int size\")))\n\n}\n\n\n", "file_path": "pcid/src/driver_interface/irq_helpers.rs", "rank": 44, "score": 162816.28769673302 }, { "content": "fn dma_array<T, const N: usize>() -> Result<[Dma<T>; N]> {\n\n Ok((0..N)\n\n .map(|_| Dma::zeroed().map(|dma| unsafe { dma.assume_init() }))\n\n .collect::<Result<Vec<_>>>()?\n\n .try_into()\n\n .unwrap_or_else(|_| unreachable!()))\n\n}\n\nimpl Intel8254x {\n\n pub unsafe fn new(base: usize) -> Result<Self> {\n\n #[rustfmt::skip]\n\n let mut module = Intel8254x {\n\n base: base,\n\n receive_buffer: dma_array()?,\n\n receive_ring: Dma::zeroed()?.assume_init(),\n\n transmit_buffer: dma_array()?,\n\n receive_index: 0,\n\n transmit_ring: Dma::zeroed()?.assume_init(),\n\n transmit_ring_free: 16,\n\n transmit_index: 0,\n\n transmit_clean_index: 0,\n", "file_path": "e1000d/src/device.rs", "rank": 45, "score": 155376.36332621047 }, { "content": "pub fn get_idle(\n\n handle: &XhciClientHandle,\n\n report_id: u8,\n\n if_num: u16,\n\n) -> Result<u8, XhciClientHandleError> {\n\n let mut idle_rate = 0;\n\n let buffer = slice::from_mut(&mut idle_rate);\n\n handle.device_request(\n\n PortReqTy::Class,\n\n PortReqRecipient::Interface,\n\n GET_IDLE_REQ,\n\n u16::from(report_id),\n\n if_num,\n\n DeviceReqData::In(buffer),\n\n )?;\n\n Ok(idle_rate)\n\n}\n", "file_path": "usbhidd/src/reqs.rs", "rank": 46, "score": 153918.12012123942 }, { "content": "pub fn get_report(\n\n handle: &XhciClientHandle,\n\n report_ty: ReportTy,\n\n report_id: u8,\n\n if_num: u16,\n\n buffer: &mut [u8],\n\n) 
-> Result<(), XhciClientHandleError> {\n\n handle.device_request(\n\n PortReqTy::Class,\n\n PortReqRecipient::Interface,\n\n GET_REPORT_REQ,\n\n concat(report_ty as u8, report_id),\n\n if_num,\n\n DeviceReqData::In(buffer),\n\n )\n\n}\n", "file_path": "usbhidd/src/reqs.rs", "rank": 47, "score": 153918.12012123942 }, { "content": "pub fn set_protocol(\n\n handle: &XhciClientHandle,\n\n protocol: u8,\n\n if_num: u16,\n\n) -> Result<(), XhciClientHandleError> {\n\n handle.device_request(\n\n PortReqTy::Class,\n\n PortReqRecipient::Interface,\n\n SET_PROTOCOL_REQ,\n\n u16::from(protocol),\n\n if_num,\n\n DeviceReqData::NoData,\n\n )\n\n}\n", "file_path": "usbhidd/src/reqs.rs", "rank": 48, "score": 153918.12012123942 }, { "content": "pub fn set_report(\n\n handle: &XhciClientHandle,\n\n report_ty: ReportTy,\n\n report_id: u8,\n\n if_num: u16,\n\n buffer: &[u8],\n\n) -> Result<(), XhciClientHandleError> {\n\n handle.device_request(\n\n PortReqTy::Class,\n\n PortReqRecipient::Interface,\n\n SET_REPORT_REQ,\n\n concat(report_id, report_ty as u8),\n\n if_num,\n\n DeviceReqData::Out(buffer),\n\n )\n\n}\n", "file_path": "usbhidd/src/reqs.rs", "rank": 49, "score": 153918.12012123942 }, { "content": "pub fn set_idle(\n\n handle: &XhciClientHandle,\n\n duration: u8,\n\n report_id: u8,\n\n if_num: u16,\n\n) -> Result<(), XhciClientHandleError> {\n\n handle.device_request(\n\n PortReqTy::Class,\n\n PortReqRecipient::Interface,\n\n SET_IDLE_REQ,\n\n concat(duration, report_id),\n\n if_num,\n\n DeviceReqData::NoData,\n\n )\n\n}\n", "file_path": "usbhidd/src/reqs.rs", "rank": 50, "score": 153918.12012123942 }, { "content": "pub fn get_protocol(\n\n handle: &XhciClientHandle,\n\n if_num: u16,\n\n) -> Result<u8, XhciClientHandleError> {\n\n let mut protocol = 0;\n\n let buffer = slice::from_mut(&mut protocol);\n\n handle.device_request(\n\n PortReqTy::Class,\n\n PortReqRecipient::Interface,\n\n GET_PROTOCOL_REQ,\n\n 0,\n\n if_num,\n\n DeviceReqData::In(buffer),\n\n )?;\n\n 
Ok(protocol)\n\n}\n", "file_path": "usbhidd/src/reqs.rs", "rank": 51, "score": 153918.12012123942 }, { "content": "pub fn parse_aml_table(acpi_ctx: &AcpiContext, sdt: impl AmlContainingTable) -> Result<Vec<String>, AmlError> {\n\n parse_aml_with_scope(acpi_ctx, sdt, \"\\\\\".to_owned())\n\n}\n\n\n", "file_path": "acpid/src/aml/mod.rs", "rank": 52, "score": 153839.36412443256 }, { "content": "fn parse_hex_2digit(hex: &[u8]) -> Option<u8> {\n\n parse_hex_digit(hex[0]).and_then(|most_significant| Some((most_significant << 4) | parse_hex_digit(hex[1])?))\n\n}\n\n\n", "file_path": "acpid/src/scheme.rs", "rank": 53, "score": 153486.72007593207 }, { "content": "fn parse_hex_digit(hex: u8) -> Option<u8> {\n\n let hex = hex.to_ascii_lowercase();\n\n\n\n if hex >= b'a' && hex <= b'f' {\n\n Some(hex - b'a' + 10)\n\n } else if hex >= b'0' && hex <= b'9' {\n\n Some(hex - b'0')\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "acpid/src/scheme.rs", "rank": 54, "score": 153486.72007593207 }, { "content": "fn dma_array<T, const N: usize>() -> Result<[Dma<T>; N]> {\n\n Ok((0..N)\n\n .map(|_| Dma::zeroed().map(|dma| unsafe { dma.assume_init() }))\n\n .collect::<Result<Vec<_>>>()?\n\n .try_into()\n\n .unwrap_or_else(|_| unreachable!()))\n\n}\n\n\n\nimpl Alx {\n\n pub unsafe fn new(base: usize) -> Result<Self> {\n\n let mut module = Alx {\n\n base: base,\n\n\n\n vendor_id: 0,\n\n device_id: 0,\n\n subdev_id: 0,\n\n subven_id: 0,\n\n revision: 0,\n\n\n\n cap: 0,\n", "file_path": "alxd/src/device/mod.rs", "rank": 55, "score": 152859.1493481204 }, { "content": "fn parse_def_device(data: &[u8], ctx: &mut AmlExecutionContext) -> ParseResult {\n\n if data.len() < 2 {\n\n return Err(AmlError::AmlParseError(\"DefDevice - data truncated\"))\n\n }\n\n\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n // TODO: How to handle local context deferreds\n\n parser_opcode_extended!(data, 
0x82);\n\n\n\n let (pkg_length, pkg_length_len) = parse_pkg_length(&data[2..])?;\n\n let name = parse_name_string(&data[2 + pkg_length_len .. 2 + pkg_length], ctx)?;\n\n\n\n let local_scope_string = get_namespace_string(ctx.acpi_context(), ctx.scope.clone(), name.val)?;\n", "file_path": "acpid/src/aml/namedobj.rs", "rank": 56, "score": 150551.17130988004 }, { "content": "fn parse_def_index_field(data: &[u8], ctx: &mut AmlExecutionContext) -> ParseResult {\n\n if data.len() < 3 {\n\n return Err(AmlError::AmlParseError(\"DefIndexField - data truncated\"))\n\n }\n\n\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode_extended!(data, 0x86);\n\n\n\n let (pkg_length, pkg_length_len) = parse_pkg_length(&data[2..])?;\n\n let idx_name = parse_name_string(&data[2 + pkg_length_len .. 2 + pkg_length], ctx)?;\n\n let data_name = parse_name_string(&data[2 + pkg_length_len + idx_name.len .. 2 + pkg_length], ctx)?;\n\n\n\n let flags_raw = data[2 + pkg_length_len + idx_name.len + data_name.len];\n", "file_path": "acpid/src/aml/namedobj.rs", "rank": 57, "score": 149291.1098643834 }, { "content": "fn parse_oem_id(hex: [u8; 12]) -> Option<[u8; 6]> {\n\n Some([\n\n parse_hex_2digit(&hex[0..2])?,\n\n parse_hex_2digit(&hex[2..4])?,\n\n parse_hex_2digit(&hex[4..6])?,\n\n parse_hex_2digit(&hex[6..8])?,\n\n parse_hex_2digit(&hex[8..10])?,\n\n parse_hex_2digit(&hex[10..12])?,\n\n ])\n\n}\n", "file_path": "acpid/src/scheme.rs", "rank": 58, "score": 148125.98036325065 }, { "content": "pub fn bulk_only_mass_storage_reset(\n\n handle: &XhciClientHandle,\n\n if_num: u16,\n\n) -> Result<(), XhciClientHandleError> {\n\n handle.device_request(\n\n PortReqTy::Class,\n\n PortReqRecipient::Interface,\n\n 0xFF,\n\n 0,\n\n if_num,\n\n DeviceReqData::NoData,\n\n )\n\n}\n", "file_path": "usbscsid/src/protocol/bot.rs", "rank": 59, "score": 147117.71547350325 }, { "content": "pub fn 
parse_aml_with_scope(acpi_ctx: &AcpiContext, sdt: impl AmlContainingTable, scope: String) -> Result<Vec<String>, AmlError> {\n\n let data = sdt.aml();\n\n let mut ctx = AmlExecutionContext::new(acpi_ctx, scope);\n\n\n\n parse_term_list(data, &mut ctx)?;\n\n\n\n Ok(ctx.namespace_delta)\n\n}\n\n\n", "file_path": "acpid/src/aml/mod.rs", "rank": 60, "score": 145494.36002643371 }, { "content": "pub fn start_cq_reactor_thread(\n\n nvme: Arc<Nvme>,\n\n interrupt_sources: InterruptSources,\n\n receiver: Receiver<NotifReq>,\n\n) -> thread::JoinHandle<()> {\n\n // Actually, nothing prevents us from spawning additional threads. the channel is MPMC and\n\n // everything is properly synchronized. I'm not saying this is strictly required, but with\n\n // multiple completion queues it might actually be worth considering. The (in-kernel) IRQ\n\n // subsystem can have some room for improvement regarding lowering the latency, but MSI-X allows\n\n // multiple vectors to point to different CPUs, so that the load can be balanced across the\n\n // logical processors.\n\n thread::spawn(move || {\n\n CqReactor::new(nvme, interrupt_sources, receiver)\n\n .expect(\"nvmed: failed to setup CQ reactor\")\n\n .run()\n\n })\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct CompletionMessage {\n\n cq_entry: NvmeComp,\n\n}\n\n\n", "file_path": "nvmed/src/nvme/cq_reactor.rs", "rank": 61, "score": 145076.89598129905 }, { "content": "pub fn bos_capability_descs<'a>(\n\n desc: BosDescriptor,\n\n data: &'a [u8],\n\n) -> impl Iterator<Item = BosAnyDevDesc> + 'a {\n\n BosAnyDevDescIter::from(&data[..desc.total_len as usize - std::mem::size_of_val(&desc)])\n\n .take(desc.cap_count as usize)\n\n}\n", "file_path": "xhcid/src/usb/bos.rs", "rank": 62, "score": 144884.6360790957 }, { "content": "fn parse_object(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n 
}\n\n\n\n parser_selector! {\n\n data, ctx,\n\n parse_namespace_modifier,\n\n parse_named_obj\n\n };\n\n\n\n Err(AmlError::AmlInvalidOpCode)\n\n}\n\n\n", "file_path": "acpid/src/aml/termlist.rs", "rank": 63, "score": 143564.96939664174 }, { "content": "fn parse_def_while(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0xA2);\n\n\n\n let (pkg_length, pkg_length_len) = parse_pkg_length(&data[1..])?;\n\n\n\n loop {\n\n let predicate = parse_term_arg(&data[1 + pkg_length_len..], ctx)?;\n\n if predicate.val.get_as_integer(ctx.acpi_context())? == 0 {\n\n break;\n\n }\n\n\n", "file_path": "acpid/src/aml/type1opcode.rs", "rank": 64, "score": 143564.96939664174 }, { "content": "fn parse_def_and(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x7B);\n\n\n\n let lhs = parse_term_arg(&data[1..], ctx)?;\n\n let rhs = parse_term_arg(&data[1 + lhs.len..], ctx)?;\n\n let target = parse_target(&data[1 + lhs.len + rhs.len..], ctx)?;\n\n\n\n let result = AmlValue::Integer(lhs.val.get_as_integer(ctx.acpi_context())? 
& rhs.val.get_as_integer(ctx.acpi_context())?);\n\n\n\n let _ = ctx.modify(ctx.acpi_context(), target.val, result.clone());\n\n\n\n Ok(AmlParseType {\n\n val: result,\n\n len: 1 + lhs.len + rhs.len + target.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 65, "score": 143564.96939664174 }, { "content": "fn parse_def_not(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x80);\n\n\n\n let operand = parse_term_arg(&data[1..], ctx)?;\n\n let target = parse_target(&data[1 + operand.len..], ctx)?;\n\n\n\n let result = AmlValue::Integer(!operand.val.get_as_integer(ctx.acpi_context())?);\n\n\n\n let _ = ctx.modify(ctx.acpi_context(), target.val, result.clone());\n\n\n\n Ok(AmlParseType {\n\n val: result,\n\n len: 1 + operand.len + target.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 66, "score": 143564.96939664174 }, { "content": "fn parse_def_nor(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x7E);\n\n\n\n let lhs = parse_term_arg(&data[1..], ctx)?;\n\n let rhs = parse_term_arg(&data[1 + lhs.len..], ctx)?;\n\n let target = parse_target(&data[1 + lhs.len + rhs.len..], ctx)?;\n\n\n\n let result = AmlValue::Integer(!(lhs.val.get_as_integer(ctx.acpi_context())? 
| rhs.val.get_as_integer(ctx.acpi_context())?));\n\n\n\n let _ = ctx.modify(ctx.acpi_context(), target.val, result.clone());\n\n\n\n Ok(AmlParseType {\n\n val: result,\n\n len: 1 + lhs.len + rhs.len + target.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 67, "score": 143564.96939664174 }, { "content": "fn parse_def_or(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x7D);\n\n\n\n let lhs = parse_term_arg(&data[1..], ctx)?;\n\n let rhs = parse_term_arg(&data[1 + lhs.len..], ctx)?;\n\n let target = parse_target(&data[1 + lhs.len + rhs.len..], ctx)?;\n\n\n\n let result = AmlValue::Integer(lhs.val.get_as_integer(ctx.acpi_context())? | rhs.val.get_as_integer(ctx.acpi_context())?);\n\n\n\n let _ = ctx.modify(ctx.acpi_context(), target.val, result.clone());\n\n\n\n Ok(AmlParseType {\n\n val: result,\n\n len: 1 + lhs.len + rhs.len + target.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 68, "score": 143564.96939664174 }, { "content": "fn parse_def_decrement(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x76);\n\n\n\n let obj = parse_super_name(&data[1..], ctx)?;\n\n\n\n let _namespace = ctx.prelock(ctx.acpi_context());\n\n let value = AmlValue::Integer(ctx.get(ctx.acpi_context(), obj.val.clone())?.get_as_integer(ctx.acpi_context())? 
- 1);\n\n let _ = ctx.modify(ctx.acpi_context(), obj.val, value.clone());\n\n\n\n Ok(AmlParseType {\n\n val: value,\n\n len: 1 + obj.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 69, "score": 141421.9075432215 }, { "content": "fn parse_def_from_bcd(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode_extended!(data, 0x28);\n\n\n\n let operand = parse_term_arg(&data[2..], ctx)?;\n\n let target = parse_target(&data[2 + operand.len..], ctx)?;\n\n\n\n let mut i = operand.val.get_as_integer(ctx.acpi_context())?;\n\n let mut result = 0;\n\n\n\n while i != 0 {\n\n if i & 0x0F > 10 {\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 70, "score": 141421.9075432215 }, { "content": "fn parse_scope_op(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x10);\n\n\n\n let (pkg_length, pkg_length_len) = parse_pkg_length(&data[1..])?;\n\n let name = parse_name_string(&data[1 + pkg_length_len..], ctx)?;\n\n\n\n let local_scope_string = get_namespace_string(ctx.acpi_context(), ctx.scope.clone(), name.val.clone())?;\n\n let containing_scope_string = ctx.scope.clone();\n\n\n\n ctx.scope = local_scope_string;\n\n parse_term_list(&data[1 + pkg_length_len + name.len .. 
1 + pkg_length], ctx)?;\n\n ctx.scope = containing_scope_string;\n\n\n\n Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 1 + pkg_length\n\n })\n\n}\n", "file_path": "acpid/src/aml/namespacemodifier.rs", "rank": 71, "score": 141421.9075432215 }, { "content": "fn parse_def_ref_of(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x71);\n\n\n\n let obj = parse_super_name(&data[1..], ctx)?;\n\n let res = match obj.val {\n\n AmlValue::String(ref s) => {\n\n match ctx.get(ctx.acpi_context(), AmlValue::String(s.clone()))? {\n\n AmlValue::None => return Err(AmlError::AmlValueError),\n\n _ => ObjectReference::Object(s.clone())\n\n }\n\n },\n\n AmlValue::ObjectReference(ref o) => o.clone(),\n\n _ => return Err(AmlError::AmlValueError)\n\n };\n\n\n\n Ok(AmlParseType {\n\n val: AmlValue::ObjectReference(res),\n\n len: 1 + obj.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 72, "score": 141421.9075432215 }, { "content": "fn parse_alias_op(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x06);\n\n\n\n let source_name = parse_name_string(&data[1..], ctx)?;\n\n let alias_name = parse_name_string(&data[1 + source_name.len..], ctx)?;\n\n\n\n let local_scope_string = get_namespace_string(ctx.acpi_context(), ctx.scope.clone(), source_name.val)?;\n\n let local_alias_string = get_namespace_string(ctx.acpi_context(), ctx.scope.clone(), alias_name.val)?;\n\n\n\n ctx.add_to_namespace(ctx.acpi_context(), local_scope_string, AmlValue::Alias(local_alias_string))?;\n\n\n\n Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 1 + source_name.len + alias_name.len\n\n })\n\n}\n\n\n", "file_path": 
"acpid/src/aml/namespacemodifier.rs", "rank": 73, "score": 141421.9075432215 }, { "content": "fn parse_def_to_bcd(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode_extended!(data, 0x29);\n\n\n\n let operand = parse_term_arg(&data[2..], ctx)?;\n\n let target = parse_target(&data[2 + operand.len..], ctx)?;\n\n\n\n let mut i = operand.val.get_as_integer(ctx.acpi_context())?;\n\n let mut result = 0;\n\n\n\n while i != 0 {\n\n result <<= 4;\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 74, "score": 141421.9075432215 }, { "content": "fn parse_def_divide(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x78);\n\n\n\n let lhs = parse_term_arg(&data[1..], ctx)?;\n\n let rhs = parse_term_arg(&data[1 + lhs.len..], ctx)?;\n\n let target_remainder = parse_target(&data[1 + lhs.len + rhs.len..], ctx)?;\n\n let target_quotient = parse_target(&data[1 + lhs.len + rhs.len + target_remainder.len..], ctx)?;\n\n\n\n let numerator = lhs.val.get_as_integer(ctx.acpi_context())?;\n\n let denominator = rhs.val.get_as_integer(ctx.acpi_context())?;\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 75, "score": 141421.9075432215 }, { "content": "fn parse_def_concat(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x73);\n\n\n\n let lhs = parse_term_arg(&data[1..], ctx)?;\n\n let rhs = parse_term_arg(&data[1 + lhs.len..], ctx)?;\n\n let target = parse_target(&data[1 + lhs.len + rhs.len..], ctx)?;\n\n\n\n let result = match lhs.val 
{\n\n AmlValue::Integer(_i) => {\n\n let j = AmlValue::Integer(rhs.val.get_as_integer(ctx.acpi_context())?);\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 76, "score": 141421.9075432215 }, { "content": "fn parse_field_list(data: &[u8],\n\n ctx: &mut AmlExecutionContext,\n\n selector: FieldSelector,\n\n flags: &mut FieldFlags) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n let mut current_offset: usize = 0;\n\n let mut field_offset: usize = 0;\n\n let mut connection = AmlValue::Uninitialized;\n\n\n\n while current_offset < data.len() {\n\n let res = parse_field_element(&data[current_offset..], ctx, selector.clone(), &mut connection, flags, &mut field_offset)?;\n\n\n\n match ctx.state {\n", "file_path": "acpid/src/aml/namedobj.rs", "rank": 77, "score": 141421.9075432215 }, { "content": "fn parse_computational_data(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n match data[0] {\n\n 0x0A => Ok(AmlParseType {\n\n val: AmlValue::Integer(data[1] as u64),\n\n len: 2 as usize\n\n }),\n\n 0x0B => {\n\n let res = (data[1] as u16) +\n\n ((data[2] as u16) << 8);\n\n\n\n Ok(AmlParseType {\n", "file_path": "acpid/src/aml/dataobj.rs", "rank": 78, "score": 141421.9075432215 }, { "content": "fn parse_term_obj(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_selector! 
{\n\n data, ctx,\n\n parse_namespace_modifier,\n\n parse_named_obj,\n\n parse_type1_opcode,\n\n parse_type2_opcode\n\n };\n\n\n\n Err(AmlError::AmlInvalidOpCode)\n\n}\n", "file_path": "acpid/src/aml/termlist.rs", "rank": 79, "score": 141421.9075432215 }, { "content": "fn parse_def_wait(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode_extended!(data, 0x25);\n\n\n\n let obj = parse_super_name(&data[2..], ctx)?;\n\n let timeout_obj = parse_term_arg(&data[2 + obj.len..], ctx)?;\n\n\n\n let timeout = timeout_obj.val.get_as_integer(ctx.acpi_context())?;\n\n\n\n let (seconds, nanoseconds) = monotonic();\n\n let starting_time_ns = nanoseconds + (seconds * 1_000_000_000);\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 80, "score": 141421.9075432215 }, { "content": "fn parse_def_xor(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x7F);\n\n\n\n let lhs = parse_term_arg(&data[1..], ctx)?;\n\n let rhs = parse_term_arg(&data[1 + lhs.len..], ctx)?;\n\n let target = parse_target(&data[1 + lhs.len + rhs.len..], ctx)?;\n\n\n\n let result = AmlValue::Integer(lhs.val.get_as_integer(ctx.acpi_context())? 
^ rhs.val.get_as_integer(ctx.acpi_context())?);\n\n\n\n let _ = ctx.modify(ctx.acpi_context(), target.val, result.clone());\n\n\n\n Ok(AmlParseType {\n\n val: result,\n\n len: 1 + lhs.len + rhs.len + target.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 81, "score": 141421.9075432215 }, { "content": "fn parse_def_to_buffer(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x96);\n\n\n\n let operand = parse_term_arg(&data[2..], ctx)?;\n\n let target = parse_target(&data[2 + operand.len..], ctx)?;\n\n\n\n let res = AmlValue::Buffer(operand.val.get_as_buffer(ctx.acpi_context())?);\n\n let _ = ctx.modify(ctx.acpi_context(), target.val, res.clone());\n\n\n\n Ok(AmlParseType {\n\n val: res,\n\n len: 1 + operand.len + target.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 82, "score": 141421.9075432215 }, { "content": "fn parse_def_increment(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x75);\n\n\n\n let obj = parse_super_name(&data[1..], ctx)?;\n\n\n\n let _namespace = ctx.prelock(ctx.acpi_context());\n\n let value = AmlValue::Integer(ctx.get(ctx.acpi_context(), obj.val.clone())?.get_as_integer(ctx.acpi_context())? 
+ 1);\n\n let _ = ctx.modify(ctx.acpi_context(), obj.val, value.clone());\n\n\n\n Ok(AmlParseType {\n\n val: value,\n\n len: 1 + obj.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 83, "score": 141421.9075432215 }, { "content": "fn parse_def_land(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x90);\n\n\n\n let lhs = parse_term_arg(&data[1..], ctx)?;\n\n let rhs = parse_term_arg(&data[1 + lhs.len..], ctx)?;\n\n\n\n let result = if lhs.val.get_as_integer(ctx.acpi_context())? > 0 && rhs.val.get_as_integer(ctx.acpi_context())? > 0 { 1 } else { 0 };\n\n\n\n Ok(AmlParseType {\n\n val: AmlValue::IntegerConstant(result),\n\n len: 1 + lhs.len + rhs.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 84, "score": 141421.9075432215 }, { "content": "fn parse_def_to_string(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x9C);\n\n\n\n let operand = parse_term_arg(&data[1..], ctx)?;\n\n let length = parse_term_arg(&data[1 + operand.len..], ctx)?;\n\n let target = parse_target(&data[1 + operand.len + length.len..], ctx)?;\n\n\n\n let buf = operand.val.get_as_buffer(ctx.acpi_context())?;\n\n let mut string = match String::from_utf8(buf) {\n\n Ok(s) => s,\n\n Err(_) => return Err(AmlError::AmlValueError)\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 85, "score": 141421.9075432215 }, { "content": "fn parse_name_op(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 
0x08);\n\n\n\n let name = parse_name_string(&data[1..], ctx)?;\n\n let data_ref_obj = parse_data_ref_obj(&data[1 + name.len..], ctx)?;\n\n\n\n let local_scope_string = get_namespace_string(ctx.acpi_context(), ctx.scope.clone(), name.val)?;\n\n\n\n ctx.add_to_namespace(ctx.acpi_context(), local_scope_string, data_ref_obj.val)?;\n\n\n\n Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 1 + name.len + data_ref_obj.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/namespacemodifier.rs", "rank": 86, "score": 141421.9075432215 }, { "content": "fn parse_def_size_of(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x87);\n\n\n\n let name = parse_super_name(&data[1..], ctx)?;\n\n let obj = ctx.get(ctx.acpi_context(), name.val)?;\n\n\n\n let res = match obj {\n\n AmlValue::Buffer(ref v) => v.len(),\n\n AmlValue::String(ref s) => s.len(),\n\n AmlValue::Package(ref p) => p.len(),\n\n _ => return Err(AmlError::AmlValueError)\n\n };\n\n\n\n Ok(AmlParseType {\n\n val: AmlValue::Integer(res as u64),\n\n len: 1 + name.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 87, "score": 141421.9075432215 }, { "content": "fn parse_def_deref_of(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x83);\n\n\n\n let obj = parse_term_arg(&data[1..], ctx)?;\n\n let res = ctx.get(ctx.acpi_context(), obj.val)?;\n\n\n\n match res {\n\n AmlValue::None => Err(AmlError::AmlValueError),\n\n _ => Ok(AmlParseType {\n\n val: res,\n\n len: 1 + obj.len\n\n })\n\n }\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 88, "score": 141421.9075432215 }, { "content": "fn parse_def_lgreater(data: 
&[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x94);\n\n\n\n let lhs = parse_term_arg(&data[1..], ctx)?;\n\n let rhs = parse_term_arg(&data[1 + lhs.len..], ctx)?;\n\n\n\n let result = if lhs.val.get_as_integer(ctx.acpi_context())? > rhs.val.get_as_integer(ctx.acpi_context())? { 1 } else { 0 };\n\n\n\n Ok(AmlParseType {\n\n val: AmlValue::IntegerConstant(result),\n\n len: 1 + lhs.len + rhs.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 89, "score": 141421.9075432215 }, { "content": "fn parse_def_lnot(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x92);\n\n\n\n let operand = parse_term_arg(&data[1..], ctx)?;\n\n let result = if operand.val.get_as_integer(ctx.acpi_context())? == 0 { 1 } else { 0 };\n\n\n\n Ok(AmlParseType {\n\n val: AmlValue::IntegerConstant(result),\n\n len: 1 + operand.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 90, "score": 141421.9075432215 }, { "content": "fn parse_def_add(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x72);\n\n\n\n let lhs = parse_term_arg(&data[1..], ctx)?;\n\n let rhs = parse_term_arg(&data[1 + lhs.len..], ctx)?;\n\n let target = parse_target(&data[1 + lhs.len + rhs.len..], ctx)?;\n\n\n\n let result = AmlValue::Integer(lhs.val.get_as_integer(ctx.acpi_context())? 
+ rhs.val.get_as_integer(ctx.acpi_context())?);\n\n\n\n let _ = ctx.modify(ctx.acpi_context(), target.val, result.clone());\n\n\n\n Ok(AmlParseType {\n\n val: result,\n\n len: 1 + lhs.len + rhs.len + target.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 91, "score": 141421.9075432215 }, { "content": "fn parse_def_to_integer(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x99);\n\n\n\n let operand = parse_term_arg(&data[2..], ctx)?;\n\n let target = parse_target(&data[2 + operand.len..], ctx)?;\n\n\n\n let res = AmlValue::Integer(operand.val.get_as_integer(ctx.acpi_context())?);\n\n\n\n let _ = ctx.modify(ctx.acpi_context(), target.val, res.clone());\n\n\n\n Ok(AmlParseType {\n\n val: res,\n\n len: 1 + operand.len + target.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 92, "score": 141421.9075432215 }, { "content": "fn parse_def_lequal(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x93);\n\n\n\n let lhs = parse_term_arg(&data[1..], ctx)?;\n\n let rhs = parse_term_arg(&data[1 + lhs.len..], ctx)?;\n\n\n\n let result = if lhs.val.get_as_integer(ctx.acpi_context())? == rhs.val.get_as_integer(ctx.acpi_context())? 
{ 1 } else { 0 };\n\n\n\n Ok(AmlParseType {\n\n val: AmlValue::IntegerConstant(result),\n\n len: 1 + lhs.len + rhs.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 93, "score": 141421.9075432215 }, { "content": "fn parse_field_element(data: &[u8],\n\n ctx: &mut AmlExecutionContext,\n\n selector: FieldSelector,\n\n connection: &mut AmlValue,\n\n flags: &mut FieldFlags,\n\n offset: &mut usize) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n let length = if let Ok(field) = parse_named_field(data, ctx) {\n\n let local_scope_string = get_namespace_string(ctx.acpi_context(), ctx.scope.clone(), AmlValue::String(field.val.name.clone()))?;\n\n\n\n ctx.add_to_namespace(ctx.acpi_context(), local_scope_string, AmlValue::FieldUnit(FieldUnit {\n\n selector: selector.clone(),\n\n connection: Box::new(connection.clone()),\n", "file_path": "acpid/src/aml/namedobj.rs", "rank": 94, "score": 141421.9075432215 }, { "content": "fn parse_def_lor(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x91);\n\n\n\n let lhs = parse_term_arg(&data[1..], ctx)?;\n\n let rhs = parse_term_arg(&data[1 + lhs.len..], ctx)?;\n\n\n\n let result = if lhs.val.get_as_integer(ctx.acpi_context())? > 0 || rhs.val.get_as_integer(ctx.acpi_context())? 
> 0 { 1 } else { 0 };\n\n\n\n Ok(AmlParseType {\n\n val: AmlValue::IntegerConstant(result),\n\n len: 1 + lhs.len + rhs.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 95, "score": 141421.9075432215 }, { "content": "fn parse_def_store(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x70);\n\n\n\n let operand = parse_term_arg(&data[1..], ctx)?;\n\n let target = parse_super_name(&data[1 + operand.len..], ctx)?;\n\n\n\n let _ = ctx.modify(ctx.acpi_context(), target.val.clone(), operand.val);\n\n\n\n Ok(AmlParseType {\n\n val: target.val,\n\n len: 1 + operand.len + target.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 96, "score": 141421.9075432215 }, { "content": "fn parse_def_subtract(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x74);\n\n\n\n let lhs = parse_term_arg(&data[1..], ctx)?;\n\n let rhs = parse_term_arg(&data[1 + lhs.len..], ctx)?;\n\n let target = parse_target(&data[1 + lhs.len + rhs.len..], ctx)?;\n\n\n\n let result = AmlValue::Integer(lhs.val.get_as_integer(ctx.acpi_context())? 
- rhs.val.get_as_integer(ctx.acpi_context())?);\n\n\n\n let _ = ctx.modify(ctx.acpi_context(), target.val, result.clone());\n\n\n\n Ok(AmlParseType {\n\n val: result,\n\n len: 1 + lhs.len + rhs.len + target.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 97, "score": 141421.9075432215 }, { "content": "fn parse_def_acquire(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode_extended!(data, 0x23);\n\n\n\n let obj = parse_super_name(&data[1..], ctx)?;\n\n let timeout = (data[2 + obj.len] as u16) + ((data[3 + obj.len] as u16) << 8);\n\n\n\n let (seconds, nanoseconds) = monotonic();\n\n let starting_time_ns = nanoseconds + (seconds * 1_000_000_000);\n\n\n\n loop {\n\n match ctx.acquire_mutex(ctx.acpi_context(), obj.val.clone()) {\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 98, "score": 141421.9075432215 }, { "content": "fn parse_def_lless(data: &[u8],\n\n ctx: &mut AmlExecutionContext) -> ParseResult {\n\n match ctx.state {\n\n ExecutionState::EXECUTING => (),\n\n _ => return Ok(AmlParseType {\n\n val: AmlValue::None,\n\n len: 0\n\n })\n\n }\n\n\n\n parser_opcode!(data, 0x95);\n\n\n\n let lhs = parse_term_arg(&data[1..], ctx)?;\n\n let rhs = parse_term_arg(&data[1 + lhs.len..], ctx)?;\n\n\n\n let result = if lhs.val.get_as_integer(ctx.acpi_context())? < rhs.val.get_as_integer(ctx.acpi_context())? { 1 } else { 0 };\n\n\n\n Ok(AmlParseType {\n\n val: AmlValue::IntegerConstant(result),\n\n len: 1 + lhs.len + rhs.len\n\n })\n\n}\n\n\n", "file_path": "acpid/src/aml/type2opcode.rs", "rank": 99, "score": 141421.9075432215 } ]
Rust
src/app_service.rs
Zignar-Technologies/did-api
18efc8fab44c055d2eddfd4567cf830d011e94d6
use std::str::FromStr; use actix_web::{post, web, HttpResponse}; use identity::{ core::{FromJson, ToJson}, credential::{Credential, Presentation}, iota::{ClientMap, IotaDID}, prelude::IotaDocument, }; use serde_json::{json, Value}; use crate::utils_did::validator::CredentialValidation; use crate::utils_did::validator::PresentationValidation; use crate::{ jsons_did::{ create_did_vm::CreateDidVm, create_vc::CreateVc, create_vp::CreateVp, holder_credential::HolderCredential, holder_presentation::HolderPresentation, remove_vm::RemoveVm, }, utils_did::{common, user::User}, }; #[post("/create_did")] async fn createDidApi(data_json: web::Json<CreateDidVm>) -> HttpResponse { let data = data_json.into_inner(); let account = User::new(data.nick_name.to_string(), data.password.to_string()) .expect("Error Account"); let identity = account.create_identity().unwrap(); let iota_did: &IotaDID = identity.try_did().unwrap(); let explorer = iota_did .network() .unwrap() .explorer_url() .unwrap() .to_string(); println!( "[Example] Explore the DID Document = {}/{}", iota_did .network() .unwrap() .explorer_url() .unwrap() .to_string(), iota_did.to_string() ); let explorer = format!("{}/{}", explorer, iota_did.to_string()); HttpResponse::Ok() .content_type("application/json") .json(json!({ "Error": false, "Status": "Did Created", "did": iota_did, "Explorer": explorer, })) } #[post("/add_verif_method")] async fn addVerifMethodApi(data_json: web::Json<CreateDidVm>) -> HttpResponse { let data = data_json.into_inner(); let iota_did = IotaDID::from_str(&data.did.unwrap()).unwrap(); let mut account = User::new( data.nick_name.to_string(), data.password.to_string(), ) .expect("Error Account"); let vm_randon = account.create_method(&iota_did).unwrap(); let explorer = iota_did .network() .unwrap() .explorer_url() .unwrap() .to_string(); println!( "[Example] Explore the DID Document = {}/{}", iota_did .network() .unwrap() .explorer_url() .unwrap() .to_string(), iota_did.to_string() ); let explorer 
= format!("{}/{}", explorer, iota_did.to_string()); HttpResponse::Ok() .content_type("application/json") .json(json!({ "Error": false, "Status": "Method Created", "Explorer": explorer, "vm_name": vm_randon })) } #[post("/create_vc")] async fn createVcApi(data_json: web::Json<CreateVc>) -> HttpResponse { let data = data_json.into_inner(); let data_issuer = data.issuer; let iota_did = IotaDID::from_str(&data_issuer.did.unwrap()).unwrap(); let account = User::new( data_issuer.nick_name.to_string(), data_issuer.password.to_string() ) .expect("Error Account"); let data_holder = data.holder; let mut credential: Credential = common::issue_degree(&iota_did, &data_holder).unwrap(); account.sign_c(&iota_did, &data_issuer.vm_name.unwrap(), &mut credential); let resolved: IotaDocument = account.resolve_identity(&iota_did).unwrap(); let verified: bool = resolved.verify_data(&credential).is_ok(); if verified == false { return HttpResponse::Ok() .content_type("application/json") .json(json!({ "Error": true, "Verified": verified, })); } let credential_str = credential.clone().to_json().unwrap(); let credential_json: Value = serde_json::from_str(&credential_str).unwrap(); HttpResponse::Ok() .content_type("application/json") .json(json!({ "Error": false, "Status": "Credential Created", "Verified": verified, "Credential": credential_json })) } #[post("/verify_validity_credential")] pub async fn verifyValidityApiCred(data_json: web::Json<HolderCredential>) -> HttpResponse { let client: ClientMap = ClientMap::new(); let validation: CredentialValidation = common::check_credential(&client, data_json).unwrap(); if validation.verified == false { return HttpResponse::Ok() .content_type("application/json") .json(json!({ "Error": true, "validation": validation.verified, })); } HttpResponse::Ok() .content_type("application/json") .json(json!({ "error": false, "validation": validation.verified, })) } #[post("/create_vp")] pub async fn createVpApi(data_json: web::Json<CreateVp>) -> HttpResponse 
{ let data = data_json.into_inner(); let data_holder = data.holder; let iota_did = IotaDID::from_str(&data_holder.did.unwrap()).unwrap(); let account = User::new( data_holder.nick_name.to_string(), data_holder.password.to_string(), ) .expect("Error Account"); let data_holder_c = serde_json::to_string(&data.holder_credential).unwrap(); let credential: Credential = Credential::from_json(&data_holder_c).unwrap(); let mut presentation: Presentation = common::holder_presentation(&iota_did, credential).unwrap(); account.sign_p(&iota_did, &data_holder.vm_name.unwrap(), &mut presentation); let resolved: IotaDocument = account.resolve_identity(&iota_did).unwrap(); let verified: bool = resolved.verify_data(&presentation).is_ok(); if verified == false { return HttpResponse::Ok() .content_type("application/json") .json(json!({ "Error": true, "Verified": verified, })); } let presentation_str = presentation.clone().to_json().unwrap(); let presentation_json: Value = serde_json::from_str(&presentation_str).unwrap(); HttpResponse::Ok() .content_type("application/json") .json(json!({ "Error": false, "Status": "Presentation Created", "verified": verified, "Presentation": presentation_json })) } #[post("/verify_validity_presentation")] pub async fn verifyValidityApiPres(data_json: web::Json<HolderPresentation>) -> HttpResponse { let client: ClientMap = ClientMap::new(); let validation: PresentationValidation = common::check_presentation(&client, data_json).unwrap(); if validation.verified == false { return HttpResponse::Ok() .content_type("application/json") .json(json!({ "Error": true, "validation": validation.verified, })); } HttpResponse::Ok() .content_type("application/json") .json(json!({ "error": false, "validation": validation.verified, })) } #[post("/remove_vm")] async fn removeVmApi(data_json: web::Json<RemoveVm>) -> HttpResponse { let data = data_json.into_inner(); let data_issuer = data.issuer; let iota_did = IotaDID::from_str(&data_issuer.did.unwrap()).unwrap(); let mut 
account = User::new( data_issuer.nick_name.to_string(), data_issuer.password.to_string(), ) .expect("Error Account"); account.delete_method(&iota_did, data_issuer.vm_name.unwrap()); HttpResponse::Ok() .content_type("application/json") .json(json!({ "error": false, "Status": "Removed VM" })) }
use std::str::FromStr; use actix_web::{post, web, HttpResponse}; use identity::{ core::{FromJson, ToJson}, credential::{Credential, Presentation}, iota::{ClientMap, IotaDID}, prelude::IotaDocument, }; use serde_json::{json, Value}; use crate::utils_did::validator::CredentialValidation; use crate::utils_did::validator::PresentationValidation; use crate::{ jsons_did::{ create_did_vm::CreateDidVm, create_vc::CreateVc, create_vp::CreateVp, holder_credential::HolderCredential, holder_presentation::HolderPresentation, remove_vm::RemoveVm, }, utils_did::{common, user::User}, }; #[post("/create_did")] async fn createDidApi(data_json: web::Json<CreateDidVm>) -> HttpResponse { let data = data_json.into_inner(); let account = User::new(data.nick_name.to_string(), data.password.to_string()) .expect("Error Account"); let identity = account.create_identity().unwrap(); let iota_did: &IotaDID = identity.try_did().unwrap(); let explorer = iota_did .network() .unwrap() .explorer_url() .unwrap() .to_string(); println!( "[Example] Explore the DID Document = {}/{}", iota_did .network() .unwrap() .explorer_url() .unwrap() .to_string(), iota_did.to_string() ); let explorer = format!("{}/{}", explorer, iota_did.to_string()); HttpResponse::Ok() .content_type("application/json") .json(json!({ "Error": false, "Status": "Did Created", "did": iota_did, "Explorer": explorer, })) } #[post("/add_verif_method")] async fn addVerifMethodApi(data_json: web::Json<CreateDidVm>) -> HttpResponse { let data = data_json.into_inner(); let iota_did = IotaDID::from_str(&data.did.unwrap()).unwrap(); let mut account = User::new( data.nick_name.to_string(), data.password.to_string(), ) .expect("Error Account"); let vm_randon = account.create_method(&iota_did).unwrap(); let explorer = iota_did .network() .unwrap() .explorer_url() .unwrap() .to_string(); println!( "[Example] Explore the DID Document = {}/{}", iota_did .network() .unwrap() .explorer_url() .unwrap() .to_string(), iota_did.to_string() ); let explorer 
= format!("{}/{}", explorer, iota_did.to_string()); HttpResponse::Ok() .content_type("application/json") .json(json!({ "Err
#[post("/create_vc")] async fn createVcApi(data_json: web::Json<CreateVc>) -> HttpResponse { let data = data_json.into_inner(); let data_issuer = data.issuer; let iota_did = IotaDID::from_str(&data_issuer.did.unwrap()).unwrap(); let account = User::new( data_issuer.nick_name.to_string(), data_issuer.password.to_string() ) .expect("Error Account"); let data_holder = data.holder; let mut credential: Credential = common::issue_degree(&iota_did, &data_holder).unwrap(); account.sign_c(&iota_did, &data_issuer.vm_name.unwrap(), &mut credential); let resolved: IotaDocument = account.resolve_identity(&iota_did).unwrap(); let verified: bool = resolved.verify_data(&credential).is_ok(); if verified == false { return HttpResponse::Ok() .content_type("application/json") .json(json!({ "Error": true, "Verified": verified, })); } let credential_str = credential.clone().to_json().unwrap(); let credential_json: Value = serde_json::from_str(&credential_str).unwrap(); HttpResponse::Ok() .content_type("application/json") .json(json!({ "Error": false, "Status": "Credential Created", "Verified": verified, "Credential": credential_json })) } #[post("/verify_validity_credential")] pub async fn verifyValidityApiCred(data_json: web::Json<HolderCredential>) -> HttpResponse { let client: ClientMap = ClientMap::new(); let validation: CredentialValidation = common::check_credential(&client, data_json).unwrap(); if validation.verified == false { return HttpResponse::Ok() .content_type("application/json") .json(json!({ "Error": true, "validation": validation.verified, })); } HttpResponse::Ok() .content_type("application/json") .json(json!({ "error": false, "validation": validation.verified, })) } #[post("/create_vp")] pub async fn createVpApi(data_json: web::Json<CreateVp>) -> HttpResponse { let data = data_json.into_inner(); let data_holder = data.holder; let iota_did = IotaDID::from_str(&data_holder.did.unwrap()).unwrap(); let account = User::new( data_holder.nick_name.to_string(), 
data_holder.password.to_string(), ) .expect("Error Account"); let data_holder_c = serde_json::to_string(&data.holder_credential).unwrap(); let credential: Credential = Credential::from_json(&data_holder_c).unwrap(); let mut presentation: Presentation = common::holder_presentation(&iota_did, credential).unwrap(); account.sign_p(&iota_did, &data_holder.vm_name.unwrap(), &mut presentation); let resolved: IotaDocument = account.resolve_identity(&iota_did).unwrap(); let verified: bool = resolved.verify_data(&presentation).is_ok(); if verified == false { return HttpResponse::Ok() .content_type("application/json") .json(json!({ "Error": true, "Verified": verified, })); } let presentation_str = presentation.clone().to_json().unwrap(); let presentation_json: Value = serde_json::from_str(&presentation_str).unwrap(); HttpResponse::Ok() .content_type("application/json") .json(json!({ "Error": false, "Status": "Presentation Created", "verified": verified, "Presentation": presentation_json })) } #[post("/verify_validity_presentation")] pub async fn verifyValidityApiPres(data_json: web::Json<HolderPresentation>) -> HttpResponse { let client: ClientMap = ClientMap::new(); let validation: PresentationValidation = common::check_presentation(&client, data_json).unwrap(); if validation.verified == false { return HttpResponse::Ok() .content_type("application/json") .json(json!({ "Error": true, "validation": validation.verified, })); } HttpResponse::Ok() .content_type("application/json") .json(json!({ "error": false, "validation": validation.verified, })) } #[post("/remove_vm")] async fn removeVmApi(data_json: web::Json<RemoveVm>) -> HttpResponse { let data = data_json.into_inner(); let data_issuer = data.issuer; let iota_did = IotaDID::from_str(&data_issuer.did.unwrap()).unwrap(); let mut account = User::new( data_issuer.nick_name.to_string(), data_issuer.password.to_string(), ) .expect("Error Account"); account.delete_method(&iota_did, data_issuer.vm_name.unwrap()); HttpResponse::Ok() 
.content_type("application/json") .json(json!({ "error": false, "Status": "Removed VM" })) }
or": false, "Status": "Method Created", "Explorer": explorer, "vm_name": vm_randon })) }
function_block-function_prefixed
[ { "content": "pub fn holder_presentation(holder: &IotaDID, credential: Credential) -> Result<Presentation> {\n\n // Create VC \"subject\" field containing subject ID and claims about it.\n\n\n\n // Create an unsigned Presentation from the previously issued Verifiable Credential.\n\n let presentation: Presentation = PresentationBuilder::default()\n\n // .id(Url::parse(\"asdf:foo:a87w3guasbdfuasbdfs\").unwrap())\n\n .holder(Url::parse(holder.as_str()).unwrap())\n\n .credential(credential)\n\n .build()\n\n .unwrap();\n\n\n\n Ok(presentation)\n\n}\n\n\n\n/// Convenience function for checking that a verifiable credential is valid and not revoked.\n\n#[tokio::main]\n\npub async fn check_credential(\n\n client: &ClientMap,\n\n credential: web::Json<HolderCredential>,\n\n) -> Result<CredentialValidation> {\n", "file_path": "src/utils_did/common.rs", "rank": 0, "score": 42252.54164933992 }, { "content": "/// Helper that takes two DID Documents (identities) for issuer and subject, and\n\n/// creates an unsigned credential with claims about subject by issuer.\n\npub fn issue_degree(issuer: &IotaDID, data_holder: &HolderData) -> Result<Credential> {\n\n // Create VC \"subject\" field containing subject ID and claims about it.\n\n\n\n let subject: Subject = Subject::from_json_value(json!(data_holder.to_owned())).unwrap();\n\n\n\n // Build credential using subject above and issuer.\n\n let credential: Credential = Credential::builder(Default::default())\n\n .id(Url::parse(\"https://example.edu/credentials/3732\").unwrap())\n\n .issuer(Url::parse(issuer.as_str()).unwrap())\n\n .type_(\"UniversityDegreeCredential\")\n\n .subject(subject)\n\n .build()\n\n .unwrap();\n\n\n\n Ok(credential)\n\n}\n\n\n", "file_path": "src/utils_did/common.rs", "rank": 1, "score": 40161.04248058627 }, { "content": "use serde::Serialize;\n\nuse serde_derive::Deserialize;\n\n\n\nuse super::{holder_credential::HolderCredential};\n\n\n\n#[derive(Serialize, Deserialize,Debug)]\n\npub struct 
HolderPresentation { \n\n #[serde(rename = \"@context\")]\n\n pub context: String,\n\n pub verifiableCredential: HolderCredential,\n\n pub holder: String,\n\n pub proof: HolderProof,\n\n pub r#type: String\n\n}\n\n\n\n#[derive(Serialize, Deserialize,Debug)]\n\npub struct HolderProof { \n\n signatureValue: String,\n\n r#type: String,\n\n verificationMethod: String\n\n}", "file_path": "src/jsons_did/holder_presentation.rs", "rank": 2, "score": 20386.81148793974 }, { "content": "use serde_derive::Deserialize;\n\nuse serde_derive::Serialize;\n\n\n\n\n\n#[derive(Serialize, Deserialize,Debug)]\n\npub struct HolderData { \n\n pub id: String,\n\n pub name: String,\n\n pub degreeName: String,\n\n pub degreeType: String,\n\n pub GPA: String,\n\n}", "file_path": "src/jsons_did/holder_data.rs", "rank": 3, "score": 20385.739108252703 }, { "content": "\n\nuse serde::Serialize;\n\nuse serde_derive::Deserialize;\n\n\n\nuse super::{holder_data::HolderData, create_did_vm::CreateDidVm};\n\n\n\n#[derive(Serialize, Deserialize,Debug)]\n\npub struct CreateVc { \n\n pub issuer: CreateDidVm,\n\n pub holder: HolderData,\n\n}\n\n ", "file_path": "src/jsons_did/create_vc.rs", "rank": 4, "score": 20281.35364651219 }, { "content": "\n\nuse serde::Serialize;\n\nuse serde_derive::Deserialize;\n\n\n\nuse super::{create_did_vm::CreateDidVm, holder_credential::HolderCredential};\n\n\n\n#[derive(Serialize, Deserialize,Debug)]\n\npub struct CreateVp { \n\n pub holder: CreateDidVm,\n\n pub holder_credential: HolderCredential,\n\n}\n\n ", "file_path": "src/jsons_did/create_vp.rs", "rank": 5, "score": 20278.780642812388 }, { "content": "use serde::Serialize;\n\nuse serde_derive::Deserialize;\n\n\n\n#[derive(Serialize, Deserialize,Debug)]\n\npub struct CreateDidVm { \n\n pub nick_name: String,\n\n pub password: String,\n\n pub did: Option<String>,\n\n pub vm_name: Option<String>,\n\n}", "file_path": "src/jsons_did/create_did_vm.rs", "rank": 6, "score": 20277.64839789505 }, { "content": "pub fn randomVM() 
-> String {\n\n const CHARSET: &[u8] = b\"ABCDEFGHIJKLMNOPQRSTUVWXYZ9\";\n\n let SEED_LEN: usize = 8;\n\n let mut rng = rand::thread_rng();\n\n let vm: String = (0..SEED_LEN)\n\n .map(|_| {\n\n let idx = rng.gen_range(0..CHARSET.len());\n\n CHARSET[idx] as char\n\n })\n\n .collect();\n\n vm\n\n}\n", "file_path": "src/utils_did/rnd_vm_name.rs", "rank": 7, "score": 19945.772286785035 }, { "content": "use crate::jsons_did::holder_credential::HolderCredential;\n\nuse crate::jsons_did::holder_data::HolderData;\n\nuse crate::jsons_did::holder_presentation::HolderPresentation;\n\nuse actix_web::web;\n\nuse identity::credential::{Presentation, PresentationBuilder};\n\n// use identity::iota::{CredentialValidator, PresentationValidation};\n\nuse crate::utils_did::validator::CredentialValidator;\n\nuse crate::utils_did::validator::PresentationValidation;\n\nuse crate::utils_did::validator::CredentialValidation;\n\nuse identity::prelude::*;\n\nuse identity::{\n\n core::{FromJson, Url},\n\n credential::{Credential, Subject},\n\n did::DID,\n\n iota::{ClientMap, IotaDID},\n\n};\n\nuse serde_json::json;\n\n/// Helper that takes two DID Documents (identities) for issuer and subject, and\n\n/// creates an unsigned credential with claims about subject by issuer.\n", "file_path": "src/utils_did/common.rs", "rank": 11, "score": 15.632971286682668 }, { "content": "use std::path::PathBuf;\n\n\n\nuse crate::utils_did::rnd_vm_name::randomVM;\n\nuse identity::account::{IdentityCreate, IdentityState};\n\nuse identity::prelude::*;\n\nuse identity::{\n\n account::{Account, AccountStorage},\n\n credential::{Credential, Presentation},\n\n iota::IotaDID,\n\n prelude::IotaDocument,\n\n};\n\n\n\n#[derive(Debug)]\n\npub struct User {\n\n account: Account,\n\n}\n\n\n\nimpl User {\n\n #[tokio::main]\n\n pub async fn new(\n", "file_path": "src/utils_did/user.rs", "rank": 12, "score": 15.277210950190327 }, { "content": " pub async fn create_identity(&self) -> Result<IdentityState> {\n\n Ok(self\n\n 
.account\n\n .create_identity(IdentityCreate::default())\n\n .await\n\n .unwrap())\n\n }\n\n\n\n #[tokio::main]\n\n pub async fn create_method(&mut self, iota_did: &IotaDID) -> Result<String> {\n\n let vm_name: String = randomVM();\n\n self.account\n\n .update_identity(&iota_did)\n\n .create_method()\n\n .fragment(&vm_name)\n\n .apply()\n\n .await\n\n .unwrap();\n\n Ok(vm_name)\n\n }\n", "file_path": "src/utils_did/user.rs", "rank": 16, "score": 12.880707193227973 }, { "content": " .sign(&key, vm_name, presentation)\n\n .await\n\n .unwrap()\n\n }\n\n\n\n #[tokio::main]\n\n pub async fn resolve_identity(&self, iota_did: &IotaDID) -> Result<IotaDocument> {\n\n Ok(self.account.resolve_identity(&iota_did).await.unwrap())\n\n }\n\n}\n", "file_path": "src/utils_did/user.rs", "rank": 19, "score": 12.129971192070347 }, { "content": "\n\n #[tokio::main]\n\n pub async fn delete_method(&mut self, iota_did: &IotaDID, vm_name: String) -> () {\n\n self.account\n\n .update_identity(&iota_did)\n\n .delete_method()\n\n .fragment(&vm_name)\n\n .apply()\n\n .await\n\n .unwrap();\n\n }\n\n\n\n #[tokio::main]\n\n pub async fn sign_c(&self, key: &IotaDID, vm_name: &str, credential: &mut Credential) -> () {\n\n self.account.sign(&key, vm_name, credential).await.unwrap()\n\n }\n\n\n\n #[tokio::main]\n\n pub async fn sign_p(&self, key: &IotaDID, vm_name: &str, presentation: &mut Presentation) -> () {\n\n self.account\n", "file_path": "src/utils_did/user.rs", "rank": 21, "score": 11.830687269764152 }, { "content": " // let presentation: Presentation = Presentation::from_json(&data_holder_c).unwrap();\n\n\n\n // Create a `CredentialValidator` instance to fetch and validate all\n\n // associated DID Documents from the Tangle.\n\n let validator: CredentialValidator<ClientMap> = CredentialValidator::new(&client);\n\n\n\n // Perform the validation operation.\n\n let validation: PresentationValidation = validator\n\n .check_presentation(&presentation_json)\n\n .await\n\n .unwrap();\n\n // 
println!(\"validation = {:#?}\", validation);\n\n Ok(validation)\n\n}\n", "file_path": "src/utils_did/common.rs", "rank": 22, "score": 10.666187672415324 }, { "content": "#![allow(non_snake_case)]\n\nuse actix_cors::Cors;\n\nuse actix_web::{http::header, App, HttpServer};\n\nmod app_service;\n\nmod jsons_did;\n\nmod utils_did;\n\nuse crate::app_service::createDidApi;\n\nuse crate::app_service::createVcApi;\n\nuse crate::app_service::createVpApi;\n\nuse crate::app_service::removeVmApi;\n\nuse crate::app_service::verifyValidityApiCred;\n\nuse crate::app_service::verifyValidityApiPres;\n\nuse crate::app_service::addVerifMethodApi;\n\n#[actix_web::main]\n\nasync fn main() -> std::io::Result<()> {\n\n // init env vars\n\n dotenv::from_path(\"./development.env\").ok();\n\n\n\n // building address and ip\n\n let port = std::env::var(\"PORT_API\").unwrap_or(\"8080\".to_string());\n", "file_path": "src/main.rs", "rank": 24, "score": 10.581002980348803 }, { "content": "use std::collections::BTreeMap;\n\nuse serde::de::DeserializeOwned;\n\nuse serde::Serialize;\n\n\n\nuse identity::{\n\n core::{FromJson, Object},\n\n credential::{Credential, Presentation},\n\n iota::{Client, IotaDID, TangleResolve},\n\n prelude::IotaDocument,\n\n};\n\nuse identity::iota::Error;\n\nuse identity::iota::Result;\n\n\n\n// Copyright 2020-2021 IOTA Stiftung\n\n// SPDX-License-Identifier: Apache-2.0\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize)]\n\npub struct CredentialValidation<T = Object> {\n\n pub credential: Credential<T>,\n\n pub issuer: DocumentValidation,\n", "file_path": "src/utils_did/validator.rs", "rank": 27, "score": 9.13878055605749 }, { "content": " // Convert the Verifiable Credential to JSON to potentially \"exchange\" with a verifier\n\n let credential_json = serde_json::to_string(&credential.into_inner()).unwrap();\n\n\n\n // Create a `CredentialValidator` instance to fetch and validate all\n\n // associated DID Documents from the Tangle.\n\n let validator: 
CredentialValidator<ClientMap> = CredentialValidator::new(client);\n\n\n\n // Perform the validation operation.\n\n let validation: CredentialValidation = validator.check(&credential_json).await.unwrap();\n\n Ok(validation)\n\n}\n\n\n\n#[tokio::main]\n\npub async fn check_presentation(\n\n client: &ClientMap,\n\n presentation: web::Json<HolderPresentation>,\n\n) -> Result<PresentationValidation> {\n\n // Convert the Verifiable Credential to JSON to potentially \"exchange\" with a verifier\n\n let presentation_json = serde_json::to_string(&presentation.into_inner()).unwrap();\n\n\n", "file_path": "src/utils_did/common.rs", "rank": 28, "score": 8.666691165242902 }, { "content": "### Steps\n\nIn this process, you will complete the different steps from the perspective of one of the mentioned roles above:\n\n\n\n1. **Holder:** Create a DID (Decentralized Identifier) document for Alice.\n\n - **POST** => http://localhost:6000/create_did\n\n ```json\n\n {\n\n \"nick_name\": \"Alice\",\n\n \"password\": \"my_password\"\n\n }\n\n ```\n\n **Response**\n\n ```json\n\n {\n\n \"Error\": false,\n\n \"Explorer\": \"https://explorer.iota.org/mainnet/identity-resolver/did:iota:6jAqUhoszQ79fWfx87Ga7o4TxkLcK6uBTm7LE2k26dAR\",\n\n \"Status\": \"Did Created\",\n\n \"did\": \"did:iota:6jAqUhoszQ79fWfx87Ga7o4TxkLcK6uBTm7LE2k26dAR\"\n\n }\n\n ```\n\n\n\n2. **Issuer:** Create a DID document for the University of Oslo.\n\n - **POST** => http://localhost:6000/create_did\n\n ```json\n\n {\n\n \"nick_name\": \"University of Oslo\",\n\n \"password\": \"my_password\"\n\n }\n\n ```\n\n \n\n **Response**\n\n ```json\n\n {\n\n \"Error\": false,\n\n \"Explorer\": \"https://explorer.iota.org/mainnet/identity-resolver/did:iota:4wN7q5NZKwCnkFtcv6VM42EQR9vC3DqxsLXB6pkUR1wV\",\n\n \"Status\": \"Did Created\",\n\n \"did\": \"did:iota:4wN7q5NZKwCnkFtcv6VM42EQR9vC3DqxsLXB6pkUR1wV\"\n\n }\n\n ```\n\n\n\n3. 
**Issuer:** Add a verification method \"degreeVerifications\" to the University's DID document with the purpose to verify Alice's degree. Since it's expected, that the University will have to sign more than just Alice's degree.\n\n - **POST** => http://localhost:6000/add_verif_method\n\n ```json\n\n {\n\n \"nick_name\": \"University of Oslo\",\n\n \"password\": \"my_password\",\n\n \"did\": \"did:iota:4wN7q5NZKwCnkFtcv6VM42EQR9vC3DqxsLXB6pkUR1wV\"\n\n }\n\n ```\n\n **Response**\n\n ```json\n\n {\n\n \"Error\": false,\n\n \"Explorer\": \"https://explorer.iota.org/mainnet/identity-resolver/did:iota:4wN7q5NZKwCnkFtcv6VM42EQR9vC3DqxsLXB6pkUR1wV\",\n\n \"Status\": \"Method Created\",\n\n \"vm_name\": \"XMCOV9LC\"\n\n }\n", "file_path": "README.md", "rank": 30, "score": 7.681276214779757 }, { "content": " &self,\n\n presentation: Presentation<T, U>,\n\n ) -> Result<PresentationValidation<T, U>>\n\n where\n\n T: Clone + Serialize,\n\n U: Clone + Serialize,\n\n {\n\n let holder_url: &str = presentation\n\n .holder\n\n .as_ref()\n\n .map(|holder| holder.as_str())\n\n .ok_or(Error::InvalidPresentationHolder)?;\n\n\n\n // Resolve the holder DID Document and validate the digital signature.\n\n let holder_doc: DocumentValidation = self.validate_document(holder_url).await?;\n\n\n\n let mut credentials: Vec<CredentialValidation<U>> = Vec::new();\n\n\n\n // Resolve and validate all associated credentials.\n\n for credential in presentation.verifiable_credential.iter() {\n", "file_path": "src/utils_did/validator.rs", "rank": 31, "score": 7.312675963589413 }, { "content": " /// Deserializes the given JSON-encoded `Presentation` and\n\n /// validates all associated DID documents/`Credential`s.\n\n pub async fn check_presentation<T, U>(&self, data: &str) -> Result<PresentationValidation<T, U>>\n\n where\n\n T: Clone + DeserializeOwned + Serialize,\n\n U: Clone + DeserializeOwned + Serialize,\n\n {\n\n self.validate_presentation(Presentation::from_json(data)?)\n\n .await\n\n }\n\n\n\n 
/// Validates the `Credential` proof and all relevant DID documents.\n\n ///\n\n /// Note: The credential is expected to have a proof created by the issuing party.\n\n /// Note: The credential issuer URL is expected to be a valid DID.\n\n /// Note: Credential subject IDs are expected to be valid DIDs (if present).\n\n pub async fn validate_credential<T>(\n\n &self,\n\n credential: Credential<T>,\n\n ) -> Result<CredentialValidation<T>>\n", "file_path": "src/utils_did/validator.rs", "rank": 32, "score": 6.788413316985985 }, { "content": " credentials.push(self.validate_credential(credential.clone()).await?);\n\n }\n\n\n\n // Verify the presentation signature using the holders DID Document\n\n let presentation_verified: bool = holder_doc.document.verify_data(&presentation).is_ok();\n\n\n\n // Check if all credentials are verified\n\n let credentials_verified: bool = credentials.iter().all(|credential| credential.verified);\n\n\n\n // The presentation is truly verified if all associated documents are verified\n\n let verified: bool = holder_doc.verified && presentation_verified && credentials_verified;\n\n\n\n Ok(PresentationValidation {\n\n presentation,\n\n holder: holder_doc,\n\n credentials,\n\n verified,\n\n })\n\n }\n\n\n", "file_path": "src/utils_did/validator.rs", "rank": 33, "score": 6.563706244302139 }, { "content": "\n\n // Check if all subjects have valid signatures\n\n let subjects_verified: bool = subjects.values().all(|subject| subject.verified);\n\n\n\n // The credential is truly verified if all associated documents are verified\n\n let verified: bool = issuer_doc.verified && credential_verified && subjects_verified;\n\n\n\n Ok(CredentialValidation {\n\n credential,\n\n issuer: issuer_doc,\n\n subjects,\n\n verified,\n\n })\n\n }\n\n\n\n /// Validates the `Presentation` proof and all relevant DID documents.\n\n ///\n\n /// Note: The presentation holder is expected to be a valid DID.\n\n /// Note: The presentation is expected to have a proof created by 
the holder.\n\n pub async fn validate_presentation<T, U>(\n", "file_path": "src/utils_did/validator.rs", "rank": 34, "score": 6.198116259358177 }, { "content": " where\n\n T: Serialize,\n\n {\n\n // Resolve the issuer DID Document and validate the digital signature.\n\n let issuer_url: &str = credential.issuer.url().as_str();\n\n let issuer_doc: DocumentValidation = self.validate_document(issuer_url).await?;\n\n\n\n let mut subjects: BTreeMap<String, DocumentValidation> = BTreeMap::new();\n\n\n\n // Resolve all credential subjects with `id`s - we assume all ids are DIDs.\n\n for id in credential\n\n .credential_subject\n\n .iter()\n\n .filter_map(|subject| subject.id.as_ref())\n\n {\n\n subjects.insert(id.to_string(), self.validate_document(id.as_str()).await?);\n\n }\n\n\n\n // Verify the credential signature using the issuers DID Document\n\n let credential_verified: bool = issuer_doc.document.verify_data(&credential).is_ok();\n", "file_path": "src/utils_did/validator.rs", "rank": 35, "score": 5.829820611443333 }, { "content": " )\n\n .service(createDidApi)\n\n .service(createVcApi)\n\n .service(createVpApi)\n\n .service(verifyValidityApiCred)\n\n .service(verifyValidityApiPres)\n\n .service(removeVmApi)\n\n .service(addVerifMethodApi)\n\n })\n\n .bind(&address)\n\n .unwrap_or_else(|err| {\n\n panic!(\n\n \"🔥🔥🔥 Couldn't start the server in port {}: {:?}\",\n\n port, err\n\n )\n\n })\n\n .run()\n\n .await?;\n\n Ok(server)\n\n}\n", "file_path": "src/main.rs", "rank": 36, "score": 5.810013758840897 }, { "content": "pub mod create_did_vm;\n\npub mod create_vc;\n\npub mod holder_data;\n\npub mod holder_credential;\n\npub mod create_vp;\n\npub mod holder_presentation;\n\npub mod remove_vm;", "file_path": "src/jsons_did/mod.rs", "rank": 37, "score": 5.798595487308849 }, { "content": "use serde::Serialize;\n\nuse serde_derive::Deserialize;\n\n\n\nuse super::{holder_data::HolderData};\n\n\n\n#[derive(Serialize, Deserialize,Debug)]\n\npub struct HolderCredential { \n\n 
#[serde(rename = \"@context\")]\n\n pub context: String,\n\n pub id: String,\n\n pub credentialSubject: HolderData,\n\n pub issuanceDate: String,\n\n pub issuer: String,\n\n pub proof: HolderProof,\n\n pub r#type: Vec<String>\n\n}\n\n\n\n#[derive(Serialize, Deserialize,Debug)]\n\npub struct HolderProof { \n\n signatureValue: String,\n\n r#type: String,\n\n verificationMethod: String\n\n}", "file_path": "src/jsons_did/holder_credential.rs", "rank": 38, "score": 5.736135881789075 }, { "content": "## IOTA Identity API Tutorial\n\n\n\nYoutube: [IOTA - Decentralized Identities API - Explained](https://www.youtube.com/watch?v=mY0If3JZmhc&t)\n\n\n\n### Problem Description\n\nIn this tutorial you will utilize the [Rust Account of the IOTA Identity framework](https://github.com/iotaledger/identity.rs/tree/dev/examples/account) to solve the problem described below. [identity.rs](https://github.com/iotaledger/identity.rs/blob/dev/README.md):\n\n> Alice recently graduated from the University of Oslo with a Bachelor of Computer Science. Now she wants to apply for a remote job at the IOTA Foundation and needs to digitally prove the existence and validity of her degree. What she needs is an immutable and verifiable credential, which has been approved by both the University of Oslo and herself, before presenting it to her possible new employer.\n\n\n\n### Roles\n\nAs described [here](https://www.iota.org/solutions/digital-identity), IOTA Identity builds on the W3C's proposed standards for a digital identity framework and thus is based on three roles:\n\n- Holder (Alice)\n\n- Issuer (University of Oslo)\n\n- Verifier (IOTA Foundation)\n\n\n\n### Flow-Chart\n\n![banner](./identity_tutorial_chart.png)\n\n\n\n\n", "file_path": "README.md", "rank": 39, "score": 5.735479694191005 }, { "content": " ```\n\n\n\n4. 
**Holder:** Add a verification method to Alice's DID document with the purpose to present her degree to a third party.\n\n - **POST** => http://localhost:6000/add_verif_method\n\n ```json\n\n {\n\n \"nick_name\": \"Alice\",\n\n \"password\": \"my_password\",\n\n \"did\": \"did:iota:6jAqUhoszQ79fWfx87Ga7o4TxkLcK6uBTm7LE2k26dAR\"\n\n }\n\n ```\n\n **Response**\n\n ```json\n\n {\n\n \"Error\": false,\n\n \"Explorer\": \"https://explorer.iota.org/mainnet/identity-resolver/did:iota:6jAqUhoszQ79fWfx87Ga7o4TxkLcK6uBTm7LE2k26dAR\",\n\n \"Status\": \"Method Created\",\n\n \"vm_name\": \"MZXQMFPP\"\n\n }\n\n ```\n\n\n\n\n\n5. **Holder:** Setup a document representing Alice's degree, containing her DID.\n\n ```json\n\n \"holder\": {\n\n \"id\": \"did:iota:6jAqUhoszQ79fWfx87Ga7o4TxkLcK6uBTm7LE2k26dAR\",\n\n \"name\": \"Alice\",\n\n \"degreeName\": \"Bachelor of Computer Science\",\n\n \"degreeType\": \"BachelorDegree\",\n\n \"GPA\": \"4.0\"\n\n }\n\n ```\n\n\n\n6. **Issuer:** Sign the degree document with the University's verification method to obtain a verifiable credential.\n\n - **POST** => http://localhost:6000/create_vc\n\n ```json\n\n {\n\n \"issuer\": {\n\n \"nick_name\": \"University of Oslo\",\n\n \"password\": \"my_password\",\n\n \"did\": \"did:iota:4wN7q5NZKwCnkFtcv6VM42EQR9vC3DqxsLXB6pkUR1wV\",\n\n \"vm_name\": \"XMCOV9LC\"\n\n },\n\n \"holder\": {\n\n \"id\": \"did:iota:6jAqUhoszQ79fWfx87Ga7o4TxkLcK6uBTm7LE2k26dAR\",\n\n \"name\": \"Alice\",\n\n \"degreeName\": \"Bachelor of Computer Science\",\n\n \"degreeType\": \"BachelorDegree\",\n\n \"GPA\": \"4.0\"\n\n }\n\n }\n\n ```\n\n **Response**\n\n ```json\n\n {\n\n \"Credential\": {\n\n \"@context\": \"https://www.w3.org/2018/credentials/v1\",\n\n \"credentialSubject\": {\n\n \"GPA\": \"4.0\",\n\n ...\n\n }\n\n ...\n\n },\n\n \"Error\": false,\n\n \"Status\": \"Credential Created\",\n\n \"Verified\": true\n\n }\n", "file_path": "README.md", "rank": 40, "score": 5.715840007409648 }, { "content": "\n\nuse 
serde::Serialize;\n\nuse serde_derive::Deserialize;\n\n\n\nuse super::{create_did_vm::CreateDidVm};\n\n\n\n#[derive(Serialize, Deserialize,Debug)]\n\npub struct RemoveVm { \n\n pub issuer: CreateDidVm,\n\n}\n\n ", "file_path": "src/jsons_did/remove_vm.rs", "rank": 41, "score": 5.184105746762924 }, { "content": " pub subjects: BTreeMap<String, DocumentValidation>,\n\n pub verified: bool,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize)]\n\npub struct PresentationValidation<T = Object, U = Object> {\n\n pub presentation: Presentation<T, U>,\n\n pub holder: DocumentValidation,\n\n pub credentials: Vec<CredentialValidation<U>>,\n\n pub verified: bool,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize)]\n\npub struct DocumentValidation {\n\n pub did: IotaDID,\n\n pub document: IotaDocument,\n\n pub metadata: Object,\n\n pub verified: bool,\n\n}\n\n\n", "file_path": "src/utils_did/validator.rs", "rank": 42, "score": 4.456421252828442 }, { "content": " nick_name: String,\n\n password: String,\n\n ) -> Result<User> {\n\n // Stronghold settings\n\n let stronghold_path: PathBuf =\n\n format!(\"./accounts_stronghold/{}.stronghold\", nick_name).into();\n\n\n\n // let password_json = &password;\n\n let password: String = Into::into(password);\n\n\n\n let account: Account = Account::builder()\n\n .storage(AccountStorage::Stronghold(stronghold_path, Some(password)))\n\n .build()\n\n .await\n\n .unwrap();\n\n\n\n Ok(Self { account: account })\n\n }\n\n\n\n #[tokio::main]\n", "file_path": "src/utils_did/user.rs", "rank": 43, "score": 4.44511202728793 }, { "content": " async fn validate_document(&self, did: impl AsRef<str>) -> Result<DocumentValidation> {\n\n let did: IotaDID = did.as_ref().parse()?;\n\n let document: IotaDocument = self.client.resolve(&did).await?;\n\n // let verified: bool = document.verify().is_ok();\n\n\n\n Ok(DocumentValidation {\n\n did,\n\n document,\n\n metadata: Object::new(),\n\n verified: true,\n\n })\n\n }\n\n}\n", "file_path": 
"src/utils_did/validator.rs", "rank": 44, "score": 4.417770667090842 }, { "content": "#[derive(Clone, Copy, Debug)]\n\npub struct CredentialValidator<'a, R: TangleResolve = Client> {\n\n client: &'a R,\n\n}\n\n\n\nimpl<'a, R: TangleResolve> CredentialValidator<'a, R> {\n\n /// Creates a new `CredentialValidator`.\n\n pub fn new(client: &'a R) -> Self {\n\n Self { client }\n\n }\n\n\n\n /// Deserializes the given JSON-encoded `Credential` and validates\n\n /// all associated DID documents.\n\n pub async fn check<T>(&self, data: &str) -> Result<CredentialValidation<T>>\n\n where\n\n T: DeserializeOwned + Serialize,\n\n {\n\n self.validate_credential(Credential::from_json(data)?).await\n\n }\n\n\n", "file_path": "src/utils_did/validator.rs", "rank": 45, "score": 4.274032649412686 }, { "content": " ```\n\n\n\n7. **Holder:** Alice verifies the credentials to make sure it was actually signed by a key associated to the University DID.\n\n - **POST** => http://localhost:6000/verify_validity_credential\n\n\n\n ```json\n\n {\n\n \"@context\": \"https://www.w3.org/2018/credentials/v1\",\n\n \"credentialSubject\": {\n\n \"GPA\": \"4.0\",\n\n ...\n\n }\n\n ...\n\n }\n\n ```\n\n **Response**\n\n ```json\n\n {\n\n \"error\": false,\n\n \"validation\": true\n\n }\n\n ```\n\n\n\n8. 
**Holder:** Alice signs verifiable credential with private key of Alices's verification method in order to get a verifiable presentation.\n\n - **POST** => http://localhost:6000/create_vp\n\n ```json\n\n {\n\n \"holder\": {\n\n \"nick_name\": \"Alice\",\n\n \"password\": \"my_password\",\n\n \"did\": \"did:iota:6jAqUhoszQ79fWfx87Ga7o4TxkLcK6uBTm7LE2k26dAR\",\n\n \"vm_name\": \"MZXQMFPP\"\n\n },\n\n \"holder_credential\": {\n\n \"@context\": \"https://www.w3.org/2018/credentials/v1\",\n\n \"credentialSubject\": {\n\n \"GPA\": \"4.0\",\n\n ...\n\n }\n\n ...\n\n }\n\n }\n\n ```\n\n **Response**\n\n ```json\n\n {\n\n \"Presentation\": {\n\n \"@context\": \"https://www.w3.org/2018/credentials/v1\",\n\n \"holder\": \"did:iota:6jAqUhoszQ79fWfx87Ga7o4TxkLcK6uBTm7LE2k26dAR\",\n\n \"proof\": {\n\n \"type\": \"JcsEd25519Signature2020\",\n\n ...\n\n }\n\n ...\n\n },\n\n \"Error\": false,\n\n \"Status\": \"Presentation Created\",\n\n \"Verified\": true\n\n }\n\n ```\n\n\n\n9. **Verifier:** The IOTA Foundation verfies Alice's and the University's signatures with their respective public keys by checking the verifiable presentation.\n", "file_path": "README.md", "rank": 46, "score": 4.1648909213320495 }, { "content": " let host = std::env::var(\"HOST_API\").unwrap_or(\"127.0.0.1\".to_string());\n\n let address = format!(\"{}:{}\", host, port);\n\n\n\n println!(\"API => {}\", address);\n\n\n\n // building host frontend\n\n let host_cors = std::env::var(\"HOST_CORS\").unwrap_or(\"127.0.0.1\".to_string());\n\n\n\n println!(\"Frontend Cors => {}\", host_cors);\n\n\n\n let server = HttpServer::new(move || {\n\n App::new()\n\n .wrap(\n\n Cors::default()\n\n .allowed_origin(&host_cors)\n\n .allowed_methods(vec![\"POST\"])\n\n .allowed_headers(vec![header::AUTHORIZATION, header::ACCEPT])\n\n .allowed_header(header::CONTENT_TYPE)\n\n .supports_credentials()\n\n .max_age(3600),\n", "file_path": "src/main.rs", "rank": 47, "score": 3.725125264567783 }, { "content": " - **POST** => 
http://localhost:6000/verify_validity_presentation\n\n\n\n ```json\n\n {\n\n \"@context\": \"https://www.w3.org/2018/credentials/v1\",\n\n \"holder\": \"did:iota:6jAqUhoszQ79fWfx87Ga7o4TxkLcK6uBTm7LE2k26dAR\",\n\n \"proof\": {\n\n \"type\": \"JcsEd25519Signature2020\",\n\n ...\n\n }\n\n ...\n\n }\n\n ```\n\n **Response**\n\n ```json\n\n {\n\n \"error\": false,\n\n \"validation\": true\n\n }\n\n ```\n\n\n\n10. **Issuer:** Unfortunately, the University found out that Alice was cheating on her final exam. Therefore, the University revokes the verification of Alice's credential. Removing the verification method. Note that Alice could also revoke her signature on the verifiable presentation, removing the verification method from her.\n\n - **POST** => http://localhost:6000/remove_vm\n\n \n\n ```json\n\n {\n\n \"issuer\": {\n\n \"nick_name\": \"University of Oslo\",\n\n \"password\": \"my_password\",\n\n \"did\": \"did:iota:4wN7q5NZKwCnkFtcv6VM42EQR9vC3DqxsLXB6pkUR1wV\",\n\n \"vm_name\": \"XMCOV9LC\"\n\n }\n\n }\n\n ```\n\n **Response**\n\n ```json\n\n {\n\n \"Status\": \"Removed VM\",\n\n \"error\": false\n\n }\n\n ```\n\n\n\n11. **Verifier:** The IOTA Foundation verifies Alice's and the University's signatures again by checking the verifiable presentation and finds out that the University revoked their signature.\n\n - **POST** => http://localhost:6000/verify_validity_presentation\n\n\n\n ```json\n\n {\n\n \"@context\": \"https://www.w3.org/2018/credentials/v1\",\n\n \"holder\": \"did:iota:6jAqUhoszQ79fWfx87Ga7o4TxkLcK6uBTm7LE2k26dAR\",\n\n \"proof\": {\n\n \"type\": \"JcsEd25519Signature2020\",\n\n ...\n\n }\n\n ...\n\n }\n\n ```\n\n **Response**\n\n ```json\n\n {\n\n \"Error\": true,\n\n \"validation\": false\n\n }\n", "file_path": "README.md", "rank": 48, "score": 2.939354002389959 }, { "content": "use rand::Rng;\n\n\n", "file_path": "src/utils_did/rnd_vm_name.rs", "rank": 49, "score": 2.609409409377883 } ]
Rust
src/lib.rs
Noskcaj19/macos-notifications
04c02ef0bf1c63bd9244eef0c3e09e8116cee8fc
extern crate cocoa; #[macro_use] extern crate objc; use objc::declare::ClassDecl; use objc::runtime::{self, Class, Method, Object, Sel}; use cocoa::base::nil; use cocoa::foundation::NSString; pub fn init() -> bool { let superclass = Class::get("NSObject").unwrap(); let mut decl = ClassDecl::new("NSBundleOverride", superclass).unwrap(); extern "C" fn bundle_identifier_override(_: &Object, _cmd: Sel) -> *mut Object { unsafe { NSString::alloc(nil).init_str("com.apple.Terminal") } } unsafe { decl.add_method( sel!(__bundleIdentifier), bundle_identifier_override as extern "C" fn(&Object, Sel) -> *mut Object, ); } decl.register(); let cls = Class::get("NSBundle").unwrap(); unsafe { let bi_original = runtime::class_getInstanceMethod(cls, Sel::register("bundleIdentifier")) as *mut Method; let custom_cls = Class::get("NSBundleOverride").unwrap(); let bi_override = runtime::class_getInstanceMethod(custom_cls, Sel::register("__bundleIdentifier")) as *mut Method; runtime::method_exchangeImplementations(bi_original, bi_override); } unsafe { let main_bundle: *mut Object = msg_send![cls, mainBundle]; let id: *mut Object = msg_send![main_bundle, bundleIdentifier]; return id.isEqualToString("com.apple.Terminal"); } } pub enum NotificationImage<'a> { Url(&'a str), File(&'a str), } pub struct Notification<'a> { title: Option<&'a str>, subtitle: Option<&'a str>, body: Option<&'a str>, content_image: Option<NotificationImage<'a>>, app_image: Option<NotificationImage<'a>>, } impl<'a> Notification<'a> { pub fn new() -> Notification<'a> { Notification { title: None, subtitle: None, body: None, content_image: None, app_image: None, } } pub fn title(&mut self, title: &'a str) -> &mut Notification<'a> { self.title = Some(title); self } pub fn subtitle(&mut self, subtitle: &'a str) -> &mut Notification<'a> { self.subtitle = Some(subtitle); self } pub fn body(&mut self, body: &'a str) -> &mut Notification<'a> { self.body = Some(body); self } pub fn content_image(&mut self, image: 
NotificationImage<'a>) -> &mut Notification<'a> { self.content_image = Some(image); self } pub fn app_image(&mut self, image: NotificationImage<'a>) -> &mut Notification<'a> { self.app_image = Some(image); self } pub fn deliver(&self) { let notification_cls = Class::get("NSUserNotification").unwrap(); let center = Class::get("NSUserNotificationCenter").unwrap(); unsafe { let notification: *mut Object = msg_send![notification_cls, alloc]; let notification: *mut Object = msg_send![notification, init]; if let Some(title) = self.title { msg_send![notification, setTitle:NSString::alloc(nil).init_str(title)]; } if let Some(subtitle) = self.subtitle { msg_send![notification, setSubtitle:NSString::alloc(nil).init_str(subtitle)]; } if let Some(body) = self.body { msg_send![notification, setInformativeText:NSString::alloc(nil).init_str(body)]; } if let Some(ref image_data) = self.content_image { let img_cls = Class::get("NSImage").unwrap(); let image: *mut Object = msg_send![img_cls, alloc]; let image: *mut Object = match image_data { &NotificationImage::File(file) => msg_send![ image, initWithContentsOfFile: NSString::alloc(nil).init_str(file) ], &NotificationImage::Url(url) => { let url_cls = Class::get("NSURL").unwrap(); let nsurl: *mut Object = msg_send![ url_cls, URLWithString:NSString::alloc(nil).init_str(url) ]; msg_send![image, initWithContentsOfURL: nsurl] } }; msg_send![notification, setContentImage: image]; } if let Some(ref image_data) = self.app_image { let img_cls = Class::get("NSImage").unwrap(); let image: *mut Object = msg_send![img_cls, alloc]; let image: *mut Object = match image_data { &NotificationImage::File(file) => msg_send![ image, initWithContentsOfFile: NSString::alloc(nil).init_str(file) ], &NotificationImage::Url(url) => { let url_cls = Class::get("NSURL").unwrap(); let nsurl: *mut Object = msg_send![url_cls, URLWithString:NSString::alloc(nil).init_str(url)]; msg_send![image, initWithContentsOfURL: nsurl] } }; msg_send![notification, setValue: 
image forKey:NSString::alloc(nil).init_str("_identityImage")]; } let default_center: *mut Object = msg_send![center, defaultUserNotificationCenter]; msg_send![default_center, deliverNotification: notification]; msg_send![notification, release]; self.runloop(); } } fn runloop(&self) { let runloop_cls = Class::get("NSRunLoop").unwrap(); let date_cls = Class::get("NSDate").unwrap(); unsafe { let current_run_loop: *mut Object = msg_send![runloop_cls, currentRunLoop]; let till_date: *mut Object = msg_send![date_cls, dateWithTimeIntervalSinceNow:0.2]; msg_send![current_run_loop, runUntilDate: till_date]; } } } #[cfg(test)] mod tests { use super::{Notification, NotificationImage}; #[test] fn init() { super::init(); } #[test] fn title() { let mut note = Notification::new(); note.title("A title"); note.deliver(); } #[test] fn subtitle() { let mut note = Notification::new(); note.title("A title"); note.subtitle("Subtitle content"); note.deliver(); } #[test] fn body() { let mut note = Notification::new(); note.title("A title"); note.subtitle("Subtitle content"); note.body("Body content"); note.deliver(); } #[test] fn content_img_path() { let mut note = Notification::new(); note.content_image(NotificationImage::File("/")); note.deliver(); } #[test] fn content_image_url() { let mut note = Notification::new(); note.content_image(NotificationImage::Url("https://google.com")); note.deliver(); } #[test] fn app_img_path() { let mut note = Notification::new(); note.app_image(NotificationImage::File("/")); note.deliver(); } #[test] fn app_img_url() { let mut note = Notification::new(); note.app_image(NotificationImage::Url("https://google.com")); note.deliver(); } }
extern crate cocoa; #[macro_use] extern crate objc; use objc::declare::ClassDecl; use objc::runtime::{self, Class, Method, Object, Sel}; use cocoa::base::nil; use cocoa::foundation::NSString; pub fn init() -> bool { let superclass = Class::get("NSObject").unwrap(); let mut decl = ClassDecl::new("NSBundleOverride", superclass).unwrap(); extern "C" fn bundle_identifier_override(_: &Object, _cmd: Sel) -> *mut Object { unsafe { NSString::alloc(nil).init_str("com.apple.Terminal") } } unsafe { decl.add_method( sel!(__bundleIdentifier), bundle_identifier_override as extern "C" fn(&Object, Sel) -> *mut Object, ); } decl.register(); let cls = Class::get("NSBundle").unwrap(); unsafe { let bi_original = runtime::class_getInstanceMethod(cls, Sel::register("bundleIdentifier")) as *mut Method; let custom_cls = Class::get("NSBundleOverride").unwrap(); let bi_override = runtime::class_getInstanceMethod(custom_cls, Sel::register("__bundleIdentifier")) as *mut Method; runtime::method_exchangeImplementations(bi_original, bi_override); } unsafe { let main_bundle: *mut Object = msg_send![cls, mainBundle]; let id: *mut Object = msg_send![main_bundle, bundleIdentifier]; return id.isEqualToString("com.apple.Terminal"); } } pub enum NotificationImage<'a> { Url(&'a str), File(&'a str), } pub struct Notification<'a> { title: Option<&'a str>, subtitle: Option<&'a str>, body: Option<&'a str>, content_image: Option<NotificationImage<'a>>, app_image: Option<NotificationImage<'a>>, } impl<'a> Notification<'a> { pub fn new() -> Notification<'a> { Notification { title: None, subtitle: None, body: None, content_image: None, app_image: None, } } pub fn title(&mut self, title: &'a str) -> &mut Notification<'a> { self.title = Some(title); self } pub fn subtitle(&mut self, subtitle: &'a str) -> &mut Notification<'a> { self.subtitle = Some(subtitle); self } pub fn body(&mut self, body: &'a str) -> &mut Notification<'a> { self.body = Some(body); self } pub fn content_image(&mut self, image: 
NotificationImage<'a>) -> &mut Notification<'a> { self.content_image = Some(image); self } pub fn app_image(&mut self, image: NotificationImage<'a>) -> &mut Notification<'a> { self.app_image = Some(image); self } pub fn deliver(&self) { let notification_cls = Class::get("NSUserNotification").unwrap(); let center = Class::get("NSUserNotificationCenter").unwrap(); unsafe { let notification: *mut Object = msg_send![notification_cls, alloc]; let notification: *mut Object = msg_send![notification, init]; if let Some(title) = self.title { msg_send![notification, setTitle:NSString::alloc(nil).init_str(title)]; } if let Some(subtitle) = self.subtitle { msg_send![notification, setSubtitle:NSString::alloc(nil).init_str(subtitle)]; } if let Some(body) = self.body { msg_send![notification, setInformativeText:NSString::alloc(nil).init_str(body)]; }
if let Some(ref image_data) = self.app_image { let img_cls = Class::get("NSImage").unwrap(); let image: *mut Object = msg_send![img_cls, alloc]; let image: *mut Object = match image_data { &NotificationImage::File(file) => msg_send![ image, initWithContentsOfFile: NSString::alloc(nil).init_str(file) ], &NotificationImage::Url(url) => { let url_cls = Class::get("NSURL").unwrap(); let nsurl: *mut Object = msg_send![url_cls, URLWithString:NSString::alloc(nil).init_str(url)]; msg_send![image, initWithContentsOfURL: nsurl] } }; msg_send![notification, setValue: image forKey:NSString::alloc(nil).init_str("_identityImage")]; } let default_center: *mut Object = msg_send![center, defaultUserNotificationCenter]; msg_send![default_center, deliverNotification: notification]; msg_send![notification, release]; self.runloop(); } } fn runloop(&self) { let runloop_cls = Class::get("NSRunLoop").unwrap(); let date_cls = Class::get("NSDate").unwrap(); unsafe { let current_run_loop: *mut Object = msg_send![runloop_cls, currentRunLoop]; let till_date: *mut Object = msg_send![date_cls, dateWithTimeIntervalSinceNow:0.2]; msg_send![current_run_loop, runUntilDate: till_date]; } } } #[cfg(test)] mod tests { use super::{Notification, NotificationImage}; #[test] fn init() { super::init(); } #[test] fn title() { let mut note = Notification::new(); note.title("A title"); note.deliver(); } #[test] fn subtitle() { let mut note = Notification::new(); note.title("A title"); note.subtitle("Subtitle content"); note.deliver(); } #[test] fn body() { let mut note = Notification::new(); note.title("A title"); note.subtitle("Subtitle content"); note.body("Body content"); note.deliver(); } #[test] fn content_img_path() { let mut note = Notification::new(); note.content_image(NotificationImage::File("/")); note.deliver(); } #[test] fn content_image_url() { let mut note = Notification::new(); note.content_image(NotificationImage::Url("https://google.com")); note.deliver(); } #[test] fn app_img_path() { let 
mut note = Notification::new(); note.app_image(NotificationImage::File("/")); note.deliver(); } #[test] fn app_img_url() { let mut note = Notification::new(); note.app_image(NotificationImage::Url("https://google.com")); note.deliver(); } }
if let Some(ref image_data) = self.content_image { let img_cls = Class::get("NSImage").unwrap(); let image: *mut Object = msg_send![img_cls, alloc]; let image: *mut Object = match image_data { &NotificationImage::File(file) => msg_send![ image, initWithContentsOfFile: NSString::alloc(nil).init_str(file) ], &NotificationImage::Url(url) => { let url_cls = Class::get("NSURL").unwrap(); let nsurl: *mut Object = msg_send![ url_cls, URLWithString:NSString::alloc(nil).init_str(url) ]; msg_send![image, initWithContentsOfURL: nsurl] } }; msg_send![notification, setContentImage: image]; }
if_condition
[ { "content": "fn main() {\n\n let dir = get_dir();\n\n let image1 = dir.join(\"rust.png\");\n\n let image2 = dir.join(\"ferris.png\");\n\n\n\n // Call init so we get notifcations\n\n macos_notifications::init();\n\n\n\n // Construct and send a new notification\n\n Notification::new()\n\n .title(\"Image\")\n\n .content_image(NotificationImage::File(image1.to_str().unwrap()))\n\n .app_image(NotificationImage::File(image2.to_str().unwrap()))\n\n .deliver();\n\n}\n", "file_path": "examples/image_path.rs", "rank": 1, "score": 34525.96053527108 }, { "content": "fn main() {\n\n let image_url = \"https://upload.wikimedia.org/wikipedia/commons/thumb/d/d5/Rust_programming_language_black_logo.svg/200px-Rust_programming_language_black_logo.svg.png\";\n\n\n\n // Call init so we get notifcations\n\n macos_notifications::init();\n\n\n\n // Construct and send a new notification\n\n Notification::new()\n\n .title(\"Image\")\n\n .app_image(NotificationImage::Url(image_url))\n\n .deliver();\n\n}\n", "file_path": "examples/image_url.rs", "rank": 2, "score": 34525.96053527108 }, { "content": "fn get_dir() -> PathBuf {\n\n let mut dir = env::current_dir().unwrap();\n\n dir.push(\"examples\");\n\n dir\n\n}\n\n\n", "file_path": "examples/image_path.rs", "rank": 3, "score": 28108.250952464306 }, { "content": "fn main() {\n\n // Call init so we get notifcations\n\n macos_notifications::init();\n\n\n\n // Construct and send a new notification\n\n macos_notifications::Notification::new()\n\n .title(\"Notification\")\n\n .body(\"Hello, World!\")\n\n .deliver();\n\n}\n", "file_path": "examples/hello_world.rs", "rank": 4, "score": 21241.2206347346 }, { "content": "extern crate macos_notifications;\n\nuse macos_notifications::{Notification, NotificationImage};\n\n\n", "file_path": "examples/image_url.rs", "rank": 5, "score": 14278.198755172894 }, { "content": "extern crate macos_notifications;\n\n\n\nuse macos_notifications::{Notification, NotificationImage};\n\n\n\nuse std::env;\n\nuse 
std::path::PathBuf;\n\n\n", "file_path": "examples/image_path.rs", "rank": 6, "score": 14278.074947934381 }, { "content": "# macOS Notifcations\n\n\n\nShows notifications on macos from rust.\n\n\n\nUses the NSUserNotifcation APIs\n\n\n\nSee `examples/` for usage\n", "file_path": "README.md", "rank": 7, "score": 8613.676695946237 }, { "content": "Copyright 2018 Noskcaj19\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", "file_path": "LICENSE.md", "rank": 8, "score": 8612.665728463999 }, { "content": "extern crate macos_notifications;\n\n\n", "file_path": "examples/hello_world.rs", "rank": 21, "score": 4.837962062462918 } ]
Rust
research/gaia-x/pegasus/pegasus/src/operator/iteration/switch.rs
bmmcq/GraphScope
a480d941f3a3f1270ddc0570e72059e6a34dab24
use std::rc::Rc; use crate::api::{IterCondition, Notification}; use crate::communication::input::{new_input_session, InputProxy}; use crate::communication::output::{new_output, OutputProxy}; use crate::communication::Output; use crate::config::LOOP_OPT; use crate::data::{MarkedData, MicroBatch}; use crate::errors::JobExecError; use crate::graph::Port; use crate::operator::{Notifiable, OperatorCore}; use crate::progress::EndSignal; use crate::tag::tools::map::TidyTagMap; use crate::{Data, Tag}; pub(crate) struct SwitchOperator<D> { scope_level: u32, cond: IterCondition<D>, iter_scope: TidyTagMap<Vec<EndGuard>>, } impl<D> SwitchOperator<D> { pub fn new(scope_level: u32, cond: IterCondition<D>) -> Self { assert!(scope_level > 0); SwitchOperator { scope_level, cond, iter_scope: TidyTagMap::new(scope_level - 1) } } } impl<D: Data> OperatorCore for SwitchOperator<D> { fn on_receive( &mut self, inputs: &[Box<dyn InputProxy>], outputs: &[Box<dyn OutputProxy>], ) -> Result<(), JobExecError> { assert_eq!(inputs.len(), 2); let mut main = new_input_session::<D>(&inputs[0]); let leave = new_output::<D>(&outputs[0]); let enter = new_output::<D>(&outputs[1]); main.for_each_batch(|dataset| { if dataset.is_last() { let tag = dataset.tag.to_parent_uncheck(); trace_worker!("{:?} into iteration at scope level {}", tag, self.scope_level); self.iter_scope.insert(tag, vec![]); } switch(dataset, &self.cond, leave, enter) })?; let mut feedback = new_input_session::<D>(&inputs[1]); feedback.for_each_batch(|dataset| { if !dataset.is_empty() && log_enabled!(log::Level::Trace) { trace_worker!("receive feedback data of scope {:?}", dataset.tag); } if dataset.tag.current_uncheck() >= self.cond.max_iters { if !dataset.is_empty() { let mut leave_session = leave.new_session(&dataset.tag)?; for d in dataset.drain() { leave_session.give(d)?; } } if let Some(end) = dataset.take_end() { let p = end.tag.to_parent_uncheck(); let mut ends = self .iter_scope .remove(&p) .expect("unknown iteration scope;"); 
leave.notify_end(end)?; for e in ends.drain(..) { if let Some(end) = e.try_unwrap() { if end.tag.is_root() { assert!(self.iter_scope.is_empty()); debug_worker!( "all scopes out of iteration at scope level {};", self.scope_level ); enter.notify_end(end.clone())?; } trace_worker!("{:?} out of iteration", end.tag); leave.notify_end(end)?; } else { } } } } else { if !dataset.is_empty() { switch(dataset, &self.cond, leave, enter)?; } else { if let Some(end) = dataset.take_end() { let p = end.tag.to_parent_uncheck(); if self.iter_scope.contains_key(&p) { enter.notify_end(end)?; } else { error_worker!( "weird scope[{:?}] end in iteration {};", end.tag, self.scope_level ); unreachable!( "weird scope[{:?}] end in iteration {};", end.tag, self.scope_level ); } } else { error_worker!("both data and signal empty of {:?}", dataset.tag); unreachable!("both data and signal empty of {:?}", dataset.tag); } } } Ok(()) }) } } fn switch<D: Data>( dataset: &mut MicroBatch<D>, cond: &IterCondition<D>, leave: &Output<D>, enter: &Output<D>, ) -> Result<(), JobExecError> { if !dataset.is_last() { let mut leave_session = leave.new_session(&dataset.tag)?; let mut enter_session = enter.new_session(&dataset.tag)?; for d in dataset.drain() { if cond.is_converge(&d)? { leave_session.give(d)?; } else { enter_session.give(d)?; } } } else { debug_worker!("stop to send data of the {:?};", &dataset.tag); if !dataset.is_empty() { let tag = dataset.tag(); let mut leave_session = leave.new_session(&tag)?; let mut enter_session = enter.new_session(&tag)?; for item in dataset.drain_to_end() { match item { MarkedData::Data(d) => { if cond.is_converge(&d)? { leave_session.give(d)?; } else { enter_session.give(d)?; } } MarkedData::Marked(d, e) => { if let Some(d) = d { if cond.is_converge(&d)? 
{ enter_session.notify_end(e)?; leave_session.give(d)?; } else { enter_session.give_last(d, e)?; } } else { enter_session.notify_end(e)?; } } } } } else if let Some(end) = dataset.take_end() { enter.notify_end(end)?; } else { unreachable!("both data and signal empty of {:?}", dataset.tag); } } Ok(()) } impl<D: Data> Notifiable for SwitchOperator<D> { fn on_notify(&mut self, n: Notification, outputs: &[Box<dyn OutputProxy>]) -> Result<(), JobExecError> { debug_worker!("on notify of {:?} on in port {}", n.tag(), n.port); let n_len = n.tag().len(); assert!(n_len < self.scope_level as usize); if n.port == 0 { if self.iter_scope.is_empty() { let end = n.take_end(); if end.tag.is_root() { debug_worker!("all scopes out of iteration at scope level {};", self.scope_level); outputs[0].notify_end(end.clone())?; outputs[1].notify_end(end)?; } } else { let end = EndGuard::new(n.take_end()); for (t, v) in self.iter_scope.iter_mut() { if &end.end.tag == &*t || end.end.tag.is_parent_of(&t) { v.push(end.clone()); } } } } Ok(()) } fn on_cancel( &mut self, port: Port, tag: Tag, inputs: &[Box<dyn InputProxy>], outputs: &[Box<dyn OutputProxy>], ) -> Result<bool, JobExecError> { if port.port == 0 { assert!(tag.len() < self.scope_level as usize); for input in inputs.iter() { input.cancel_scope(&tag); input.propagate_cancel(&tag)?; } for output in outputs.iter() { output.skip(&tag)?; } } else { assert_eq!(port.port, 1); if tag.len() == self.scope_level as usize { if let Some(nth) = tag.current() { if nth != 0 { inputs[1].cancel_scope(&tag); inputs[1].propagate_cancel(&tag)?; outputs[1].skip(&tag)?; } else { inputs[0].cancel_scope(&tag); if *LOOP_OPT { inputs[0].propagate_cancel_uncheck(&tag)?; } outputs[1].skip(&tag)?; } } else { unreachable!() } } } Ok(true) } } #[derive(Clone)] struct EndGuard { end: Rc<EndSignal>, } impl EndGuard { fn new(end: EndSignal) -> Self { EndGuard { end: Rc::new(end) } } fn try_unwrap(self) -> Option<EndSignal> { Rc::try_unwrap(self.end).ok() } } unsafe impl 
Send for EndGuard {}
use std::rc::Rc; use crate::api::{IterCondition, Notification}; use crate::communication::input::{new_input_session, InputProxy}; use crate::communication::output::{new_output, OutputProxy}; use crate::communication::Output; use crate::config::LOOP_OPT; use crate::data::{MarkedData, MicroBatch}; use crate::errors::JobExecError; use crate::graph::Port; use crate::operator::{Notifiable, OperatorCore}; use crate::progress::EndSignal; use crate::tag::tools::map::TidyTagMap; use crate::{Data, Tag}; pub(crate) struct SwitchOperator<D> { scope_level: u32, cond: IterCondition<D>, iter_scope: TidyTagMap<Vec<EndGuard>>, } impl<D> SwitchOperator<D> { pub fn new(scope_level: u32, cond: IterCondition<D>) -> Self { assert!(scope_level > 0); SwitchOperator { scope_level, cond, iter_scope: TidyTagMap::new(scope_level - 1) } } } impl<D: Data> OperatorCore for SwitchOperator<D> { fn on_receive( &mut self, inputs: &[Box<dyn InputProxy>], outputs: &[Box<dyn OutputProxy>], ) -> Result<(), JobExecError> { assert_eq!(inputs.len(), 2); let mut main = new_input_session::<D>(&inputs[0]); let leave = new_output::<D>(&outputs[0]); let enter = new_output::<D>(&outputs[1]); main.for_each_batch(|dataset| { if dataset.is_last() { let tag = dataset.tag.to_parent_uncheck(); trace_worker!("{:?} into iteration at scope level {}", tag, self.scope_level); self.iter_scope.insert(tag, vec![]); } switch(dataset, &self.cond, leave, enter) })?; let mut feedback = new_input_session::<D>(&inputs[1]); feedback.for_each_batch(|dataset| { if !dataset.is_empty() && log_enabled!(log::Level::Trace) { trace_worker!("receive feedback data of scope {:?}", dataset.tag); } if dataset.tag.current_uncheck() >= self.cond.max_iters { if !dataset.is_empty() { let mut leave_session = leave.new_session(&dataset.tag)?; for d in dataset.drain() { leave_session.give(d)?; }
error_worker!("both data and signal empty of {:?}", dataset.tag); unreachable!("both data and signal empty of {:?}", dataset.tag); } } } Ok(()) }) } } fn switch<D: Data>( dataset: &mut MicroBatch<D>, cond: &IterCondition<D>, leave: &Output<D>, enter: &Output<D>, ) -> Result<(), JobExecError> { if !dataset.is_last() { let mut leave_session = leave.new_session(&dataset.tag)?; let mut enter_session = enter.new_session(&dataset.tag)?; for d in dataset.drain() { if cond.is_converge(&d)? { leave_session.give(d)?; } else { enter_session.give(d)?; } } } else { debug_worker!("stop to send data of the {:?};", &dataset.tag); if !dataset.is_empty() { let tag = dataset.tag(); let mut leave_session = leave.new_session(&tag)?; let mut enter_session = enter.new_session(&tag)?; for item in dataset.drain_to_end() { match item { MarkedData::Data(d) => { if cond.is_converge(&d)? { leave_session.give(d)?; } else { enter_session.give(d)?; } } MarkedData::Marked(d, e) => { if let Some(d) = d { if cond.is_converge(&d)? 
{ enter_session.notify_end(e)?; leave_session.give(d)?; } else { enter_session.give_last(d, e)?; } } else { enter_session.notify_end(e)?; } } } } } else if let Some(end) = dataset.take_end() { enter.notify_end(end)?; } else { unreachable!("both data and signal empty of {:?}", dataset.tag); } } Ok(()) } impl<D: Data> Notifiable for SwitchOperator<D> { fn on_notify(&mut self, n: Notification, outputs: &[Box<dyn OutputProxy>]) -> Result<(), JobExecError> { debug_worker!("on notify of {:?} on in port {}", n.tag(), n.port); let n_len = n.tag().len(); assert!(n_len < self.scope_level as usize); if n.port == 0 { if self.iter_scope.is_empty() { let end = n.take_end(); if end.tag.is_root() { debug_worker!("all scopes out of iteration at scope level {};", self.scope_level); outputs[0].notify_end(end.clone())?; outputs[1].notify_end(end)?; } } else { let end = EndGuard::new(n.take_end()); for (t, v) in self.iter_scope.iter_mut() { if &end.end.tag == &*t || end.end.tag.is_parent_of(&t) { v.push(end.clone()); } } } } Ok(()) } fn on_cancel( &mut self, port: Port, tag: Tag, inputs: &[Box<dyn InputProxy>], outputs: &[Box<dyn OutputProxy>], ) -> Result<bool, JobExecError> { if port.port == 0 { assert!(tag.len() < self.scope_level as usize); for input in inputs.iter() { input.cancel_scope(&tag); input.propagate_cancel(&tag)?; } for output in outputs.iter() { output.skip(&tag)?; } } else { assert_eq!(port.port, 1); if tag.len() == self.scope_level as usize { if let Some(nth) = tag.current() { if nth != 0 { inputs[1].cancel_scope(&tag); inputs[1].propagate_cancel(&tag)?; outputs[1].skip(&tag)?; } else { inputs[0].cancel_scope(&tag); if *LOOP_OPT { inputs[0].propagate_cancel_uncheck(&tag)?; } outputs[1].skip(&tag)?; } } else { unreachable!() } } } Ok(true) } } #[derive(Clone)] struct EndGuard { end: Rc<EndSignal>, } impl EndGuard { fn new(end: EndSignal) -> Self { EndGuard { end: Rc::new(end) } } fn try_unwrap(self) -> Option<EndSignal> { Rc::try_unwrap(self.end).ok() } } unsafe impl 
Send for EndGuard {}
} if let Some(end) = dataset.take_end() { let p = end.tag.to_parent_uncheck(); let mut ends = self .iter_scope .remove(&p) .expect("unknown iteration scope;"); leave.notify_end(end)?; for e in ends.drain(..) { if let Some(end) = e.try_unwrap() { if end.tag.is_root() { assert!(self.iter_scope.is_empty()); debug_worker!( "all scopes out of iteration at scope level {};", self.scope_level ); enter.notify_end(end.clone())?; } trace_worker!("{:?} out of iteration", end.tag); leave.notify_end(end)?; } else { } } } } else { if !dataset.is_empty() { switch(dataset, &self.cond, leave, enter)?; } else { if let Some(end) = dataset.take_end() { let p = end.tag.to_parent_uncheck(); if self.iter_scope.contains_key(&p) { enter.notify_end(end)?; } else { error_worker!( "weird scope[{:?}] end in iteration {};", end.tag, self.scope_level ); unreachable!( "weird scope[{:?}] end in iteration {};", end.tag, self.scope_level ); } } else {
random
[ { "content": "pub fn u32_to_vec(x: u32) -> Vec<u8> {\n\n int_to_vec!(x, u32)\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/common/bytes/transform.rs", "rank": 0, "score": 354433.37192331563 }, { "content": "/// Parses an escape sequence within a string literal.\n\nfn parse_escape_sequence<Iter: Iterator<Item = char>>(iter: &mut Iter) -> ExprResult<char> {\n\n match iter.next() {\n\n Some('\"') => Ok('\"'),\n\n Some('\\\\') => Ok('\\\\'),\n\n Some(c) => Err(ExprError::IllegalEscapeSequence(format!(\"\\\\{}\", c))),\n\n None => Err(ExprError::IllegalEscapeSequence(\"\\\\\".to_string())),\n\n }\n\n}\n\n\n", "file_path": "interactive_engine/executor/ir/common/src/expr_parse/token.rs", "rank": 1, "score": 311561.93267584656 }, { "content": "pub fn parse_proerty_as_string(data: Vec<u8>, data_type: &DataType) -> Option<String> {\n\n let mut rdr = Cursor::new(data);\n\n match *data_type {\n\n DataType::String | DataType::Date => {\n\n let len = rdr.read_i32::<BigEndian>().unwrap();\n\n let mut ret = String::new();\n\n rdr.read_to_string(&mut ret).unwrap();\n\n assert_eq!(len as usize, ret.len());\n\n Some(format!(\"\\\"{}\\\"\", ret))\n\n }\n\n DataType::Double => {\n\n let ret = rdr.read_f64::<BigEndian>().unwrap();\n\n Some(ret.to_string())\n\n }\n\n DataType::Float => {\n\n let ret = rdr.read_f32::<BigEndian>().unwrap();\n\n Some(ret.to_string())\n\n }\n\n DataType::Long => {\n\n let ret = rdr.read_i64::<BigEndian>().unwrap();\n", "file_path": "interactive_engine/executor/store/groot/src/api/property.rs", "rank": 2, "score": 310133.49885895645 }, { "content": "pub fn new_input_session<D: Data>(input: &Box<dyn InputProxy>) -> InputSession<D> {\n\n RefWrapInput::<D>::downcast(input).new_session()\n\n}\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/communication/input/mod.rs", "rank": 3, "score": 305640.9122772126 }, { "content": "pub fn parse_str_to_data_type(value: &str) -> Result<DataType, String> {\n\n match 
value.to_lowercase().as_str() {\n\n \"bool\" => Ok(DataType::Bool),\n\n \"char\" => Ok(DataType::Char),\n\n \"short\" => Ok(DataType::Short),\n\n \"int\" => Ok(DataType::Int),\n\n \"long\" => Ok(DataType::Long),\n\n \"float\" => Ok(DataType::Float),\n\n \"double\" => Ok(DataType::Double),\n\n \"bytes\" => Ok(DataType::Bytes),\n\n \"string\" => Ok(DataType::String),\n\n \"date\" => Ok(DataType::Date),\n\n v => {\n\n if v.starts_with(\"list<\") {\n\n let tmp = &v[5..v.len() - 1];\n\n let sub_type = DataType::from(tmp);\n\n match sub_type {\n\n DataType::Int => Ok(DataType::ListInt),\n\n DataType::Long => Ok(DataType::ListLong),\n\n DataType::Float => Ok(DataType::ListFloat),\n", "file_path": "interactive_engine/executor/store/groot/src/schema/data_type.rs", "rank": 4, "score": 304457.28976270324 }, { "content": "pub fn create_schema_proto(partition_num: u32, version: u32, type_defs: Vec<TypeDef>) -> SchemaProto {\n\n let mut proto = SchemaProto::new();\n\n proto.set_version(version as i32);\n\n proto.set_partitionNum(partition_num as i32);\n\n for t in type_defs {\n\n proto.field_type.push(t.to_proto());\n\n }\n\n proto\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/schema/test_util.rs", "rank": 5, "score": 298864.6163879608 }, { "content": "pub fn root() -> Tag {\n\n Tag::Root\n\n}\n\n\n\nimpl Tag {\n\n pub fn inherit(parent: &Tag, current: u32) -> Self {\n\n match parent {\n\n &Tag::Root => Tag::One(current),\n\n &Tag::One(v) => Tag::Two(v, current),\n\n &Tag::Two(a, b) => Tag::Three(a, b, current),\n\n &Tag::Three(a, b, c) => {\n\n let mut v = Vec::with_capacity(4);\n\n v.push(a);\n\n v.push(b);\n\n v.push(c);\n\n Tag::Spilled(v)\n\n }\n\n Tag::Spilled(t) => {\n\n let mut v = Vec::with_capacity(t.len() + 1);\n\n v.extend_from_slice(&t[..]);\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/tag/mod.rs", "rank": 6, "score": 296806.8437628456 }, { "content": "/// Parse one item from `&str` in the iterator to a given type 
`T`.\n\n/// After the parsing, the iterator will be moved to the next item.\n\n///\n\n/// Return\n\n/// * succeed: the parsed data of type `T` succeed\n\n/// * parsing error: `GDBError::ParseError`\n\n/// * empty iterator: `GDBError::OutOfBoundError`\n\nfn _parse_one_item_to<'a, T: FromStr, Iter: Iterator<Item = &'a str>>(iter: &mut Iter) -> GDBResult<T> {\n\n if let Some(item) = iter.next() {\n\n item.parse::<T>()\n\n .map_err(|_| GDBError::ParseError)\n\n } else {\n\n Err(GDBError::OutOfBoundError)\n\n }\n\n}\n\n\n\n/// The vertex's meta data including global_id and label_id\n\n#[derive(Abomonation, PartialEq, Clone, Debug)]\n\npub struct VertexMeta<G> {\n\n pub global_id: G,\n\n pub label: Label,\n\n}\n\n\n\n/// The edge's meta data after parsing from the csv file.\n\n#[derive(Abomonation, PartialEq, Clone, Debug)]\n\npub struct EdgeMeta<G> {\n\n pub src_global_id: G,\n", "file_path": "interactive_engine/executor/store/exp_store/src/parser.rs", "rank": 7, "score": 295244.66815730074 }, { "content": "pub fn hash64_with_seed(data: &[u8], length: usize, seed: u32) -> i64 {\n\n let m = 0xc6a4a7935bd1e995_u64;\n\n let r = 47;\n\n let mut h = (seed as u64 & 0xffffffff_u64) ^ (m.wrapping_mul(length as u64));\n\n let length8 = length / 8;\n\n for i in 0..length8 {\n\n let i8 = i * 8;\n\n let mut k = (data[i8] & 0xff) as u64\n\n + (((data[i8 + 1] & 0xff) as u64) << 8)\n\n + (((data[i8 + 2] & 0xff) as u64) << 16)\n\n + (((data[i8 + 3] & 0xff) as u64) << 24)\n\n + (((data[i8 + 4] & 0xff) as u64) << 32)\n\n + (((data[i8 + 5] & 0xff) as u64) << 40)\n\n + (((data[i8 + 6] & 0xff) as u64) << 48)\n\n + (((data[i8 + 7] & 0xff) as u64) << 56);\n\n k = k.wrapping_mul(m);\n\n k ^= k >> r;\n\n k = k.wrapping_mul(m);\n\n h ^= k;\n\n h = h.wrapping_mul(m);\n", "file_path": "interactive_engine/executor/store/groot/src/db/graph/mod.rs", "rank": 8, "score": 291865.0987495133 }, { "content": "pub fn tokenize(string: &str) -> ExprResult<Vec<Token>> {\n\n 
partial_tokens_to_tokens(&str_to_partial_tokens(string)?)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_tokenize() {\n\n // ((1 + 2) * 2) ^ 3 == 6 ^ 3\n\n let case1 = tokenize(\"((1 + 1e-3) * 2) ^^ 3 == 6 ^^ 3\").unwrap();\n\n let expected_case1 = vec![\n\n Token::LBrace,\n\n Token::LBrace,\n\n Token::Int(1),\n\n Token::Plus,\n\n Token::Float(0.001),\n\n Token::RBrace,\n\n Token::Star,\n", "file_path": "interactive_engine/executor/ir/common/src/expr_parse/token.rs", "rank": 9, "score": 290034.0824817191 }, { "content": "#[allow(dead_code)]\n\npub fn create_schema(partition_num: u32, version: u32, type_defs: Vec<TypeDef>) -> Arc<dyn Schema> {\n\n let proto = create_schema_proto(partition_num, version, type_defs);\n\n SchemaBuilder::from(&proto).build()\n\n}\n", "file_path": "interactive_engine/executor/store/groot/src/schema/test_util.rs", "rank": 10, "score": 289600.8988375445 }, { "content": "fn get_or_set_tag_id(tag_pb: &mut common_pb::NameOrId, plan_meta: &mut PlanMeta) -> IrResult<TagId> {\n\n use common_pb::name_or_id::Item;\n\n if let Some(tag_item) = tag_pb.item.as_mut() {\n\n let (_, tag_id) = match tag_item {\n\n Item::Name(tag) => plan_meta.get_or_set_tag_id(tag),\n\n Item::Id(id) => {\n\n plan_meta.set_max_tag_id(*id as TagId + 1);\n\n (true, *id as TagId)\n\n }\n\n };\n\n *tag_pb = (tag_id as i32).into();\n\n\n\n Ok(tag_id)\n\n } else {\n\n Err(IrError::MissingData(\"NameOrId::Item\".to_string()))\n\n }\n\n}\n\n\n", "file_path": "interactive_engine/executor/ir/core/src/plan/logical.rs", "rank": 11, "score": 283667.19986384915 }, { "content": "/// Resolves all partial tokens by converting them to complex tokens.\n\nfn partial_tokens_to_tokens(mut tokens: &[PartialToken]) -> ExprResult<Vec<Token>> {\n\n let mut result = Vec::new();\n\n let mut recent_token: Option<Token> = None;\n\n while !tokens.is_empty() {\n\n let first = tokens[0].clone();\n\n let second = tokens.get(1).cloned();\n\n let third = 
tokens.get(2).cloned();\n\n let mut cutoff = 2;\n\n\n\n let curr_token = match first {\n\n PartialToken::Token(token) => {\n\n cutoff = 1;\n\n Some(token)\n\n }\n\n PartialToken::Literal(literal) => {\n\n cutoff = 1;\n\n if let Ok(number) = literal.parse::<i64>() {\n\n Some(Token::Int(number))\n\n } else if let Ok(number) = literal.parse::<f64>() {\n\n Some(Token::Float(number))\n", "file_path": "interactive_engine/executor/ir/common/src/expr_parse/token.rs", "rank": 12, "score": 280897.5808207496 }, { "content": "pub fn ls<P: AsRef<Path>>(path: P) -> Result<Vec<String>, String> {\n\n let mut ret = Vec::new();\n\n let paths = fs::read_dir(path).map_err(|e| format!(\"{:?}\", e))?;\n\n for path in paths {\n\n let path_buf = path.map_err(|e| format!(\"{:?}\", e))?.path();\n\n let filename = path_buf\n\n .to_str()\n\n .ok_or_else(|| format!(\"error\"))?\n\n .to_owned();\n\n ret.push(filename);\n\n }\n\n Ok(ret)\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/util/fs.rs", "rank": 13, "score": 279651.83813953295 }, { "content": "pub fn read_id<R: ReadExt>(reader: &mut R) -> io::Result<ID> {\n\n reader.read_u64()\n\n}\n\n\n", "file_path": "interactive_engine/executor/ir/graph_proxy/src/apis/graph/mod.rs", "rank": 14, "score": 278305.57752347837 }, { "content": "fn double_to_data_type(x: f64, data_type: &DataType) -> GraphTraceResult<Vec<u8>> {\n\n match *data_type {\n\n DataType::Bool => Ok(Property::Bool(x != 0.0).to_vec()),\n\n DataType::Char => {\n\n if x > u8::max_value() as f64 || x < u8::min_value() as f64 {\n\n let msg = format!(\"{} cannot be transformed to char\", x);\n\n let err = graph_err!(GraphErrorCode::DataError, msg, double_to_data_type, x, data_type);\n\n Err(err)\n\n } else {\n\n Ok(Property::Char(x as u8).to_vec())\n\n }\n\n }\n\n DataType::Short => {\n\n if x > i16::max_value() as f64 || x < i16::min_value() as f64 {\n\n let msg = format!(\"{} cannot be transformed to short\", x);\n\n let err = 
graph_err!(GraphErrorCode::DataError, msg, double_to_data_type, x, data_type);\n\n Err(err)\n\n } else {\n\n Ok(Property::Short(x as i16).to_vec())\n\n }\n", "file_path": "interactive_engine/executor/store/groot/src/api/property.rs", "rank": 15, "score": 277168.79251477215 }, { "content": "fn long_to_data_type(x: i64, data_type: &DataType) -> GraphTraceResult<Vec<u8>> {\n\n match *data_type {\n\n DataType::Bool => Ok(Property::Bool(x != 0).to_vec()),\n\n DataType::Char => {\n\n if x > u8::max_value() as i64 || x < 0 {\n\n let msg = format!(\"{} cannot be transformed to char\", x);\n\n let err = graph_err!(GraphErrorCode::DataError, msg, long_to_data_type, x, data_type);\n\n Err(err)\n\n } else {\n\n Ok(Property::Char(x as u8).to_vec())\n\n }\n\n }\n\n DataType::Short => {\n\n if x > i16::max_value() as i64 || x < i16::min_value() as i64 {\n\n let msg = format!(\"{} cannot be transformed to short\", x);\n\n let err = graph_err!(GraphErrorCode::DataError, msg, long_to_data_type, x, data_type);\n\n Err(err)\n\n } else {\n\n Ok(Property::Short(x as i16).to_vec())\n\n }\n", "file_path": "interactive_engine/executor/store/groot/src/api/property.rs", "rank": 16, "score": 277168.79251477215 }, { "content": "#[inline(always)]\n\npub fn new_output<'a, D: Data>(generic: &'a Box<dyn OutputProxy>) -> &'a RefWrapOutput<D> {\n\n RefWrapOutput::<D>::downcast(generic)\n\n}\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/communication/output/mod.rs", "rank": 17, "score": 276921.7970824239 }, { "content": "pub fn build_channels<T: Data>(\n\n id: ChannelId, local_workers: u32, server_index: u32, server_conf: &ServerConf,\n\n) -> Result<LinkedList<ChannelResource<T>>, BuildJobError> {\n\n let workers = local_workers as usize;\n\n let servers = server_conf.get_servers();\n\n if servers.is_empty() {\n\n return Ok(build_local_channels(id, workers));\n\n }\n\n\n\n let server_index = server_index as usize;\n\n\n\n if servers.len() == 1 && server_index == 0 {\n\n 
return Ok(build_local_channels(id, workers));\n\n }\n\n let my_server_id = servers[server_index];\n\n\n\n // prepare local channels;\n\n let mut to_local_pushes = LinkedList::new();\n\n let mut local_pull = LinkedList::new();\n\n {\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/data_plane/mod.rs", "rank": 18, "score": 276830.6289179134 }, { "content": "fn has_any<T: Data>(mut stream: Stream<T>) -> Result<SingleItem<bool>, BuildJobError> {\n\n stream\n\n .set_upstream_batch_capacity(1)\n\n .set_upstream_batch_size(1);\n\n let x = stream.unary(\"any_global\", |info| {\n\n let mut any_map = TidyTagMap::<()>::new(info.scope_level);\n\n move |input, output| {\n\n input.for_each_batch(|batch| {\n\n if !batch.is_empty() {\n\n if !any_map.contains_key(batch.tag()) {\n\n any_map.insert(batch.tag().clone(), ());\n\n output\n\n .new_session(batch.tag())?\n\n .give(Single(true))?;\n\n }\n\n batch.clear();\n\n\n\n if batch.is_last() {\n\n any_map.remove(batch.tag());\n\n }\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/operator/concise/any.rs", "rank": 19, "score": 273679.18453446304 }, { "content": "/// Turn a sequence of tokens with bracket, into a suffix order\n\npub fn to_suffix_expr<E: ExprToken + std::fmt::Debug>(expr: Vec<E>) -> ExprResult<Vec<E>> {\n\n let mut stack: Vec<E> = Vec::with_capacity(expr.len());\n\n let mut results: Vec<E> = Vec::with_capacity(expr.len());\n\n\n\n for token in expr {\n\n if token.is_operand() {\n\n results.push(token);\n\n } else if token.is_left_brace() {\n\n stack.push(token);\n\n } else if token.is_right_brace() {\n\n let mut is_left_brace = false;\n\n while !stack.is_empty() {\n\n let recent = stack.pop().unwrap();\n\n if recent.is_left_brace() {\n\n is_left_brace = true;\n\n break;\n\n } else {\n\n results.push(recent);\n\n }\n\n }\n", "file_path": "interactive_engine/executor/ir/common/src/expr_parse/mod.rs", "rank": 20, "score": 271871.4487055974 }, { "content": "pub fn write_id<W: 
WriteExt>(writer: &mut W, id: ID) -> io::Result<()> {\n\n writer.write_u64(id)\n\n}\n\n\n\n/// The number of bits in an `ID`\n\npub const ID_BITS: usize = std::mem::size_of::<ID>() * 8;\n\n\n\n/// Primary key in storage, including single column pk and multi column pks.\n\npub type PKV = OneOrMany<(NameOrId, Object)>;\n\n\n\n#[derive(Copy, Clone, Eq, PartialEq, Debug)]\n\npub enum Direction {\n\n Out = 0,\n\n In = 1,\n\n Both = 2,\n\n}\n\n\n\nimpl From<algebra_pb::edge_expand::Direction> for Direction {\n\n fn from(direction: algebra_pb::edge_expand::Direction) -> Self\n\n where\n", "file_path": "interactive_engine/executor/ir/graph_proxy/src/apis/graph/mod.rs", "rank": 21, "score": 271135.3148057709 }, { "content": " public long data() {\n\n return nativeData(this.address);\n", "file_path": "analytical_engine/java/grape-jdk/src/main/java/com/alibaba/graphscope/stdcxx/FFIByteVecVector.java", "rank": 22, "score": 269367.61609033117 }, { "content": " public long data() {\n\n return nativeData(this.address);\n", "file_path": "analytical_engine/java/grape-jdk/src/main/java/com/alibaba/graphscope/stdcxx/FFIIntVecVector.java", "rank": 23, "score": 269367.6160903312 }, { "content": "#[inline]\n\nfn check_connection<R: ReadExt>(conn: &mut R) -> std::io::Result<Option<(u64, u32)>> {\n\n let handshake = conn.read_u128()?;\n\n Ok(check_handshake(handshake))\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/network/src/transport/mod.rs", "rank": 24, "score": 267397.1773833603 }, { "content": "pub fn run_opt<DI, DO, F>(conf: JobConf, sink: ResultSink<DO>, mut logic: F) -> Result<(), JobSubmitError>\n\nwhere\n\n DI: Data,\n\n DO: Debug + Send + 'static,\n\n F: FnMut(&mut Worker<DI, DO>) -> Result<(), BuildJobError>,\n\n{\n\n init_env();\n\n let peer_guard = Arc::new(AtomicUsize::new(0));\n\n let conf = Arc::new(conf);\n\n let workers = allocate_local_worker(&conf)?;\n\n if workers.is_none() {\n\n return Ok(());\n\n }\n\n let worker_ids = workers.unwrap();\n\n 
let mut workers = Vec::new();\n\n for id in worker_ids {\n\n let mut worker = Worker::new(&conf, id, &peer_guard, sink.clone());\n\n let _g = crate::worker_id::guard(worker.id);\n\n logic(&mut worker)?;\n\n workers.push(worker);\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/lib.rs", "rank": 25, "score": 267060.1468712191 }, { "content": "pub fn get_servers() -> Vec<u64> {\n\n let lock = SERVERS\n\n .read()\n\n .expect(\"fetch read lock failure;\");\n\n lock.to_vec()\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/lib.rs", "rank": 26, "score": 266942.124677938 }, { "content": "#[inline]\n\nfn read_dump<T: Copy, R: Read>(reader: &mut R) -> std::io::Result<Vec<T>> {\n\n let size = reader.read_u64::<LittleEndian>()? as usize;\n\n if size > 0 {\n\n let mut buf = vec![0u8; size];\n\n reader.read_exact(&mut buf)?;\n\n let list = unsafe {\n\n let ptr = buf.as_ptr() as *mut T;\n\n let length = size / std::mem::size_of::<T>();\n\n Vec::from_raw_parts(ptr, length, length)\n\n };\n\n std::mem::forget(buf);\n\n Ok(list)\n\n } else {\n\n Ok(vec![])\n\n }\n\n}\n\n\n\npub struct NeighborsBackend {\n\n mmap: Mmap,\n\n len: usize,\n", "file_path": "interactive_engine/executor/engine/pegasus/graph/src/lib.rs", "rank": 27, "score": 266721.5624903686 }, { "content": "/// Each serving consists of a tuple, where tuple.0 indicates the player id,\n\n/// and tuple.1 indicates the ball that it is serving. 
The game continues\n\n/// until any player hits a LOSS ball, or it exceeds a random `max_iters`.\n\nfn single_play(serving: Stream<(u32, u32)>) -> Result<Stream<(u32, u32)>, BuildJobError> {\n\n let max_iters = 30;\n\n let mut until = IterCondition::<(u32, u32)>::max_iters(max_iters);\n\n until.until(move |(_player, ball)| Ok(*ball == LOSS));\n\n\n\n serving.iterate_until(until, |start| {\n\n start\n\n // Hit the ball to the opponent side, aka, 0 -> 1, 1 -> 0\n\n .repartition(|(player, _ball)| Ok((*player ^ 1) as u64))\n\n .map(|(player, ball)| {\n\n // The larger ball is, the easier it is to hit the ball back, which means\n\n // the less possible for the other player to loss (hit a zero number)\n\n let new_ball = thread_rng().gen_range(LOSS..ball);\n\n println!(\"Player {:?} hits a new ball {:?}\", player ^ 1, new_ball);\n\n Ok((player ^ 1, new_ball))\n\n })\n\n })\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/examples/ping_pong.rs", "rank": 28, "score": 265538.45094947715 }, { "content": "#[inline]\n\nfn dump<T: Copy, W: Write>(writer: &mut W, list: &Vec<T>) -> std::io::Result<()> {\n\n let len = list.len();\n\n let buf = unsafe {\n\n let ptr = list.as_ptr() as *const u8;\n\n let size = len * std::mem::size_of::<T>();\n\n std::slice::from_raw_parts(ptr, size)\n\n };\n\n writer.write_u64::<LittleEndian>(buf.len() as u64)?;\n\n writer.write_all(buf)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/graph/src/lib.rs", "rank": 29, "score": 263685.6985399383 }, { "content": "fn create_src(id: u32, source: &mut Source<i32>) -> Result<(Stream<i32>, Stream<i32>), BuildJobError> {\n\n let src1 = if id == 0 { source.input_from(1..5)? } else { source.input_from(8..10)? 
};\n\n let (src1, src2) = src1.copied()?;\n\n let src2 = src2.map(|x| Ok(x + 1))?;\n\n Ok((src1, src2))\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/tests/join_test.rs", "rank": 30, "score": 258662.31302543418 }, { "content": "pub fn i16_to_vec(x: i16) -> Vec<u8> {\n\n int_to_vec!(x, i16)\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/common/bytes/transform.rs", "rank": 31, "score": 257830.94985296874 }, { "content": "pub fn i32_to_vec(x: i32) -> Vec<u8> {\n\n int_to_vec!(x, i32)\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/common/bytes/transform.rs", "rank": 32, "score": 257830.94985296874 }, { "content": "pub fn i64_to_vec(x: i64) -> Vec<u8> {\n\n int_to_vec!(x, i64)\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/common/bytes/transform.rs", "rank": 33, "score": 257830.94985296874 }, { "content": "pub fn u64_to_vec(x: u64) -> Vec<u8> {\n\n int_to_vec!(x, u64)\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/common/bytes/transform.rs", "rank": 34, "score": 257830.94985296874 }, { "content": "#[inline]\n\nfn setup_connection<W: WriteExt>(server_id: u64, hb_sec: u32, conn: &mut W) -> std::io::Result<()> {\n\n let handshake = get_handshake(server_id, hb_sec);\n\n conn.write_u128(handshake)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n fn handshake(server_id: u64) {\n\n let value = get_handshake(server_id, 5);\n\n assert_eq!(Some((server_id, 5)), check_handshake(value), \"error handshake on {}\", server_id);\n\n }\n\n\n\n #[test]\n\n fn hand_shake_rw_test() {\n\n for i in 0u64..65536 {\n\n handshake(i);\n\n }\n\n handshake(!0);\n\n }\n\n}\n", "file_path": "interactive_engine/executor/engine/pegasus/network/src/transport/mod.rs", "rank": 35, "score": 256101.18889665432 }, { "content": "pub fn spawn_batch<T: Task + 'static, I: IntoIterator<Item = T>>(tasks: I) -> Result<(), RejectError<()>> {\n\n 
EXECUTOR.1.spawn_batch(tasks)\n\n}\n", "file_path": "interactive_engine/executor/engine/pegasus/executor/src/reactor.rs", "rank": 36, "score": 255620.08577453642 }, { "content": "pub fn partition<P: AsRef<Path>>(_path: P, _partitions: usize) -> std::io::Result<Vec<MemIdTopoGraph>> {\n\n todo!()\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/graph/src/lib.rs", "rank": 37, "score": 255503.26614274812 }, { "content": "pub fn parse_property(data: &str, data_type: DataType) -> Property {\n\n match data_type {\n\n DataType::Bool => match data {\n\n \"true\" => Property::Bool(true),\n\n \"false\" => Property::Bool(false),\n\n _ => Property::Unknown,\n\n },\n\n DataType::Char => match data.len() {\n\n 1 => Property::Char(data.as_bytes()[0]),\n\n _ => Property::Unknown,\n\n },\n\n DataType::Short => match data.parse::<i16>() {\n\n Ok(x) => Property::Short(x),\n\n _ => Property::Unknown,\n\n },\n\n DataType::Int => match data.parse::<i32>() {\n\n Ok(x) => Property::Int(x),\n\n _ => Property::Unknown,\n\n },\n\n DataType::Long => match data.parse::<i64>() {\n", "file_path": "interactive_engine/executor/store/groot/src/api/property.rs", "rank": 38, "score": 254188.54550359357 }, { "content": "fn iterate_x_apply_x_flatmap_count_x_map_x(workers: u32) {\n\n let name = format!(\"iterate_x_apply_x_flatmap_count_x_map_x_{}_test\", workers);\n\n let mut conf = JobConf::new(name);\n\n conf.set_workers(workers);\n\n let mut result = pegasus::run(conf, || {\n\n let index = pegasus::get_current_worker().index + 1;\n\n let src = vec![index];\n\n move |input, output| {\n\n input\n\n .input_from(src)?\n\n .iterate(10, |start| {\n\n start\n\n .repartition(|x| Ok(*x as u64))\n\n .apply(|stream| stream.flat_map(|x| Ok(0..x + 1))?.count())?\n\n .map(|(_p, c)| Ok(c as u32))\n\n })?\n\n .sink_into(output)\n\n }\n\n })\n\n .expect(\"submit job failure;\");\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/tests/iteration_test.rs", "rank": 39, "score": 
254094.0237409788 }, { "content": "pub fn gen_edge_properties(\n\n si: SnapshotId, edge_type: &EdgeKind, id: &EdgeId, type_def: &TypeDef,\n\n) -> HashMap<PropertyId, Value> {\n\n let mut map = HashMap::new();\n\n for prop_def in type_def.get_prop_defs() {\n\n let p = edge_prop(si, edge_type, id, prop_def.r#type);\n\n map.insert(prop_def.id, p);\n\n }\n\n map\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/graph/tests/data.rs", "rank": 40, "score": 253675.23691937834 }, { "content": "pub fn gen_vertex_properties(\n\n si: SnapshotId, label: LabelId, id: VertexId, type_def: &TypeDef,\n\n) -> HashMap<PropertyId, Value> {\n\n let mut map = HashMap::new();\n\n for prop_def in type_def.get_prop_defs() {\n\n let p = vertex_prop(si, label, id, prop_def.r#type);\n\n map.insert(prop_def.id, p);\n\n }\n\n map\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/graph/tests/data.rs", "rank": 41, "score": 253675.23691937834 }, { "content": "/// Define the cyclic dataflow\n\npub trait Iteration<D: Data> {\n\n /// To iterate through a sub-dataflow given by `func` by at most `max_iters` times.\n\n /// The input data stream will serve as the input of `func` in the first iteration,\n\n /// and the output data stream of `func` of next current iteration will automatically be\n\n /// the input of `func` of the next iteration. 
In the dataflow, this will introduce a cyclic\n\n /// (or feedback) edge between the last operator and first operator of `func`, as the name\n\n /// cyclic dataflow has suggested.\n\n ///\n\n /// # Example\n\n /// ```\n\n /// # use pegasus::{JobConf};\n\n /// # use pegasus::api::{Sink, Collect, Iteration, Map};\n\n ///\n\n /// # let conf = JobConf::new(\"flat_map_example\");\n\n /// let mut results = pegasus::run(conf, || {\n\n /// |input, output| {\n\n /// input\n\n /// .input_from(0..6u32)?\n\n /// // add each value ten times\n\n /// .iterate(10, |input| {\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/api/iteration.rs", "rank": 42, "score": 252988.1136319414 }, { "content": "pub fn gen_edge_prop_map(\n\n si: SnapshotId, edge_type: &EdgeKind, id: &EdgeId, prop_defs: &Vec<&PropDef>,\n\n) -> HashMap<PropertyId, Value> {\n\n let mut map = HashMap::new();\n\n for prop_def in prop_defs {\n\n let p = edge_prop(si, edge_type, id, prop_def.r#type);\n\n map.insert(prop_def.id, p);\n\n }\n\n map\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/graph/tests/data.rs", "rank": 43, "score": 251088.83978741942 }, { "content": "pub fn gen_edge_update_properties(\n\n si: SnapshotId, edge_type: &EdgeKind, id: &EdgeId, type_def: &TypeDef,\n\n) -> HashMap<PropertyId, Value> {\n\n let mut map = HashMap::new();\n\n let x = si as i64\n\n + (edge_type.edge_label_id + edge_type.src_vertex_label_id + edge_type.dst_vertex_label_id) as i64\n\n + (id.src_id + id.dst_id + id.inner_id) as i64;\n\n let count = ValueType::count() as i64;\n\n for i in x..x + count / 2 {\n\n let prop_id = (i % count) as PropertyId + 1;\n\n let prop_def = type_def.get_prop_def(prop_id).unwrap();\n\n let v = edge_prop(si, edge_type, id, prop_def.r#type);\n\n map.insert(prop_id, v);\n\n }\n\n map\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/graph/tests/data.rs", "rank": 44, "score": 251088.83978741942 }, { "content": "pub fn 
gen_vertex_update_properties(\n\n si: SnapshotId, label: LabelId, id: VertexId, type_def: &TypeDef,\n\n) -> HashMap<PropertyId, Value> {\n\n let mut map = HashMap::new();\n\n let x = si as i64 + label as i64 + id as i64;\n\n let count = ValueType::count() as i64;\n\n for i in x..x + count / 2 {\n\n let prop_id = (i % count) as PropertyId + 1;\n\n let prop_def = type_def.get_prop_def(prop_id).unwrap();\n\n let v = vertex_prop(si, label, id, prop_def.r#type);\n\n map.insert(prop_id, v);\n\n }\n\n map\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/graph/tests/data.rs", "rank": 45, "score": 251088.83978741942 }, { "content": "pub fn parse_properties<'a, Iter: Iterator<Item = &'a str>>(\n\n mut record_iter: Iter, _header: Option<&[ColumnMeta]>,\n\n) -> GDBResult<Row> {\n\n let mut properties = Row::default();\n\n if _header.is_none() {\n\n return Ok(properties);\n\n }\n\n\n\n let header = _header.unwrap();\n\n let mut header_iter = header.iter();\n\n\n\n while let Some(val) = record_iter.next() {\n\n // unwrap the property and type\n\n if let Some(ColumnMeta { name, data_type, is_primary_key: _ }) = header_iter.next() {\n\n match data_type {\n\n &DataType::String => properties.push(object!(val.to_string())),\n\n &DataType::Integer => properties.push(object!(val.parse::<i32>()?)),\n\n &DataType::Long => properties.push(object!(val.parse::<i64>()?)),\n\n &DataType::Double => properties.push(object!(val.parse::<f64>()?)),\n\n &DataType::Date => properties.push(object!(parse_datetime(val)?)),\n", "file_path": "interactive_engine/executor/store/exp_store/src/parser.rs", "rank": 46, "score": 248663.1262660831 }, { "content": "/// Get the tag info from the given name_or_id\n\n/// - in pb::Pattern transformation, tag is required to be id instead of str\n\npub fn pb_name_or_id_to_id(name_or_id: &common_pb::NameOrId) -> IrPatternResult<TagId> {\n\n match name_or_id.item {\n\n Some(common_pb::name_or_id::Item::Id(tag_id)) => Ok(tag_id as TagId),\n\n _ => 
{\n\n Err(ParsePbError::ParseError(format!(\"tag should be id, while it is {:?}\", name_or_id)).into())\n\n }\n\n }\n\n}\n\n\n", "file_path": "interactive_engine/executor/ir/core/src/glogue/pattern.rs", "rank": 47, "score": 248323.85383473526 }, { "content": "fn new_batch<D>(tag: Tag, worker: u32, buf: Buffer<D>) -> MicroBatch<D> {\n\n MicroBatch::new(tag.clone(), worker, buf.into_read_only())\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/operator/concise/correlate.rs", "rank": 48, "score": 246915.09147540954 }, { "content": "pub fn get_codec_version(data: &[u8]) -> CodecVersion {\n\n let reader = UnsafeBytesReader::new(data);\n\n reader.read_i32(0).to_be()\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/graph/codec.rs", "rank": 49, "score": 246903.3902095628 }, { "content": "pub fn startup(conf: Configuration) -> Result<(), StartupError> {\n\n if let Some(pool_size) = conf.max_pool_size {\n\n pegasus_executor::set_core_pool_size(pool_size as usize);\n\n }\n\n pegasus_executor::try_start_executor_async();\n\n\n\n let mut servers = HashSet::new();\n\n let server_id = conf.server_id();\n\n servers.insert(server_id);\n\n if let Some(id) = set_server_id(server_id) {\n\n return Err(StartupError::AlreadyStarted(id));\n\n }\n\n if let Some(net_conf) = conf.network_config() {\n\n if let Some(peers) = net_conf.get_servers()? 
{\n\n let addr = net_conf.local_addr()?;\n\n let conn_conf = net_conf.get_connection_param();\n\n for p in peers.iter() {\n\n servers.insert(p.id);\n\n }\n\n let addr = pegasus_network::start_up(server_id, conn_conf, addr, peers)?;\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/lib.rs", "rank": 50, "score": 244559.7360751517 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n if cfg!(target_os = \"linux\") {\n\n println!(\"cargo:rustc-link-arg=-Wl,--unresolved-symbols=ignore-all\");\n\n } else if cfg!(target_os = \"macos\") {\n\n println!(\"cargo:rustc-link-arg=-Wl,-undefined\");\n\n println!(\"cargo:rustc-link-arg=-Wl,dynamic_lookup\");\n\n } else {\n\n unimplemented!()\n\n }\n\n Ok(())\n\n}\n", "file_path": "interactive_engine/executor/ir/integrated/build.rs", "rank": 51, "score": 240431.65919695696 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n codegen_inplace()\n\n}\n\n\n\n#[cfg(feature = \"proto_inplace\")]\n\nuse std::path::PathBuf;\n\n#[cfg(feature = \"proto_inplace\")]\n\nconst GEN_DIR: &'static str = \"src/generated\";\n\n\n", "file_path": "interactive_engine/executor/ir/common/build.rs", "rank": 52, "score": 240431.65919695696 }, { "content": "#[bench]\n\nfn tag_two_hash_w_r_hit(b: &mut test::Bencher) {\n\n let mut map = HashMap::with_capacity(1024);\n\n b.iter(|| {\n\n map.insert(Tag::Two(3, 6), ());\n\n map.remove(&Tag::Two(3, 6));\n\n })\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_tag.rs", "rank": 53, "score": 239679.29923840376 }, { "content": "#[bench]\n\nfn tag_three_hash_w_r_hit(b: &mut test::Bencher) {\n\n let mut map = HashMap::with_capacity(1024);\n\n b.iter(|| {\n\n map.insert(Tag::Three(3, 6, 9), ());\n\n map.remove(&Tag::Three(3, 6, 9));\n\n })\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_tag.rs", "rank": 54, "score": 239679.29923840376 }, { "content": "#[bench]\n\nfn 
tag_two_a_hash_w_r_hit(b: &mut test::Bencher) {\n\n let mut map = AHashMap::with_capacity(1024);\n\n b.iter(|| {\n\n map.insert(Tag::Two(3, 6), ());\n\n map.remove(&Tag::Two(3, 6));\n\n })\n\n}\n\n\n\n// #[bench]\n\n// fn tag_two_hash_w_r_hit_1000(b: &mut test::Bencher) {\n\n// let mut map = HashMap::new();\n\n// for i in 0..1000 {\n\n// map.insert(Tag::Two(3, i), ());\n\n// }\n\n// b.iter(|| {\n\n// map.insert(Tag::Two(3, 2000), ());\n\n// map.remove(&Tag::Two(3, 2000));\n\n// })\n\n// }\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_tag.rs", "rank": 55, "score": 239679.29923840376 }, { "content": "#[bench]\n\nfn tag_one_hash_w_r_hit(b: &mut test::Bencher) {\n\n let mut map = HashMap::with_capacity(1024);\n\n b.iter(|| {\n\n map.insert(Tag::One(77), ());\n\n map.remove(&Tag::One(77));\n\n })\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_tag.rs", "rank": 56, "score": 239679.29923840376 }, { "content": "#[bench]\n\nfn tag_root_hash_w_r_hit(b: &mut test::Bencher) {\n\n let mut map = HashMap::with_capacity(1024);\n\n b.iter(|| {\n\n map.insert(Tag::Root, ());\n\n map.remove(&Tag::Root);\n\n })\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_tag.rs", "rank": 57, "score": 239679.29923840376 }, { "content": "#[bench]\n\nfn tag_one_a_hash_w_r_hit(b: &mut test::Bencher) {\n\n let mut map = AHashMap::with_capacity(1024);\n\n b.iter(|| {\n\n map.insert(Tag::One(77), ());\n\n map.remove(&Tag::One(77));\n\n })\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_tag.rs", "rank": 58, "score": 239679.29923840376 }, { "content": "#[bench]\n\nfn tag_three_a_hash_w_r_hit(b: &mut test::Bencher) {\n\n let mut map = AHashMap::with_capacity(1024);\n\n b.iter(|| {\n\n map.insert(Tag::Three(3, 6, 9), ());\n\n map.remove(&Tag::Three(3, 6, 9));\n\n })\n\n}\n", "file_path": 
"interactive_engine/executor/engine/pegasus/pegasus/benches/bench_tag.rs", "rank": 59, "score": 239679.29923840376 }, { "content": "#[bench]\n\nfn read_vec_deque(b: &mut Bencher) {\n\n let mut queue = VecDeque::new();\n\n //let item = \"bench\".to_owned();\n\n for _ in 0..572726701 {\n\n queue.push_back(FlatPtr(!0, 65536));\n\n }\n\n\n\n b.iter(|| queue.pop_front());\n\n println!(\"after read queue.size {}\", queue.len());\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/common/benches/queue.rs", "rank": 60, "score": 238067.79547666345 }, { "content": "#[bench]\n\nfn write_vec_deque(b: &mut Bencher) {\n\n let mut queue = VecDeque::new();\n\n b.iter(|| queue.push_back(FlatPtr(!0, 65536)));\n\n println!(\"after write queue.size {}\", queue.len());\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/common/benches/queue.rs", "rank": 61, "score": 238067.79547666345 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n println!(\"cargo:rerun-if-changed=proto/clickhouse_grpc.proto\");\n\n codegen_inplace()\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/benchmark/build.rs", "rank": 62, "score": 237809.96525467772 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n println!(\"cargo:rerun-if-changed=proto/job_service.proto\");\n\n println!(\"cargo:rerun-if-changed=proto/job_plan.proto\");\n\n codegen_inplace()\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/server/build.rs", "rank": 63, "score": 237809.96525467772 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n codegen_inplace()\n\n}\n\n\n\n#[cfg(feature = \"with_v6d\")]\n\nconst NATIVE_DIR: &'static str = \"src/store_impl/v6d/native\";\n\n\n", "file_path": "interactive_engine/executor/store/global_query/build.rs", "rank": 64, "score": 237809.96525467772 }, { "content": "#[bench]\n\nfn tag_one_int_map_w_r_hit(b: &mut test::Bencher) {\n\n let mut map = 
IntMap::default();\n\n b.iter(|| {\n\n map.insert(77, ());\n\n map.remove(&77);\n\n })\n\n}\n\n\n\n// #[bench]\n\n// fn tag_one_hash_w_r_hit_1000(b: &mut test::Bencher) {\n\n// let mut map = HashMap::new();\n\n// for i in 0..1024u32 {\n\n// map.insert(Tag::One(i), ());\n\n// }\n\n// b.iter(|| {\n\n// map.insert(Tag::One(2000), ());\n\n// map.remove(&Tag::One(2000));\n\n// })\n\n// }\n\n//\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/benches/bench_tag.rs", "rank": 65, "score": 237383.61057360828 }, { "content": "pub fn run<DI, DO, F, FN>(conf: JobConf, func: F) -> Result<ResultStream<DO>, JobSubmitError>\n\nwhere\n\n DI: Data,\n\n DO: Debug + Send + 'static,\n\n F: Fn() -> FN,\n\n FN: FnOnce(&mut Source<DI>, ResultSink<DO>) -> Result<(), BuildJobError> + 'static,\n\n{\n\n let (tx, rx) = crossbeam_channel::unbounded();\n\n let sink = ResultSink::new(tx);\n\n let cancel_hook = sink.get_cancel_hook().clone();\n\n let results = ResultStream::new(conf.job_id, cancel_hook, rx);\n\n run_opt(conf, sink, |worker| worker.dataflow(func()))?;\n\n Ok(results)\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/lib.rs", "rank": 66, "score": 236767.96418183314 }, { "content": "// g.V(5).in().out().hasId(5).in()\n\nfn modern_graph_filter_flatmap_test() -> ResultStream<u32> {\n\n let mut conf = JobConf::default();\n\n let num_workers = 2;\n\n conf.set_workers(num_workers);\n\n let result_stream = pegasus::run(conf, || {\n\n let src = if pegasus::get_current_worker().index == 0 { vec![] } else { vec![5] };\n\n move |input, output| {\n\n input\n\n .input_from(src)?\n\n .repartition(|x| Ok(*x as u64))\n\n .flat_map(move |x| Ok(MAP.get(&x).unwrap().1.iter().cloned()))?\n\n .repartition(|x| Ok(*x as u64))\n\n .flat_map(move |x| Ok(MAP.get(&x).unwrap().0.iter().cloned()))?\n\n .repartition(|x| Ok(*x as u64))\n\n .filter(|x| Ok(*x == 5))?\n\n .repartition(|x| Ok(*x as u64))\n\n .flat_map(|x| 
Ok(MAP.get(&x).unwrap().1.iter().cloned()))?\n\n .sink_into(output)\n\n }\n\n })\n\n .expect(\"submit job failure\");\n\n result_stream\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/tests/filter_test.rs", "rank": 67, "score": 235919.05543866684 }, { "content": "pub fn hash64(data: &[u8], length: usize) -> i64 {\n\n let seed = 0xc70f6907;\n\n hash64_with_seed(data, length, seed)\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/graph/mod.rs", "rank": 68, "score": 235485.60847079355 }, { "content": "#[inline]\n\npub fn file_name(path: &str) -> Result<&str, String> {\n\n let name = Path::new(path)\n\n .file_name()\n\n .ok_or_else(|| format!(\"{} get filename failed\", path))?;\n\n name.to_str()\n\n .ok_or_else(|| format!(\"{:?} to str failed\", name))\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/util/fs.rs", "rank": 69, "score": 233350.17739438196 }, { "content": "#[inline]\n\npub fn file_size(path: &str) -> Result<usize, String> {\n\n fs::metadata(path)\n\n .map(|meta| meta.len() as usize)\n\n .map_err(|e| format!(\"get metadata of {} failed, {:?}\", path, e))\n\n}\n\n\n", "file_path": "interactive_engine/executor/store/groot/src/db/util/fs.rs", "rank": 70, "score": 233350.17739438196 }, { "content": "fn try_semi_join_output<L: Data + HasKey, R: Data + HasKey>(\n\n helper: &mut Helper<L, R>, output: &Output<L>, is_anti: bool, tag: &Tag,\n\n) -> Result<(), JobExecError> {\n\n if !helper.is_end(tag) {\n\n return Ok(());\n\n }\n\n let mut session = output.new_session(tag)?;\n\n if let Some(MapEntry { data: map, indicator: _ }) = helper.left_map.remove(tag) {\n\n for entry in map\n\n .values()\n\n // A semi join would output the items that have been matched in the join,\n\n // while an anti join output those that have not been matched.\n\n // Here, `entry.indicator ^ is_anti` does the above assertion.\n\n .filter(|entry| entry.indicator ^ is_anti)\n\n {\n\n 
session.give_iterator(entry.data.clone().into_iter())?;\n\n }\n\n }\n\n helper.right_map.remove(tag);\n\n Ok(())\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/operator/concise/keyed/join.rs", "rank": 71, "score": 232732.01119426815 }, { "content": "fn try_outer_join_output<L: Data + HasKey, R: Data + HasKey>(\n\n helper: &mut Helper<L, R>, mut session: OutputSession<(Option<L>, Option<R>)>, output_left: bool,\n\n outoutput_right: bool, tag: &Tag,\n\n) -> Result<(), JobExecError> {\n\n if !helper.is_end(tag) {\n\n return Ok(());\n\n }\n\n if let Some(MapEntry { data: map, indicator: _ }) = helper.left_map.remove(tag) {\n\n if output_left {\n\n // The case of left/full outer join, which must output a <L, None> for any left item\n\n // that has not been matched (`entry.indicator = false`)\n\n for entry in map.values().filter(|entry| !entry.indicator) {\n\n session.give_iterator(\n\n entry\n\n .data\n\n .clone()\n\n .into_iter()\n\n .map(|l| (Some(l), None)),\n\n )?;\n\n }\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/operator/concise/keyed/join.rs", "rank": 72, "score": 232732.01119426815 }, { "content": " public static void main(String[] args) throws JsonProcessingException {\n\n Map<String, FileColumnMapping> m = new HashMap<>();\n\n\n\n m.put(\"comment_0_0.csv\", new Comment().toFileColumnMapping());\n\n m.put(\"forum_0_0.csv\", new Forum().toFileColumnMapping());\n\n m.put(\"person_0_0.csv\", new Person().toFileColumnMapping());\n\n m.put(\"post_0_0.csv\", new Post().toFileColumnMapping());\n\n m.put(\"place_0_0.csv\", new Place().toFileColumnMapping());\n\n m.put(\"tag_0_0.csv\", new Tag().toFileColumnMapping());\n\n m.put(\"tagclass_0_0.csv\", new Tagclass().toFileColumnMapping());\n\n m.put(\"organisation_0_0.csv\", new Organisation().toFileColumnMapping());\n\n\n\n m.put(\n\n \"comment_hasCreator_person_0_0.csv\",\n\n new CommentHasCreatorPerson().toFileColumnMapping());\n\n 
m.put(\"comment_hasTag_tag_0_0.csv\", new CommentHasTagTag().toFileColumnMapping());\n\n m.put(\n\n \"comment_isLocatedIn_place_0_0.csv\",\n\n new CommentIsLocatedInPlace().toFileColumnMapping());\n\n m.put(\"comment_replyOf_comment_0_0.csv\", new CommentReplyOfComment().toFileColumnMapping());\n\n m.put(\"comment_replyOf_post_0_0.csv\", new CommentReplyOfPost().toFileColumnMapping());\n\n m.put(\"forum_containerOf_post_0_0.csv\", new ForumContainerOfPost().toFileColumnMapping());\n\n m.put(\"forum_hasMember_person_0_0.csv\", new ForumHasMemberPerson().toFileColumnMapping());\n\n m.put(\n\n \"forum_hasModerator_person_0_0.csv\",\n\n new ForumHasModeratorPerson().toFileColumnMapping());\n\n m.put(\"forum_hasTag_tag_0_0.csv\", new ForumHasTagTag().toFileColumnMapping());\n\n m.put(\"person_hasInterest_tag_0_0.csv\", new PersonHasInterestTag().toFileColumnMapping());\n\n m.put(\n\n \"person_isLocatedIn_place_0_0.csv\",\n\n new PersonIsLocatedInPlace().toFileColumnMapping());\n\n m.put(\"person_knows_person_0_0.csv\", new PersonKnowsPerson().toFileColumnMapping());\n\n m.put(\"person_likes_comment_0_0.csv\", new PersonLikesComment().toFileColumnMapping());\n\n m.put(\"person_likes_post_0_0.csv\", new PersonLikesPost().toFileColumnMapping());\n\n m.put(\n\n \"person_studyAt_organisation_0_0.csv\",\n\n new PersonStudyAtOrganisation().toFileColumnMapping());\n\n m.put(\n\n \"person_workAt_organisation_0_0.csv\",\n\n new PersonWorkAtOrganisation().toFileColumnMapping());\n\n m.put(\"post_hasCreator_person_0_0.csv\", new PostHasCreatorPerson().toFileColumnMapping());\n\n m.put(\"post_hasTag_tag_0_0.csv\", new PostHasTagTag().toFileColumnMapping());\n\n m.put(\"post_isLocatedIn_place_0_0.csv\", new PostIsLocatedInPlace().toFileColumnMapping());\n\n m.put(\n\n \"organisation_isLocatedIn_place_0_0.csv\",\n\n new OrganisationIsLocatedInPlace().toFileColumnMapping());\n\n m.put(\"place_isPartOf_place_0_0.csv\", new PlaceIsPartOfPlace().toFileColumnMapping());\n\n 
m.put(\"tag_hasType_tagclass_0_0.csv\", new TagHasTypeTagclass().toFileColumnMapping());\n\n m.put(\n\n \"tagclass_isSubclassOf_tagclass_0_0.csv\",\n\n new TagclassIsSubclassOfTagclass().toFileColumnMapping());\n\n\n\n System.out.println(new ObjectMapper().writeValueAsString(m));\n", "file_path": "interactive_engine/data_load_tools/src/test/java/com/alibaba/maxgraph/dataload/ldbc/jsongen/Main.java", "rank": 73, "score": 232532.21598764492 }, { "content": "pub trait SwitchFn<D: Data> {\n\n fn switch<L, R, F>(self, func: F) -> Result<(Stream<L>, Stream<R>), BuildJobError>\n\n where\n\n L: Data,\n\n R: Data,\n\n F: Fn(&D) -> FnResult<bool> + Send + 'static;\n\n\n\n fn switch_mut<L, R, F, B>(self, builder: B) -> Result<(Stream<L>, Stream<R>), BuildJobError>\n\n where\n\n L: Data,\n\n R: Data,\n\n F: FnMut(&D) -> FnResult<bool> + Send + 'static,\n\n B: Fn() -> F + Send + 'static;\n\n}\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/api/concise/switch.rs", "rank": 74, "score": 232428.2470014506 }, { "content": "#[inline]\n\nfn encode_result<T>(_result: T) -> Vec<u8> {\n\n unimplemented!()\n\n}\n", "file_path": "interactive_engine/executor/engine/pegasus/benchmark/src/queries/ldbc/ic1.rs", "rank": 75, "score": 232302.24420937037 }, { "content": "pub trait ScopeByKey<K: Data + Key, V: Data> {\n\n fn segment_apply<F, T>(self, task: F) -> Result<Stream<Pair<K, T>>, BuildJobError>\n\n where\n\n T: Data,\n\n F: FnOnce(Stream<V>) -> Result<SingleItem<T>, BuildJobError>;\n\n}\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/api/concise/keyed/apply.rs", "rank": 76, "score": 231846.12939258077 }, { "content": "struct ScopedAggregate<D: Data>(PhantomData<D>);\n\n\n\nimpl<D: Data> ScopedAggregate<D> {\n\n fn new() -> Self {\n\n ScopedAggregate(std::marker::PhantomData)\n\n }\n\n}\n\n\n\nimpl<D: Data> BatchRouteFunction<D> for ScopedAggregate<D> {\n\n fn route(&self, batch: &MicroBatch<D>) -> FnResult<u64> {\n\n 
Ok(batch.tag.current_uncheck() as u64)\n\n }\n\n}\n\n\n\npub struct AggregateBatchPush<D: Data> {\n\n push: ExchangeByBatchPush<D>,\n\n}\n\n\n\nimpl<D: Data> AggregateBatchPush<D> {\n\n pub fn new(info: ChannelInfo, pushes: Vec<EventEmitPush<D>>) -> Self {\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/communication/decorator/aggregate.rs", "rank": 77, "score": 231837.65602817203 }, { "content": "pub fn get_handshake(server_id: u64, hb: u32) -> u128 {\n\n let mut value = (PASS_PHRASE as u128) << 96;\n\n let server_id = server_id as u128;\n\n value |= server_id << 32;\n\n value |= hb as u128;\n\n value\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/network/src/transport/mod.rs", "rank": 78, "score": 231041.5485823984 }, { "content": "public class Tag extends VertexInfo {\n\n String label = \"tag\";\n\n\n\n String[] propertyNames = new String[] {\"id\", \"name\", \"url\"};\n\n\n\n public String getLabel() {\n\n return label;\n\n }\n\n\n\n public String[] getPropertyNames() {\n\n return propertyNames;\n\n }\n", "file_path": "interactive_engine/data_load_tools/src/test/java/com/alibaba/maxgraph/dataload/ldbc/jsongen/Tag.java", "rank": 79, "score": 230785.19172984088 }, { "content": " private Writable[] data;\n", "file_path": "analytical_engine/java/grape-giraph/src/main/java/com/alibaba/graphscope/utils/Gid2DataFixed.java", "rank": 80, "score": 230096.78028283053 }, { "content": " public GSVertexArray<DATA_T> data() {\n\n if (Objects.isNull(ffiVertexDataContext)) {\n\n return null;\n\n }\n\n return ffiVertexDataContext.data();\n", "file_path": "analytical_engine/java/grape-jdk/src/main/java/com/alibaba/graphscope/context/VertexDataContext.java", "rank": 81, "score": 230096.78028283053 }, { "content": " private ArrayList<Writable> data;\n", "file_path": "analytical_engine/java/grape-giraph/src/main/java/com/alibaba/graphscope/utils/Gid2DataResizable.java", "rank": 82, "score": 230096.78028283053 }, { "content": "pub fn 
get_resource_mut<T: Any>() -> Option<ResourceMut<T>> {\n\n RESOURCES.with(|res| {\n\n let mut borrow = res.borrow_mut();\n\n let type_id = TypeId::of::<T>();\n\n if let Some(r) = borrow.get_mut(&type_id) {\n\n let resource: &mut T = r.downcast_mut::<T>().expect(\"type id error;\");\n\n let ptr = resource as *mut T;\n\n Some(ResourceMut { ptr })\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/resource.rs", "rank": 83, "score": 228085.51035918196 }, { "content": "public class Main {\n\n public static void main(String[] args) throws JsonProcessingException {\n\n Map<String, FileColumnMapping> m = new HashMap<>();\n\n\n\n m.put(\"comment_0_0.csv\", new Comment().toFileColumnMapping());\n\n m.put(\"forum_0_0.csv\", new Forum().toFileColumnMapping());\n\n m.put(\"person_0_0.csv\", new Person().toFileColumnMapping());\n\n m.put(\"post_0_0.csv\", new Post().toFileColumnMapping());\n\n m.put(\"place_0_0.csv\", new Place().toFileColumnMapping());\n\n m.put(\"tag_0_0.csv\", new Tag().toFileColumnMapping());\n\n m.put(\"tagclass_0_0.csv\", new Tagclass().toFileColumnMapping());\n\n m.put(\"organisation_0_0.csv\", new Organisation().toFileColumnMapping());\n\n\n\n m.put(\n\n \"comment_hasCreator_person_0_0.csv\",\n\n new CommentHasCreatorPerson().toFileColumnMapping());\n\n m.put(\"comment_hasTag_tag_0_0.csv\", new CommentHasTagTag().toFileColumnMapping());\n\n m.put(\n\n \"comment_isLocatedIn_place_0_0.csv\",\n\n new CommentIsLocatedInPlace().toFileColumnMapping());\n\n m.put(\"comment_replyOf_comment_0_0.csv\", new CommentReplyOfComment().toFileColumnMapping());\n\n m.put(\"comment_replyOf_post_0_0.csv\", new CommentReplyOfPost().toFileColumnMapping());\n\n m.put(\"forum_containerOf_post_0_0.csv\", new ForumContainerOfPost().toFileColumnMapping());\n\n m.put(\"forum_hasMember_person_0_0.csv\", new ForumHasMemberPerson().toFileColumnMapping());\n\n m.put(\n\n \"forum_hasModerator_person_0_0.csv\",\n\n new 
ForumHasModeratorPerson().toFileColumnMapping());\n\n m.put(\"forum_hasTag_tag_0_0.csv\", new ForumHasTagTag().toFileColumnMapping());\n\n m.put(\"person_hasInterest_tag_0_0.csv\", new PersonHasInterestTag().toFileColumnMapping());\n\n m.put(\n\n \"person_isLocatedIn_place_0_0.csv\",\n\n new PersonIsLocatedInPlace().toFileColumnMapping());\n\n m.put(\"person_knows_person_0_0.csv\", new PersonKnowsPerson().toFileColumnMapping());\n\n m.put(\"person_likes_comment_0_0.csv\", new PersonLikesComment().toFileColumnMapping());\n\n m.put(\"person_likes_post_0_0.csv\", new PersonLikesPost().toFileColumnMapping());\n\n m.put(\n\n \"person_studyAt_organisation_0_0.csv\",\n\n new PersonStudyAtOrganisation().toFileColumnMapping());\n\n m.put(\n\n \"person_workAt_organisation_0_0.csv\",\n\n new PersonWorkAtOrganisation().toFileColumnMapping());\n\n m.put(\"post_hasCreator_person_0_0.csv\", new PostHasCreatorPerson().toFileColumnMapping());\n\n m.put(\"post_hasTag_tag_0_0.csv\", new PostHasTagTag().toFileColumnMapping());\n\n m.put(\"post_isLocatedIn_place_0_0.csv\", new PostIsLocatedInPlace().toFileColumnMapping());\n\n m.put(\n\n \"organisation_isLocatedIn_place_0_0.csv\",\n\n new OrganisationIsLocatedInPlace().toFileColumnMapping());\n\n m.put(\"place_isPartOf_place_0_0.csv\", new PlaceIsPartOfPlace().toFileColumnMapping());\n\n m.put(\"tag_hasType_tagclass_0_0.csv\", new TagHasTypeTagclass().toFileColumnMapping());\n\n m.put(\n\n \"tagclass_isSubclassOf_tagclass_0_0.csv\",\n\n new TagclassIsSubclassOfTagclass().toFileColumnMapping());\n\n\n\n System.out.println(new ObjectMapper().writeValueAsString(m));\n\n }\n", "file_path": "interactive_engine/data_load_tools/src/test/java/com/alibaba/maxgraph/dataload/ldbc/jsongen/Main.java", "rank": 84, "score": 226985.59564313065 }, { "content": " public StdVector<GSVertexArray<DATA_T>> data() {\n\n if (Objects.isNull(ffiLabeledVertexDataContext)) {\n\n return null;\n\n }\n\n return ffiLabeledVertexDataContext.data();\n", "file_path": 
"analytical_engine/java/grape-jdk/src/main/java/com/alibaba/graphscope/context/LabeledVertexDataContext.java", "rank": 85, "score": 226660.33167227416 }, { "content": "#[inline]\n\npub fn check_has_network_error(ch_id: u128) -> Option<Vec<SocketAddr>> {\n\n if let Ok(ref mut lock) = NETWORK_SEND_ERRORS.try_lock() {\n\n lock.remove(&ch_id)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/network/src/send/mod.rs", "rank": 86, "score": 226446.95618631388 }, { "content": " public static void main(String[] args) throws ParseException, IOException {\n\n Options options = new Options();\n\n options.addOption(\n\n Option.builder(\"c\")\n\n .longOpt(\"command\")\n\n .hasArg()\n\n .argName(\"COMMAND\")\n\n .desc(\"supported COMMAND: ingest / commit\")\n\n .build());\n\n options.addOption(\n\n Option.builder(\"d\")\n\n .longOpt(\"dir\")\n\n .hasArg()\n\n .argName(\"HDFS_PATH\")\n\n .desc(\"data directory of HDFS. e.g., hdfs://1.2.3.4:9000/build_output\")\n\n .build());\n\n options.addOption(\n\n Option.builder(\"oss\")\n\n .longOpt(\"ossconfigfile\")\n\n .hasArg()\n\n .argName(\"OSS_CONFIG_FILE\")\n\n .desc(\"OSS Config File. e.g., config.init\")\n\n .build());\n\n options.addOption(\n\n Option.builder(\"u\")\n\n .longOpt(\"uniquepath\")\n\n .hasArg()\n\n .argName(\"UNIQUE_PATH\")\n\n .desc(\"unique path from uuid. 
e.g., unique_path\")\n\n .build());\n\n options.addOption(Option.builder(\"h\").longOpt(\"help\").desc(\"print this message\").build());\n\n CommandLineParser parser = new DefaultParser();\n\n CommandLine commandLine = parser.parse(options, args);\n\n String command = commandLine.getOptionValue(\"command\");\n\n String configPath = null;\n\n String uniquePath = null;\n\n boolean isFromOSS = false;\n\n if (commandLine.hasOption(\"oss\")) {\n\n isFromOSS = true;\n\n configPath = commandLine.getOptionValue(\"oss\");\n\n uniquePath = commandLine.getOptionValue(\"u\");\n\n } else {\n\n configPath = commandLine.getOptionValue(\"dir\");\n\n }\n\n\n\n if (commandLine.hasOption(\"help\") || command == null) {\n\n printHelp(options);\n\n } else if (command.equalsIgnoreCase(\"ingest\")) {\n\n ingest(configPath, isFromOSS, uniquePath);\n\n } else if (command.equalsIgnoreCase(\"commit\")) {\n\n commit(configPath, isFromOSS, uniquePath);\n\n } else if (command.equalsIgnoreCase(\"ingestAndCommit\")) {\n\n ingest(configPath, isFromOSS, uniquePath);\n\n commit(configPath, isFromOSS, uniquePath);\n\n } else {\n\n printHelp(options);\n\n }\n", "file_path": "interactive_engine/data_load_tools/src/main/java/com/alibaba/maxgraph/dataload/LoadTool.java", "rank": 87, "score": 225012.36845461995 }, { "content": " @CXXReference\n", "file_path": "analytical_engine/java/grape-jdk/src/main/java/com/alibaba/graphscope/context/ffi/FFIVertexDataContext.java", "rank": 88, "score": 224997.82945332577 }, { "content": "#[inline]\n\nfn try_read<R: io::Read>(reader: &mut R, bytes: &mut [u8]) -> io::Result<usize> {\n\n loop {\n\n match reader.read(bytes) {\n\n Ok(size) => return Ok(size),\n\n Err(err) => match err.kind() {\n\n io::ErrorKind::WouldBlock | io::ErrorKind::TimedOut => return Ok(0),\n\n io::ErrorKind::Interrupted => (),\n\n _ => return Err(err),\n\n },\n\n }\n\n }\n\n}\n\n\n\n#[enum_dispatch(MessageDecoder)]\n\npub enum GeneralDecoder {\n\n Simple(SimpleBlockDecoder),\n\n 
Reentrant(ReentrantDecoder),\n\n ReentrantSlab(ReentrantSlabDecoder),\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/network/src/receive/decode.rs", "rank": 89, "score": 224888.69363795477 }, { "content": " private void output() {\n\n if (!checkPrecondition()) {\n\n return;\n\n }\n\n outputOperators();\n\n outputGraphType();\n", "file_path": "analytical_engine/java/grape-runtime/src/main/java/com/alibaba/graphscope/annotation/GraphScopeAnnotationProcessor.java", "rank": 90, "score": 224752.95741028056 }, { "content": "pub trait ScopeStreamPush<T: Data> {\n\n fn port(&self) -> Port;\n\n\n\n fn push(&mut self, tag: &Tag, msg: T) -> IOResult<()>;\n\n\n\n fn push_last(&mut self, msg: T, end: EndOfScope) -> IOResult<()>;\n\n\n\n fn try_push_iter<I: Iterator<Item = T>>(&mut self, tag: &Tag, iter: &mut I) -> IOResult<()> {\n\n for x in iter {\n\n self.push(tag, x)?;\n\n }\n\n Ok(())\n\n }\n\n\n\n fn notify_end(&mut self, end: EndOfScope) -> IOResult<()>;\n\n\n\n fn flush(&mut self) -> IOResult<()>;\n\n\n\n fn close(&mut self) -> IOResult<()>;\n\n}\n\n\n", "file_path": "interactive_engine/executor/engine/pegasus/pegasus/src/communication/decorator/mod.rs", "rank": 91, "score": 224623.1965470292 }, { "content": "/// The register_type fn is used to register types used in DynType for serializing and deserializing;\n\n///\n\n/// # Examples\n\n/// ```\n\n///\n\n/// use dyn_type::object::Object;\n\n/// use dyn_type::{register_type, OwnedOrRef};\n\n/// use pegasus_common::codec::{Encode, Decode};\n\n///\n\n/// let dyn_ty_obj = vec![0_u64, 1, 2, 3];\n\n/// let obj = Object::DynOwned(Box::new(dyn_ty_obj));\n\n/// register_type::<Vec<u64>>().expect(\"register type failed\");\n\n/// let mut bytes = vec![];\n\n/// obj.write_to(&mut bytes).unwrap();\n\n///\n\n/// let mut reader = &bytes[0..];\n\n/// let de = Object::read_from(&mut reader).unwrap();\n\n/// let dyn_ty_obj_de: OwnedOrRef<Vec<u64>> = de.get().unwrap();\n\n/// assert_eq!(dyn_ty_obj_de.as_slice(), 
&[0_u64, 1, 2, 3]);\n\n/// ```\n\n///\n\npub fn register_type<T: 'static + Decode + DynType>() -> io::Result<()> {\n\n let ty_id = TypeId::of::<T>();\n\n let ph: PhImpl<T> = PhImpl { _ph: std::marker::PhantomData };\n\n if let Ok(mut table) = TYPE_TABLE.write() {\n\n table.insert(ty_id, Box::new(ph));\n\n Ok(())\n\n } else {\n\n Err(io::Error::new(io::ErrorKind::Other, \"lock poisoned\"))\n\n }\n\n}\n\n\n", "file_path": "interactive_engine/executor/common/dyn_type/src/serde_dyn.rs", "rank": 92, "score": 223472.73312294483 }, { "content": "#[inline]\n\nfn encode_storage_labels(labels: &Vec<LabelId>) -> GraphProxyResult<Vec<StoreLabelId>> {\n\n labels\n\n .iter()\n\n .map(|label| encode_storage_label(*label))\n\n .collect::<Result<Vec<StoreLabelId>, _>>()\n\n}\n\n\n", "file_path": "interactive_engine/executor/ir/graph_proxy/src/adapters/gs_store/read_graph.rs", "rank": 93, "score": 223454.00236726555 }, { "content": " public static void main(String[] args)\n\n throws IOException, ClassNotFoundException, InterruptedException {\n\n String propertiesFile = args[0];\n\n Properties properties = new Properties();\n\n try (InputStream is = new FileInputStream(propertiesFile)) {\n\n properties.load(is);\n\n }\n\n String inputPath = properties.getProperty(INPUT_PATH);\n\n String outputPath = properties.getProperty(OUTPUT_PATH);\n\n String columnMappingConfigStr = properties.getProperty(COLUMN_MAPPING_CONFIG);\n\n String graphEndpoint = properties.getProperty(GRAPH_ENDPOINT);\n\n MaxGraphClient client = MaxGraphClient.newBuilder().setHosts(graphEndpoint).build();\n\n ObjectMapper objectMapper = new ObjectMapper();\n\n Map<String, FileColumnMapping> columnMappingConfig =\n\n objectMapper.readValue(\n\n columnMappingConfigStr,\n\n new TypeReference<Map<String, FileColumnMapping>>() {});\n\n\n\n List<DataLoadTarget> targets = new ArrayList<>();\n\n for (FileColumnMapping fileColumnMapping : columnMappingConfig.values()) {\n\n targets.add(\n\n DataLoadTarget.newBuilder()\n\n 
.setLabel(fileColumnMapping.getLabel())\n\n .setSrcLabel(fileColumnMapping.getSrcLabel())\n\n .setDstLabel(fileColumnMapping.getDstLabel())\n\n .build());\n\n }\n\n GraphSchema schema = client.prepareDataLoad(targets);\n\n String schemaJson = GraphSchemaMapper.parseFromSchema(schema).toJsonString();\n\n int partitionNum = client.getPartitionNum();\n\n\n\n Map<String, ColumnMappingInfo> columnMappingInfos = new HashMap<>();\n\n columnMappingConfig.forEach(\n\n (fileName, fileColumnMapping) -> {\n\n columnMappingInfos.put(fileName, fileColumnMapping.toColumnMappingInfo(schema));\n\n });\n\n String ldbcCustomize = properties.getProperty(LDBC_CUSTOMIZE, \"true\");\n\n long splitSize = Long.valueOf(properties.getProperty(SPLIT_SIZE, \"256\")) * 1024 * 1024;\n\n boolean loadAfterBuild =\n\n properties.getProperty(LOAD_AFTER_BUILD, \"false\").equalsIgnoreCase(\"true\");\n\n boolean skipHeader = properties.getProperty(SKIP_HEADER, \"true\").equalsIgnoreCase(\"true\");\n\n Configuration conf = new Configuration();\n\n conf.setBoolean(\"mapreduce.map.speculative\", false);\n\n conf.setBoolean(\"mapreduce.reduce.speculative\", false);\n\n conf.setLong(CombineTextInputFormat.SPLIT_MINSIZE_PERNODE, splitSize);\n\n conf.setLong(CombineTextInputFormat.SPLIT_MINSIZE_PERRACK, splitSize);\n\n conf.setStrings(SCHEMA_JSON, schemaJson);\n\n String mappings = objectMapper.writeValueAsString(columnMappingInfos);\n\n conf.setStrings(COLUMN_MAPPINGS, mappings);\n\n conf.setBoolean(LDBC_CUSTOMIZE, ldbcCustomize.equalsIgnoreCase(\"true\"));\n\n conf.set(SEPARATOR, properties.getProperty(SEPARATOR, \"\\\\|\"));\n\n conf.setBoolean(SKIP_HEADER, skipHeader);\n\n Job job = Job.getInstance(conf, \"build graph data\");\n\n job.setJarByClass(OfflineBuild.class);\n\n job.setMapperClass(DataBuildMapper.class);\n\n job.setPartitionerClass(DataBuildPartitioner.class);\n\n job.setReducerClass(DataBuildReducer.class);\n\n job.setNumReduceTasks(partitionNum);\n\n 
job.setOutputKeyClass(BytesWritable.class);\n\n job.setOutputValueClass(BytesWritable.class);\n\n job.setInputFormatClass(CombineTextInputFormat.class);\n\n CombineTextInputFormat.setMaxInputSplitSize(job, splitSize);\n\n LazyOutputFormat.setOutputFormatClass(job, SstOutputFormat.class);\n\n FileInputFormat.addInputPath(job, new Path(inputPath));\n\n FileInputFormat.setInputDirRecursive(job, true);\n\n String uniquePath = UuidUtils.getBase64UUIDString();\n\n Path outputDir = new Path(outputPath, uniquePath);\n\n FileOutputFormat.setOutputPath(job, outputDir);\n\n if (!job.waitForCompletion(true)) {\n\n System.exit(1);\n\n }\n\n FileSystem fs = outputDir.getFileSystem(job.getConfiguration());\n\n String dataPath = fs.makeQualified(outputDir).toString();\n\n\n\n Map<String, String> outputMeta = new HashMap<>();\n\n outputMeta.put(\"endpoint\", graphEndpoint);\n\n outputMeta.put(\"schema\", schemaJson);\n\n outputMeta.put(\"mappings\", mappings);\n\n outputMeta.put(\"datapath\", dataPath);\n\n outputMeta.put(\"unique_path\", uniquePath);\n\n\n\n FSDataOutputStream os = fs.create(new Path(outputDir, \"META\"));\n\n os.writeUTF(objectMapper.writeValueAsString(outputMeta));\n\n os.flush();\n\n os.close();\n\n\n\n if (loadAfterBuild) {\n\n logger.info(\"start ingesting data\");\n\n client.ingestData(dataPath);\n\n\n\n logger.info(\"commit bulk load\");\n\n Map<Long, DataLoadTarget> tableToTarget = new HashMap<>();\n\n for (ColumnMappingInfo columnMappingInfo : columnMappingInfos.values()) {\n\n long tableId = columnMappingInfo.getTableId();\n\n int labelId = columnMappingInfo.getLabelId();\n\n GraphElement graphElement = schema.getElement(labelId);\n\n String label = graphElement.getLabel();\n\n DataLoadTarget.Builder builder = DataLoadTarget.newBuilder();\n\n builder.setLabel(label);\n\n if (graphElement instanceof GraphEdge) {\n\n builder.setSrcLabel(\n\n schema.getElement(columnMappingInfo.getSrcLabelId()).getLabel());\n\n builder.setDstLabel(\n\n 
schema.getElement(columnMappingInfo.getDstLabelId()).getLabel());\n\n }\n\n tableToTarget.put(tableId, builder.build());\n\n }\n\n client.commitDataLoad(tableToTarget, uniquePath);\n\n }\n", "file_path": "interactive_engine/data_load_tools/src/main/java/com/alibaba/maxgraph/dataload/databuild/OfflineBuild.java", "rank": 94, "score": 223086.0953431963 }, { "content": "/// Pattern from ldbc schema file and build from pb::Pattern message\n\n/// ```text\n\n/// University\n\n/// study at/ \\study at\n\n/// Person -> Person\n\n/// ```\n\npub fn build_ldbc_pattern_from_pb_case2() -> IrPatternResult<Pattern> {\n\n set_ldbc_graph_schema();\n\n // define pb pattern message\n\n let expand_opr1 = pb::EdgeExpand {\n\n v_tag: None,\n\n direction: 1, // in\n\n params: Some(query_params(vec![15.into()], vec![], None)), //STUDYAT\n\n expand_opt: 0,\n\n alias: None,\n\n };\n\n let expand_opr2 = pb::EdgeExpand {\n\n v_tag: None,\n\n direction: 1, // in\n\n params: Some(query_params(vec![15.into()], vec![], None)), //STUDYAT\n\n expand_opt: 0,\n\n alias: None,\n\n };\n\n let expand_opr3 = pb::EdgeExpand {\n\n v_tag: None,\n\n direction: 0, // out\n", "file_path": "interactive_engine/executor/ir/core/tests/common/pattern_cases.rs", "rank": 95, "score": 222986.5842921824 }, { "content": "/// Pattern from ldbc schema file and build from pb::Pattern message\n\n/// ```text\n\n/// Person\n\n/// ```\n\n/// where it is a single vertex pattern without any edges\n\n///\n\n/// Person is the vertex label\n\n///\n\npub fn build_ldbc_pattern_from_pb_case0() -> IrPatternResult<Pattern> {\n\n set_ldbc_graph_schema();\n\n // define pb pattern message\n\n let vertx_opr = pb::Select { predicate: Some(str_to_expr_pb(\"@.~label==1\".to_string()).unwrap()) };\n\n let pattern = pb::Pattern {\n\n sentences: vec![pb::pattern::Sentence {\n\n start: Some(TAG_A.into()),\n\n binders: vec![pb::pattern::Binder { item: Some(pb::pattern::binder::Item::Select(vertx_opr)) }],\n\n end: Some(TAG_A.into()),\n\n join_kind: 
0,\n\n }],\n\n };\n\n let plan_meta = gen_plan_meta(&pattern);\n\n Pattern::from_pb_pattern(&pattern, &plan_meta)\n\n}\n\n\n", "file_path": "interactive_engine/executor/ir/core/tests/common/pattern_cases.rs", "rank": 96, "score": 222986.5842921824 }, { "content": "/// Pattern from ldbc schema file and build from pb::Pattern message\n\n/// 4 Persons know each other\n\npub fn build_ldbc_pattern_from_pb_case3() -> IrPatternResult<Pattern> {\n\n set_ldbc_graph_schema();\n\n // define pb pattern message\n\n let expand_opr = pb::EdgeExpand {\n\n v_tag: None,\n\n direction: 0, // out\n\n params: Some(query_params(vec![12.into()], vec![], None)), //KNOWS\n\n expand_opt: 0,\n\n alias: None,\n\n };\n\n let pattern = pb::Pattern {\n\n sentences: vec![\n\n pb::pattern::Sentence {\n\n start: Some(TAG_A.into()),\n\n binders: vec![pb::pattern::Binder {\n\n item: Some(pb::pattern::binder::Item::Edge(expand_opr.clone())),\n\n }],\n\n end: Some(TAG_B.into()),\n\n join_kind: 0,\n\n },\n", "file_path": "interactive_engine/executor/ir/core/tests/common/pattern_cases.rs", "rank": 97, "score": 222986.5842921824 }, { "content": "/// Pattern from ldbc schema file and build from pb::Pattern message\n\n/// ```text\n\n/// Person\n\n/// knows/ \\knows\n\n/// Person -> Person\n\n/// ```\n\n/// knows is the edge label\n\n///\n\n/// Person is the vertex label\n\npub fn build_ldbc_pattern_from_pb_case1() -> IrPatternResult<Pattern> {\n\n set_ldbc_graph_schema();\n\n // define pb pattern message\n\n let expand_opr = pb::EdgeExpand {\n\n v_tag: None,\n\n direction: 0, // out\n\n params: Some(query_params(vec![12.into()], vec![], None)), // KNOWS\n\n expand_opt: 0,\n\n alias: None,\n\n };\n\n let pattern = pb::Pattern {\n\n sentences: vec![\n\n pb::pattern::Sentence {\n\n start: Some(TAG_A.into()),\n\n binders: vec![pb::pattern::Binder {\n\n item: Some(pb::pattern::binder::Item::Edge(expand_opr.clone())),\n\n }],\n\n end: Some(TAG_B.into()),\n\n join_kind: 0,\n\n },\n", "file_path": 
"interactive_engine/executor/ir/core/tests/common/pattern_cases.rs", "rank": 98, "score": 222986.5842921824 }, { "content": "/// Pattern from ldbc schema file and build from pb::Pattern message\n\n/// ```text\n\n/// City\n\n/// lives/ \\lives\n\n/// Person Person\n\n/// likes \\ / has creator\n\n/// Comment\n\n/// ```\n\npub fn build_ldbc_pattern_from_pb_case4() -> IrPatternResult<Pattern> {\n\n set_ldbc_graph_schema();\n\n // define pb pattern message\n\n let expand_opr1 = pb::EdgeExpand {\n\n v_tag: None,\n\n direction: 0, // out\n\n params: Some(query_params(vec![11.into()], vec![], None)), //ISLOCATEDIN\n\n expand_opt: 0,\n\n alias: None,\n\n };\n\n let expand_opr2 = pb::EdgeExpand {\n\n v_tag: None,\n\n direction: 0, // out\n\n params: Some(query_params(vec![11.into()], vec![], None)), //ISLOCATEDIN\n\n expand_opt: 0,\n\n alias: None,\n\n };\n\n let expand_opr3 = pb::EdgeExpand {\n\n v_tag: None,\n\n direction: 0, // out\n", "file_path": "interactive_engine/executor/ir/core/tests/common/pattern_cases.rs", "rank": 99, "score": 222986.5842921824 } ]
Rust
2016/day14/src/main.rs
RussellChamp/AdventOfCode
492784bd690d7323990dcddd64b20459bf3263ae
extern crate crypto; use crypto::md5::Md5; use crypto::digest::Digest; #[derive(Debug)] struct Key { idx: u64, letters: Vec<char>, is_valid: bool, hash: String, } fn repeat_letters(line: &String, size: u64, only_first: bool) -> Vec<char> { let mut result: Vec<char> = vec![]; let mut count = 1; let bytes = line.as_bytes(); 'outer: for idx in 0..line.len() - 2 { if bytes[idx] == bytes[idx + 1] { count = count + 1; if count == size && result.iter().position(|&c| c == (bytes[idx] as char)) == None { result.push(bytes[idx] as char); if only_first { break 'outer; } } } else { count = 1; } } result } fn purge_keys(keys: Vec<Key>, idx: u64, threshold: u64) -> Vec<Key> { let old_len = keys.len(); let new_keys: Vec<Key> = keys.into_iter() .filter(|key| key.is_valid || key.idx + threshold > idx) .collect::<Vec<_>>(); if old_len > new_keys.len() { } new_keys } fn validate_keys(keys: &mut Vec<Key>, key_char: char, idx: u64, threshold: u64) { let mut count = 0; keys.into_iter() .map(|key| { if !key.is_valid && key.idx + threshold > idx && key.letters.iter().position(|&c| c == key_char) != None { key.is_valid = true; count = count + 1; } key }) .collect::<Vec<_>>(); if count > 0 { } } fn we_are_done(keys: &Vec<Key>, pad_size: usize) -> bool { keys.iter().take_while(|key| key.is_valid ).collect::<Vec<_>>().len() >= pad_size } const PAD_SIZE: usize = 64; const THRESHOLD: u64 = 1000; fn main() { println!("I did something wrong on this one and am not getting the correct answer. 
:("); let mut keys: Vec<Key> = vec![]; let mut hasher = Md5::new(); let salt = "ahsbgdzn"; for idx in 0..std::u64::MAX { hasher.input(format!("{}{}", salt, idx).as_bytes()); let mut output = [0; 16]; hasher.result(&mut output); let output_str: String = output.iter().map(|&c| format!("{:x}", c)).collect(); for r in repeat_letters(&output_str, 5, false) { keys = purge_keys(keys, idx, THRESHOLD); validate_keys(&mut keys, r, idx, THRESHOLD); } let threepeats = repeat_letters(&output_str, 3, true); if threepeats.len() > 0 { let new_key = Key { idx: idx, letters: threepeats, is_valid: false, hash: String::from(output_str) }; keys.push(new_key); } if we_are_done(&keys, PAD_SIZE) { if keys.len() >= PAD_SIZE { println!("We're done! Finished on idx {}", idx); println!("The {}th key is {:?}", PAD_SIZE, keys.iter().nth(PAD_SIZE).unwrap()); } else { println!("Uh oh! We didn't find enough keys. :("); } break; } hasher.reset(); } }
extern crate crypto; use crypto::md5::Md5; use crypto::digest::Digest; #[derive(Debug)] struct Key { idx: u64, letters: Vec<char>, is_valid: bool, hash: String, } fn repeat_letters(line: &String, size: u64, only_first: bool) -> Vec<char> { let mut result: Vec<char> = vec![]; let mut count = 1; let bytes = line.as_bytes(); 'outer: for idx in 0..line.len() - 2 { if bytes[idx] == bytes[idx + 1] { count = count + 1; if count == size && result.iter().position(|&c| c == (bytes[idx] as char)) == None { result.push(bytes[idx] as char); if only_first { break 'outer; } } } else { count = 1; } } result } fn purge_keys(keys: Vec<Key>, idx: u64, threshold: u64) -> Vec<Key> { let old_len = keys.len(); let new_keys: Vec<Key> = keys.into_iter() .filter(|key| key.is_valid || key.idx + threshold > idx) .collect::<Vec<_>>(); if old_len > new_keys.len() { } new_keys } fn validate_keys(keys: &mut Vec<Key>, key_char: char, idx: u64, threshold: u64) { let mut count = 0; keys.into_iter() .map(|key| { if !key.is_valid && key.idx + threshold > idx && key.letters.iter().position(|&c| c == key_char) != None { key.is_valid = true; count = count + 1; } key }) .collect::<Vec<_>>(); if count > 0 { } } fn we_are_done(keys: &Vec<Key>, pad_size: usize) -> bool { keys.iter().take_while(|key| key.is_valid ).collect::<Vec<_>>().len() >= pad_size } const PAD_SIZE: usize = 64; const THRESHOLD: u64 = 1000; fn main() { println!("I did something wrong on this one and am not getting the correct answer. 
:("); let mut keys: Vec<Key> = vec![]; let mut hasher = Md5::new(); let salt = "ahsbgdzn"; for idx in 0..std::u64::MAX { hasher.input(format!("{}{}", salt, idx).as_bytes()); let mut output = [0; 16]; hasher.result(&mut output); let output_str: String = output.iter().map(|&c| format!("{:x}", c)).collect(); for r in repeat_letters(&output_str, 5, false) { keys = purge_keys(keys, idx, THRESHOLD); validate_keys(&mut keys, r, idx, THRESHOLD); } let threepeats = repeat_letters(&output_str, 3, true); if threepeats.len() > 0 { let new_key = Key { idx: idx, letters: threepeats, is_valid: false, hash: String::from(output_str) }; keys.push(new_key); } if we_are_done(&keys, PAD_SIZ
E) { if keys.len() >= PAD_SIZE { println!("We're done! Finished on idx {}", idx); println!("The {}th key is {:?}", PAD_SIZE, keys.iter().nth(PAD_SIZE).unwrap()); } else { println!("Uh oh! We didn't find enough keys. :("); } break; } hasher.reset(); } }
function_block-function_prefixed
[ { "content": "fn build_maze(maze: &mut Vec<Vec<(bool, u32)>>, key: u32) {\n\n for row in 0..MAZE_HEIGHT {\n\n for col in 0..MAZE_WIDTH {\n\n match get_space(col as u32, row as u32, key) {\n\n Space::Open => maze[row][col] = (true, u32::max_value()),\n\n Space::Wall => maze[row][col] = (false, 0),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "2016/day13/src/main.rs", "rank": 4, "score": 224290.56469377206 }, { "content": "fn bit_string(bits: &Vec<bool>) -> String {\n\n bits.into_iter()\n\n .map(|b|\n\n match *b {\n\n true => '1',\n\n false => '0'\n\n }\n\n )\n\n .collect::<String>()\n\n}\n\n\n", "file_path": "2016/day16/src/main.rs", "rank": 5, "score": 198393.7420760652 }, { "content": "fn walk_maze(maze: &mut Vec<Vec<(bool, u32)>>, pos: (usize, usize), step: u32) {\n\n if maze[pos.0][pos.1].0 == false /* if position is a wall */\n\n || step >= maze[pos.0][pos.1].1 /* or position has a better step value */ {\n\n return; // abort now\n\n }\n\n // otherwise\n\n // if this space is within MAX_STEPS and hasn't yet been counted\n\n if maze[pos.0][pos.1].1 == u32::max_value() { //if we never visited this space before\n\n if step <= MAX_STEPS {\n\n unsafe {\n\n COUNT = COUNT + 1;\n\n }\n\n }\n\n }\n\n else if maze[pos.0][pos.1].1 > MAX_STEPS && step <= MAX_STEPS {\n\n unsafe {\n\n COUNT = COUNT + 1;\n\n }\n\n }\n\n\n", "file_path": "2016/day13/src/main.rs", "rank": 6, "score": 196358.08545265553 }, { "content": "fn swap_letter(passcode: String, from: char, to: char) -> String {\n\n let pos_from = passcode.chars().position(|c| c == from).unwrap();\n\n let pos_to = passcode.chars().position(|c| c == to).unwrap();\n\n swap_position(passcode, pos_from, pos_to)\n\n}\n\n\n", "file_path": "2016/day21/src/main.rs", "rank": 7, "score": 188156.9571742339 }, { "content": "fn rotate_letter(passcode: String, from: char) -> String {\n\n //rotate to the right based on 1 + idx of letter + 1 if idx > 3\n\n let pos = passcode.chars().position(|c| c == from).unwrap();\n\n if pos > 3 {\n\n 
return rotate_position(passcode, Direction::Right, 2 + pos);\n\n } else {\n\n return rotate_position(passcode, Direction::Right, 1 + pos);\n\n }\n\n}\n\n\n", "file_path": "2016/day21/src/main.rs", "rank": 8, "score": 180269.8580395009 }, { "content": "fn get_count(input: &String) -> usize {\n\n let re_letters = Regex::new(r\"^[^\\(\\)]*$\").unwrap(); //I'd prefer to make this static but don't know how\n\n if re_letters.is_match(&input) { //if there are no parens\n\n return input.len(); //return the length\n\n }\n\n let re = Regex::new(r\"^([:alpha:]*)\\((\\d+)x(\\d+)\\)(.*)$\").unwrap();\n\n match re.captures(input) {\n\n None => println!(\"Found nothing!\"),\n\n Some(cap) => {\n\n let letters = cap.at(1).unwrap().to_string();\n\n let count: usize = cap.at(2).unwrap().parse().unwrap();\n\n let times: usize = cap.at(3).unwrap().parse().unwrap();\n\n let subslice = &cap.at(4).unwrap()[0..count].to_string();\n\n let therest = &cap.at(4).unwrap()[count..].to_string();\n\n\n\n //println!(\"Found: {} - {},{}\", cap.at(1).unwrap(), cap.at(2).unwrap(), cap.at(3).unwrap()),\n\n println!(\"({},{}) - {}x {}\", count, times, times, subslice);\n\n return get_count(&letters) + times * get_count(subslice) + get_count(therest);\n\n }\n\n }\n\n 0\n\n}\n\n\n", "file_path": "2016/day09/src/main.rs", "rank": 9, "score": 177443.4212874909 }, { "content": "fn un_rotate_letter(passcode: String, from: char) -> String {\n\n //find the letter\n\n passcode\n\n}\n\n\n", "file_path": "2016/day21/src/main.rs", "rank": 10, "score": 177325.43936056504 }, { "content": "fn get_discs(input: &String) -> Vec<Disc> {\n\n let mut discs: Vec<Disc> = vec![];\n\n\n\n let re = Regex::new(r\"Disc #(\\d+) has (\\d+) positions; at time=0, it is at position (\\d+)\\.\").unwrap();\n\n //read all discs\n\n for line in input.lines() {\n\n let cap = re.captures(line).unwrap();\n\n discs.push(Disc {\n\n id: cap.at(1).unwrap().parse().unwrap(),\n\n pos: cap.at(2).unwrap().parse().unwrap(),\n\n start: 
cap.at(3).unwrap().parse().unwrap()\n\n });\n\n }\n\n //println!(\"Discs: {:?}\", discs);\n\n discs\n\n}\n\n\n", "file_path": "2016/day15/src/main.rs", "rank": 11, "score": 167067.10757805506 }, { "content": "fn gen_data(bits: &mut Bits, size: usize) {\n\n //println!(\"[{:03}] => {}\", bits.len(), bit_string(&bits));\n\n if bits.len() >= size {\n\n //println!(\"Drained ->\");\n\n bits.drain(size..).collect::<Vec<_>>();\n\n //println!(\"[{:03}] => {}\", bits.len(), bit_string(&bits));\n\n }\n\n else {\n\n let cur_len = bits.len();\n\n //add a buffer 0\n\n bits.push(false);\n\n //add the reverse copy\n\n for i in (0..cur_len).rev() {\n\n let bit = bits[i];\n\n //print!(\"{}.\", bits[i]);\n\n bits.push(!bit);\n\n }\n\n gen_data(bits, size); //until we get the appropriate size\n\n }\n\n}\n\n\n", "file_path": "2016/day16/src/main.rs", "rank": 12, "score": 163601.429390295 }, { "content": "fn uglier_match(text: &String) -> Vec<String> {\n\n let mut ret: Vec<String> = Vec::new();\n\n for idx in 0..text.len() - 2 {\n\n if text.chars().nth(idx).unwrap() == text.chars().nth(idx+2).unwrap() {\n\n ret.push(text[idx..idx+3].to_string());\n\n }\n\n }\n\n ret\n\n}\n\n\n", "file_path": "2016/day07/src/main.rs", "rank": 13, "score": 156002.21672404767 }, { "content": "fn print_maze(maze: &Vec<Vec<(bool, u32)>>) {\n\n print!(\" \"); //first row padding\n\n for col in 0..MAZE_WIDTH {\n\n print!(\"{:02} \", col % 100);\n\n }\n\n println!(\"\"); //newline\n\n for row in 0..MAZE_HEIGHT {\n\n print!(\"{:02}: \", row % 100);\n\n for col in 0..MAZE_WIDTH {\n\n if maze[row][col].0 == true {\n\n if maze[row][col].1 == u32::max_value() {\n\n print!(\".. 
\"); //unpassable\n\n }\n\n else {\n\n print!(\"{:02} \", maze[row][col].1 % 100); //step value\n\n }\n\n }\n\n else {\n\n print!(\"## \"); //it's a wall\n\n }\n\n }\n\n println!(\"\"); //print the new line\n\n }\n\n}\n\n\n", "file_path": "2016/day13/src/main.rs", "rank": 14, "score": 152561.00774521378 }, { "content": "fn ugly_match(text: &String) -> bool {\n\n if text.len() < 4 {\n\n return false\n\n }\n\n //check for ABBA pattern\n\n for idx in 0..text.len() - 3 {\n\n if text.chars().nth(idx).unwrap() == text.chars().nth(idx+3).unwrap() &&\n\n text.chars().nth(idx+1).unwrap() == text.chars().nth(idx+2).unwrap() &&\n\n text.chars().nth(idx).unwrap() != text.chars().nth(idx+1).unwrap() {\n\n return true\n\n }\n\n }\n\n return false\n\n}\n\n\n", "file_path": "2016/day07/src/main.rs", "rank": 15, "score": 147559.70851910597 }, { "content": "fn reverse(passcode: String, from: usize, to: usize) -> String {\n\n let mut new_passcode = String::from(&passcode[..from]);\n\n for c in passcode[from..to+1].chars().rev() {\n\n new_passcode.push(c);\n\n }\n\n new_passcode.push_str(&passcode[to+1..]);\n\n //println!(\"{} -> {}\", passcode, new_passcode);\n\n new_passcode\n\n}\n\n\n", "file_path": "2016/day21/src/main.rs", "rank": 16, "score": 136381.54865704753 }, { "content": "fn move_position(passcode: String, from: usize, to: usize) -> String {\n\n let mut new_passcode = String::new();\n\n if from < to {\n\n // [..from][from+1..to+1][from][to.1..]\n\n new_passcode.push_str(&passcode[..from]);\n\n new_passcode.push_str(&passcode[from+1..to+1]);\n\n new_passcode.push_str(&passcode[from..from+1]);\n\n new_passcode.push_str(&passcode[to+1..]);\n\n } else {\n\n // [..to][from][to..from][from+1..]\n\n new_passcode.push_str(&passcode[..to]);\n\n new_passcode.push_str(&passcode[from..from+1]);\n\n new_passcode.push_str(&passcode[to..from]);\n\n new_passcode.push_str(&passcode[from+1..]);\n\n }\n\n //println!(\"{} -> {}\", passcode, new_passcode);\n\n new_passcode\n\n}\n\n\n", 
"file_path": "2016/day21/src/main.rs", "rank": 17, "score": 134288.23470522824 }, { "content": "fn swap_position(passcode: String, from: usize, to: usize) -> String {\n\n let (low, high) = (cmp::min(from, to), cmp::max(from, to));\n\n let mut new_passcode = passcode[0..low].to_string();\n\n\n\n let low_char = passcode.chars().clone().nth(low).unwrap();\n\n let high_char = passcode.chars().clone().nth(high).unwrap();\n\n\n\n new_passcode.push(high_char);\n\n if low + 1 < high {\n\n new_passcode.push_str(&passcode[low+1..high]);\n\n }\n\n new_passcode.push(low_char);\n\n if high < passcode.len() {\n\n new_passcode.push_str(&passcode[high+1..]);\n\n }\n\n //println!(\"{} -> {}\", passcode, new_passcode);\n\n new_passcode\n\n}\n\n\n", "file_path": "2016/day21/src/main.rs", "rank": 18, "score": 134288.23470522824 }, { "content": "struct Output {\n\n id: i32, //the identifier for this output\n\n chips: Vec<i32>,\n\n}\n\n\n", "file_path": "2016/day10/src/main.rs", "rank": 19, "score": 130123.01401992005 }, { "content": "fn rotate_position(passcode: String, dir: Direction, value: usize) -> String {\n\n let mut new_passcode = String::new();\n\n let shift = value % passcode.len(); //unless we shift by too much\n\n match dir {\n\n Direction::Right => {\n\n //the last 'value' characters and then the rest\n\n let split_idx = passcode.len()-shift;\n\n new_passcode.push_str(&passcode[split_idx..]);\n\n new_passcode.push_str(&passcode[..split_idx]);\n\n },\n\n Direction::Left => {\n\n new_passcode.push_str(&passcode[shift..]);\n\n new_passcode.push_str(&passcode[..shift]);\n\n },\n\n }\n\n //println!(\"{} -({} {:?})-> {}\", passcode, value, dir, new_passcode);\n\n new_passcode\n\n}\n\n\n", "file_path": "2016/day21/src/main.rs", "rank": 21, "score": 121863.77049261832 }, { "content": "fn get_paths(passcode: &str) -> Vec<Path> {\n\n let pos = Pos { row: 0, col: 0 };\n\n\n\n let paths: Vec<Path> = recursive_step(passcode, pos, Vec::new());\n\n paths\n\n}\n\n\n", "file_path": 
"2016/day17/src/main.rs", "rank": 22, "score": 119016.45033614723 }, { "content": "type Path = Vec<char>;\n\n\n", "file_path": "2016/day17/src/main.rs", "rank": 23, "score": 116158.83728973658 }, { "content": "type Bits = Vec<bool>;\n\n\n", "file_path": "2016/day16/src/main.rs", "rank": 24, "score": 115975.69560431282 }, { "content": "fn is_valid(disc: &Disc, time: u32) -> bool {\n\n // a disk starts at position 'start'\n\n // and ticks up to maximum position 'pos' then loops back to the first position\n\n // a disk will be valid if it is at position 0 when hit\n\n // there is a delay of seconds equal to the disc layer, or \"id\"\n\n (disc.start + time + disc.id) % disc.pos == 0\n\n}\n\n\n", "file_path": "2016/day15/src/main.rs", "rank": 25, "score": 113744.56317208173 }, { "content": "fn part_1(input: &String) {\n\n let total = input.lines()\n\n .map(|l| l\n\n .trim()\n\n .split_whitespace()\n\n .map(|v| v.parse::<i32>().unwrap())\n\n .collect::<Vec<_>>())\n\n .filter(|v| v.iter().fold(0, |sum, x| sum + x) > 2 * v.iter().max().unwrap() )\n\n .collect::<Vec<_>>()\n\n .len();\n\n println!(\"Part 1: Total was {}\", total);\n\n}\n\n\n", "file_path": "2016/day03/src/main.rs", "rank": 26, "score": 101746.22684909924 }, { "content": "fn part_1(input: &String) {\n\n let set: Vec<char> = input.chars().collect();\n\n let mut idx: usize = 0;\n\n let mut output = String::new();\n\n let re = Regex::new(r\"\\((\\d+)x(\\d+)\\)\").unwrap();\n\n //step by single characters\n\n //when you find an open paren, look for the next closing paren and match to a regex like (x,y)\n\n //if it does not match, add contents to output and continue\n\n //if it matches, add the next 'x' characters to the output 'y' times and increase idx by 'x'\n\n //if not an open paren, add to output\n\n while idx < input.len() {\n\n let c = set[idx];\n\n match c {\n\n '(' => {\n\n let old_idx: usize = idx; //for when we have to colleect the slice\n\n loop {\n\n idx = idx + 1;\n\n match set[idx] {\n\n ')' => 
break,\n\n _ => {},\n", "file_path": "2016/day09/src/main.rs", "rank": 27, "score": 101746.22684909924 }, { "content": "fn part_1_2(input: &String) {\n\n let re = Regex::new(r\"([a-z\\\\-]+)-([:digit:]+)\\[([:lower:]+)\\]\").unwrap();\n\n\n\n let mut sum = 0;\n\n 'outer: for cap in re.captures_iter(input) {\n\n let room = Room { name: String::from(cap.at(1).unwrap_or(\"\")),\n\n sector: cap.at(2).unwrap_or(\"0\").parse().unwrap_or(0),\n\n check: String::from(cap.at(3).unwrap_or(\"\")),\n\n };\n\n //println!(\"{:?}\", room);\n\n let mut accumulator = [0; 26];\n\n for letter in room.name.replace(\"-\", \"\").chars() {\n\n let letter_idx = letter as u8 - 'a' as u8;\n\n accumulator[letter_idx as usize] = accumulator[letter_idx as usize] + 1;\n\n //println!(\"The letter is {} with a value {} - {:?}\", letter, letter_idx, accumulator);\n\n }\n\n\n\n //println!(\"Accumulated {:?}\", accumulator);\n\n\n\n let mut letters_to_match = room.check.len();\n", "file_path": "2016/day04/src/main.rs", "rank": 28, "score": 101746.22684909924 }, { "content": "fn part_1(input: &String) {\n\n let mut state = (1, 0);\n\n let mut pos = (0, 0);\n\n for item in input.trim().split(\", \") {\n\n let (dir, dist) = item.split_at(1);\n\n let dir: String = dir.to_string();\n\n let dist: i32 = dist.parse::<i32>().unwrap();\n\n\n\n state = turn(&state, &dir);\n\n pos = step(&state, &pos, dist);\n\n\n\n //println!(\"Moved {} {} to arrive at ({}, {})\", dist, dir, pos.0, pos.1);\n\n }\n\n println!(\"Part 1: Ended up at ({}, {}), {} away from the start\", pos.0, pos.1, pos.0.abs() + pos.1.abs());\n\n}\n\n\n", "file_path": "2016/day01/src/main.rs", "rank": 29, "score": 101746.22684909924 }, { "content": "fn part_2(input: &String) {\n\n //I am not skill enough with iterator adaptors to use .as_slice().chunks(3)\n\n //so we will do things the lazy way. 
sad\n\n let mut triangles = vec![vec![0; 3]; 3];\n\n let mut read_lines = 3;\n\n let mut total = 0; //total good triangles\n\n for line in input.lines().map(|l| l.to_string()).collect::<Vec<String>>().iter() {\n\n let values = line.trim().split_whitespace().map(|v| v.parse::<i32>().unwrap()).collect::<Vec<_>>();\n\n //println!(\"Values are {:?}\", values);\n\n triangles[0][3 - read_lines] = values[0];\n\n triangles[1][3 - read_lines] = values[1];\n\n triangles[2][3 - read_lines] = values[2];\n\n read_lines = read_lines - 1;\n\n if read_lines == 0 {\n\n //process our triangles\n\n let good = triangles\n\n .iter()\n\n .filter(|v| {\n\n //println!(\"filtering {:?}\", v);\n\n v.iter().fold(0, |sum, x| sum + x) > 2 * v.iter().max().unwrap() }\n", "file_path": "2016/day03/src/main.rs", "rank": 30, "score": 101746.22684909924 }, { "content": "fn part_2(input: &String) {\n\n println!(\"Part 2: Length is {}\", get_count(input));\n\n}\n\n\n", "file_path": "2016/day09/src/main.rs", "rank": 31, "score": 101746.22684909924 }, { "content": "fn part_1(input: &String) {\n\n\n\n let re = Regex::new(r\"\\[([:lower:]+)\\]\").unwrap();\n\n let mut count = 0;\n\n let mut rejected = 0;\n\n let mut none = 0;\n\n\n\n 'outer: for line in input.lines() {\n\n\n\n //hypernet sequences\n\n for cap in re.captures_iter(line) {\n\n let cap = cap.at(1).unwrap_or(\"\");\n\n //reject if cap contains an ABBA\n\n if ugly_match(&cap.to_string()) {\n\n //println!(\"- {}\", cap);\n\n rejected = rejected + 1;\n\n continue 'outer;\n\n }\n\n }\n\n\n", "file_path": "2016/day07/src/main.rs", "rank": 32, "score": 101746.22684909924 }, { "content": "fn part_2(input: &String) {\n\n let re = Regex::new(r\"\\[([:lower:]+)\\]\").unwrap();\n\n let mut count = 0;\n\n\n\n 'outer: for line in input.lines() {\n\n //println!(\"{}\", line);\n\n let mut abas: Vec<String> = Vec::new();\n\n for cap in re.replace_all(line, \",\").split(\",\") {\n\n for m in uglier_match(&cap.to_string()) {\n\n abas.push(m.to_string()); //add 
ABA to big list\n\n }\n\n }\n\n //check for related BAB\n\n for cap in re.captures_iter(line) {\n\n let text = cap.at(0).unwrap_or(\"\");\n\n for aba in abas.iter() {\n\n let mut bab: String = String::new();\n\n bab.push(aba.chars().nth(1).unwrap());\n\n bab.push(aba.chars().nth(0).unwrap());\n\n bab.push(aba.chars().nth(1).unwrap());\n", "file_path": "2016/day07/src/main.rs", "rank": 33, "score": 101746.22684909924 }, { "content": "fn part_2(input: &String) {\n\n let mut state = (0, 2);\n\n // model a 5x5 grid that looks like\n\n // 1\n\n // 2 3 4\n\n // 5 6 7 8 9\n\n // A B C\n\n // D\n\n // where blank spaces are invalid\n\n let valid_map = [\n\n [0, 0, 1, 0, 0],\n\n [0, 1, 1, 1, 0],\n\n [1, 1, 1, 1, 1],\n\n [0, 1, 1, 1, 0],\n\n [0, 0, 1, 0, 0]\n\n ];\n\n let mut answer = String::new();\n\n\n\n for line in input.split('\\n') {\n\n for letter in line.trim().chars() {\n", "file_path": "2016/day02/src/main.rs", "rank": 34, "score": 101746.22684909924 }, { "content": "fn part_1(input: &String) {\n\n // This would have been great if it actually worked.\n\n // let values = input.lines()\n\n // .map(|l| l\n\n // .trim()\n\n // .chars()\n\n // .collect::<Vec<_>>()\n\n // )\n\n // .collect::<Vec<_>>();\n\n // println!(\"first item {:?}\", values[0]);\n\n // println!(\"second item {:?}\", values[1]);\n\n\n\n // //let mut letters: Vec<String> = vec![String::new(); values[0].len()];\n\n // let mut letters: Vec<Vec<char>> = vec![vec![]; values[0].len()];\n\n // for v in values {\n\n // for (idx, l) in v.iter().enumerate() {\n\n // letters[idx].push(*l);\n\n // }\n\n // }\n\n // for (idx, line) in letters.iter().enumerate() {\n", "file_path": "2016/day06/src/main.rs", "rank": 35, "score": 101746.22684909924 }, { "content": "fn part_1(input: &String) {\n\n let mut state = (1, 1);\n\n // model a 3x3 grid that looks like\n\n // 1 2 3\n\n // 4 5 6\n\n // 7 8 9\n\n // where (0,0) starts in the upper left\n\n\n\n //println!(\"Start at {}\", state_to_val(state));\n\n let mut answer 
= String::new();\n\n\n\n for line in input.split('\\n') {\n\n for letter in line.trim().chars() {\n\n match letter {\n\n 'R' => { state.0 = clamp(state.0 + 1, 0, 2); },\n\n 'L' => { state.0 = clamp(state.0 - 1, 0, 2); },\n\n 'U' => { state.1 = clamp(state.1 - 1, 0, 2); },\n\n 'D' => { state.1 = clamp(state.1 + 1, 0, 2); },\n\n _ => println!(\"Unknown!\")\n\n }\n\n //println!(\"Moved {} to {}\", letter, state_to_val(state));\n\n }\n\n //println!(\"End on {}\", state_to_val(state));\n\n answer += &state_to_val(state).to_string();\n\n }\n\n println!(\"Part 1 answer is {}\", answer);\n\n}\n\n\n", "file_path": "2016/day02/src/main.rs", "rank": 36, "score": 101746.22684909924 }, { "content": "fn part_2(input: &String) {\n\n const OFFSET: usize = 500; //we'll make a grid 1000x1000\n\n let mut grid = [[false; 2*OFFSET]; 2*OFFSET];\n\n\n\n let mut state = (1, 0);\n\n let mut pos = (OFFSET, OFFSET);\n\n grid[OFFSET][OFFSET] = true; //we are already at the axis\n\n\n\n 'outer: for item in input.trim().split(\", \") {\n\n let (dir, dist) = item.split_at(1);\n\n let dir: String = dir.to_string();\n\n let mut dist: i32 = dist.parse::<i32>().unwrap();\n\n\n\n state = turn(&state, &dir);\n\n while dist > 0 { //take a step\n\n pos = tiny_step(&state, &pos);\n\n match grid[pos.0][pos.1] {\n\n true => {\n\n let real_pos = ((pos.0 as i32 - OFFSET as i32), (pos.1 as i32 - OFFSET as i32));\n\n println!(\"Part 2: Returned to ({}, {}), {} away from the start\", real_pos.0, real_pos.1, real_pos.0.abs() + real_pos.1.abs());\n\n break 'outer;\n\n },\n\n false => { grid[pos.0][pos.1] = true; dist = dist - 1 },\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "2016/day01/src/main.rs", "rank": 37, "score": 101746.22684909924 }, { "content": "fn part_1_2(input: &String) {\n\n let mut grid = [[false; GRID_WIDTH]; GRID_HEIGHT]; //grid of bools, 50 across by 6 down\n\n\n\n let set = RegexSet::new(&[\n\n r\"rect (\\d+)x(\\d+)\",\n\n r\"rotate row y=(\\d+) by (\\d+)\",\n\n r\"rotate column x=(\\d+) by 
(\\d+)\",\n\n ]).unwrap();\n\n\n\n let re_rect = Regex::new(r\"rect (\\d+)x(\\d+)\").unwrap();\n\n let re_rot_r = Regex::new(r\"rotate row y=(\\d+) by (\\d+)\").unwrap();\n\n let re_rot_c = Regex::new(r\"rotate column x=(\\d+) by (\\d+)\").unwrap();\n\n\n\n for line in input.lines() {\n\n let match_set: Vec<_> = set.matches(line.trim()).into_iter().collect();\n\n match match_set[0] {\n\n 0 /*rect*/ => {\n\n let cap = re_rect.captures(line).unwrap();\n\n let width = cap.at(1).unwrap().parse().unwrap();\n\n let height = cap.at(2).unwrap().parse().unwrap();\n", "file_path": "2016/day08/src/main.rs", "rank": 38, "score": 101746.22684909924 }, { "content": "fn part_2(input: &String) {\n\n let size = input.lines().nth(0).unwrap().len();\n\n print!(\"Part 2: Answer \");\n\n for idx in 0..size {\n\n let mut accumulator = [0; 26];\n\n for line in input.lines() {\n\n let letter = line.chars().nth(idx).unwrap();\n\n let letter_idx = letter as u8 - 'a' as u8;\n\n accumulator[letter_idx as usize] = accumulator[letter_idx as usize] + 1;\n\n //println!(\"The {}th letter is {} with a value {} - {:?}\", idx, letter, letter_idx, accumulator);\n\n }\n\n let mut smallest = (0, <i32>::max_value()); //idx and value\n\n for idx in 0..accumulator.len() {\n\n if accumulator[idx] > 0 && accumulator[idx] < smallest.1 {\n\n smallest = (idx, accumulator[idx]);\n\n }\n\n }\n\n let smallest_letter = (smallest.0 as u8 + 'a' as u8) as char;\n\n // println!(\"Accumulator is {:?}\", accumulator);\n\n // println!(\"Smallest is {:?} which is {}\", smallest, smallest_letter);\n\n print!(\"{}\", smallest_letter);\n\n }\n\n println!(\"\"); //newline\n\n}\n\n\n", "file_path": "2016/day06/src/main.rs", "rank": 39, "score": 101746.22684909924 }, { "content": "fn get_hash(bits: Bits) -> Bits {\n\n let mut hash: Bits = vec![];\n\n for pair in bits.as_slice().chunks(2) {\n\n if pair.len() > 1 {\n\n //println!(\"Pair: ({}, {})\", pair[0], pair[1]);\n\n hash.push(pair[0] == pair[1]);\n\n }\n\n }\n\n 
//println!(\"Hash => {}\", bit_string(&hash));\n\n match hash.len() % 2 {\n\n 0 /*even*/ => get_hash(hash),\n\n _ /*odd*/ => hash,\n\n }\n\n}\n\n\n", "file_path": "2016/day16/src/main.rs", "rank": 40, "score": 100155.40782596778 }, { "content": "fn play_game(size: i64) -> i64 {\n\n //take the size of the group and returns the winner\n\n let players: Vec<i64> = (1..size+1).collect();\n\n //println!(\"{:?}\", players);\n\n pass_gifts(players)\n\n}\n\n\n\n\n\n// Second game\n\n// 1 2 3 4 -> 1 2 - 4 -> 1 2 - - -> 1\n\n// 1 2 3 4 5 -> 1 2 - 4 5 -> 1 2 - 4 - -> - 2 - 4 - -> 2\n\n// 1 2 3 4 5 6 ->\n\n//looping though all players, remove player at position floor(players.len()/2) + player_idx\n\n\n", "file_path": "2016/day19/src/main.rs", "rank": 41, "score": 97766.4636365714 }, { "content": "fn add_row(grid: &mut Grid) {\n\n static TRAPS: [[Tile; 3]; 4] =\n\n [\n\n [Tile::Trapped, Tile::Trapped, Tile::Safe],\n\n [Tile::Safe, Tile::Trapped, Tile::Trapped],\n\n [Tile::Trapped, Tile::Safe, Tile::Safe],\n\n [Tile::Safe, Tile::Safe, Tile::Trapped],\n\n ];\n\n\n\n let last_row = grid.clone().into_iter().last().unwrap();\n\n //println!(\"Last row {:?}\", last_row);\n\n let mut new_row: Row = vec![];\n\n\n\n for idx in 0..last_row.len() {\n\n let mut pre: [Tile; 3] = [Tile::Safe, Tile::Safe, Tile::Safe];\n\n if idx > 0 {\n\n pre[0] = last_row[idx - 1].clone();\n\n }\n\n pre[1] = last_row[idx].clone();\n\n if idx + 1 < last_row.len() {\n", "file_path": "2016/day18/src/main.rs", "rank": 42, "score": 97630.89689088102 }, { "content": "fn calc_answer(input: &str, length: usize) {\n\n let mut bits: Bits = String::from(input).to_bits();\n\n gen_data(&mut bits, length);\n\n //println!(\"Using {}\", bit_string(&bits));\n\n let hash: Bits = get_hash(bits);\n\n println!(\"Hash is {}\", bit_string(&hash));\n\n}\n\n\n", "file_path": "2016/day16/src/main.rs", "rank": 43, "score": 96566.74263776198 }, { "content": "fn cycle_discs(discs: &Vec<Disc>) {\n\n //find full cycle length (through lowest 
common multiple of all disc positions)\n\n let full_cycle = discs.iter().fold(1, |total, x| total.lcm(&x.pos));\n\n //println!(\"LCM {}\", full_cycle);\n\n //iterate through each cycle until you find a match\n\n 'time_loop: for time in 0..full_cycle {\n\n for disc in discs {\n\n if !is_valid(disc, time) {\n\n continue 'time_loop;\n\n }\n\n }\n\n //we made it all the way through the discs!\n\n println!(\"We made it at time={}\", time);\n\n break 'time_loop;\n\n }\n\n}\n\n\n", "file_path": "2016/day15/src/main.rs", "rank": 44, "score": 96471.4083326449 }, { "content": "fn play_new_game(size: i64) -> i64 {\n\n let mut players = (1..size+1).collect::<Vec<i64>>();\n\n println!(\"Set up vector of {} players\", size);\n\n let mut current_player_idx = 0;\n\n let mut player_idx_to_eliminate;\n\n let mut player_len = size as usize;\n\n\n\n while player_len > 1 {\n\n player_idx_to_eliminate = (player_len/2 /*drops fraction*/ + current_player_idx) % player_len;\n\n //println!(\"Removing idx {} from players (size {}) {:?}\", player_idx_to_eliminate, players.len(), players);\n\n //players.remove(player_idx_to_eliminate);\n\n players = players.iter().enumerate().filter(|e| e.0 != player_idx_to_eliminate).map(|e| *e.1).collect::<Vec<_>>();\n\n player_len = player_len - 1;\n\n\n\n current_player_idx = current_player_idx + match player_idx_to_eliminate > current_player_idx {\n\n true => 1,\n\n false => 0\n\n } % player_len;\n\n }\n\n //println!(\"Returning {:?}\", players);\n\n players[0]\n\n}\n\n\n", "file_path": "2016/day19/src/main.rs", "rank": 45, "score": 96115.52873001012 }, { "content": "fn pass_gifts(players: Vec<i64>) -> i64 {\n\n //represents the group of players left after a pass\n\n let is_odd = players.len() % 2 == 1;\n\n let new_players = players.iter().enumerate().filter(|e| !(is_odd && e.0 == 0) && e.0 % 2 == 0).map(|e| *e.1).collect::<Vec<_>>();\n\n //println!(\"{:?}\", new_players);\n\n if new_players.len() == 1 {\n\n new_players[0]\n\n } else {\n\n 
pass_gifts(new_players)\n\n }\n\n}\n\n\n", "file_path": "2016/day19/src/main.rs", "rank": 46, "score": 92508.01308375975 }, { "content": "// Part 2\n\nfn step_row(last_row: &mut Row) -> i64 {\n\n static TRAPS: [[Tile; 3]; 4] =\n\n [\n\n [Tile::Trapped, Tile::Trapped, Tile::Safe],\n\n [Tile::Safe, Tile::Trapped, Tile::Trapped],\n\n [Tile::Trapped, Tile::Safe, Tile::Safe],\n\n [Tile::Safe, Tile::Safe, Tile::Trapped],\n\n ];\n\n\n\n let mut new_row: Row = vec![];\n\n let mut safe_tiles = 0;\n\n for idx in 0..last_row.len() {\n\n let mut pre: [Tile; 3] = [Tile::Safe, Tile::Safe, Tile::Safe];\n\n if idx > 0 {\n\n pre[0] = last_row[idx - 1].clone();\n\n }\n\n pre[1] = last_row[idx].clone();\n\n if idx + 1 < last_row.len() {\n\n pre[2] = last_row[idx + 1].clone();\n\n }\n", "file_path": "2016/day18/src/main.rs", "rank": 47, "score": 92057.69244376033 }, { "content": "fn state_to_new_val(state: (i32, i32)) -> char {\n\n let val_map = [\n\n [' ', ' ', '1', ' ', ' '],\n\n [' ', '2', '3', '4', ' '],\n\n ['5', '6', '7', '8', '9'],\n\n [' ', 'A', 'B', 'C', ' '],\n\n [' ', ' ', 'D', ' ', ' ']\n\n ];\n\n val_map[state.1 as usize][state.0 as usize]\n\n}\n\n\n", "file_path": "2016/day02/src/main.rs", "rank": 48, "score": 91977.34236837458 }, { "content": "fn main() {\n\n //let mut pos = (1, 1);\n\n //let goal = (31, 39);\n\n\n\n //println!(\"Answer is {:?}\", get_space(1, 2, 3));\n\n let mut maze: Vec<Vec<(bool, u32)>> = vec![vec![(false, 0); MAZE_WIDTH]; MAZE_HEIGHT];\n\n //print_maze(32, 40, 1358);\n\n build_maze(&mut maze, 1358);\n\n walk_maze(&mut maze, (1, 1), 0);\n\n print_maze(&maze);\n\n let end = (31, 39);\n\n println!(\"Part 1: It takes {} steps to get to ({}, {})\", maze[end.1][end.0].1, end.0, end.1);\n\n unsafe {\n\n println!(\"Part 2: There are {} positions within {} steps\", COUNT, MAX_STEPS);\n\n }\n\n}\n", "file_path": "2016/day13/src/main.rs", "rank": 49, "score": 84113.77836214329 }, { "content": "fn main() {\n\n let mut file = 
File::open(\"./data\").expect(\"could not open file\");\n\n let mut input = String::new();\n\n file.read_to_string(&mut input).expect(\"could not read file\");\n\n\n\n part_1(&input);\n\n part_2(&input);\n\n}", "file_path": "2016/day01/src/main.rs", "rank": 51, "score": 84113.77836214329 }, { "content": "fn main() {\n\n let mut file = File::open(\"./data\").expect(\"could not open file\");\n\n let mut input = String::new();\n\n file.read_to_string(&mut input).expect(\"could not read file\");\n\n\n\n part_1(&input);\n\n part_2(&input);\n\n}\n", "file_path": "2016/day03/src/main.rs", "rank": 52, "score": 84113.77836214329 }, { "content": "fn main() {\n\n // Part 1\n\n print!(\"Part 1: \");\n\n calc_answer(\"01110110101001000\", 272);\n\n\n\n // Part 2\n\n print!(\"Part 2: \");\n\n calc_answer(\"01110110101001000\", 35651584);\n\n}\n", "file_path": "2016/day16/src/main.rs", "rank": 53, "score": 84113.77836214329 }, { "content": "fn main() {\n\n let re_swap_pos = Regex::new(r\"swap position (\\d+) with position (\\d+)\").unwrap();\n\n let re_swap_let = Regex::new(r\"swap letter ([:alpha:]) with letter ([:alpha:])\").unwrap();\n\n let re_rotate_pos = Regex::new(r\"rotate (left|right) (\\d+) step[s]{0,1}\").unwrap();\n\n let re_rotate_let = Regex::new(r\"rotate based on position of letter ([:alpha:])\").unwrap();\n\n let re_reverse = Regex::new(r\"reverse positions (\\d+) through (\\d+)\").unwrap();\n\n let re_move_pos = Regex::new(r\"move position (\\d+) to position (\\d)\").unwrap();\n\n\n\n let mut file = File::open(\"./data\").expect(\"could not open file\");\n\n let mut input = String::new();\n\n file.read_to_string(&mut input).expect(\"could not read file\");\n\n\n\n let mut steps: Vec<Step> = Vec::new(); //translate all the instructions\n\n\n\n for line in input.lines() {\n\n if re_swap_pos.is_match(line) {\n\n let cap = re_swap_pos.captures(line).unwrap();\n\n steps.push(Step::SwapPos { from: cap.at(1).unwrap().parse().unwrap(), to: 
cap.at(2).unwrap().parse().unwrap() });\n\n }\n\n else if re_swap_let.is_match(line) {\n", "file_path": "2016/day21/src/main.rs", "rank": 54, "score": 84113.77836214329 }, { "content": "fn main() {\n\n let grid = create_grid(40);\n\n let total_safe = grid.iter().fold(0, |sum, r| sum + r.iter().filter(|t| **t == Tile::Safe).count());\n\n let total_trap = grid.iter().fold(0, |sum, r| sum + r.iter().filter(|t| **t == Tile::Trapped).count());\n\n println!(\"Part 1: There are {} safe tiles and {} traps\", total_safe, total_trap);\n\n\n\n //Need to improve the algorithm so it finishes in a sane amount of time\n\n let mut row: Row =\n\n String::from(\".^..^....^....^^.^^.^.^^.^.....^.^..^...^^^^^^.^^^^.^.^^^^^^^.^^^^^..^.^^^.^^..^.^^.^....^.^...^^.^.\")\n\n .chars().map(|c| {\n\n match c {\n\n '^' => Tile::Trapped,\n\n _ => Tile::Safe,\n\n }\n\n }).collect::<Row>();\n\n let mut safe_tiles = 48; //from row 1\n\n for _ in 0..400000 - 1 {\n\n safe_tiles = safe_tiles + step_row(&mut row);\n\n }\n\n println!(\"Part 2: There are {} safe tiles\", safe_tiles);\n\n}", "file_path": "2016/day18/src/main.rs", "rank": 55, "score": 84113.77836214329 }, { "content": "fn main() {\n\n let mut file = File::open(\"./data\").expect(\"could not open file\");\n\n let mut input = String::new();\n\n file.read_to_string(&mut input).expect(\"could not read file\");\n\n\n\n part_1_2(&input);\n\n}\n", "file_path": "2016/day04/src/main.rs", "rank": 56, "score": 84113.77836214329 }, { "content": "fn main() {\n\n let (first_good, good_count) = process_addresses(\"./data\", 4294967288);\n\n println!(\"Part 1: First nonblocked address is {}\", first_good);\n\n println!(\"Part 2: Counted {} valid addresses\", good_count);\n\n}\n\n\n", "file_path": "2016/day20/src/main.rs", "rank": 57, "score": 84113.77836214329 }, { "content": "fn main() {\n\n let mut ops: Vec<Operator> = Vec::new();\n\n\n\n //**** Read in the file input and convert it to a Vec of Operations ****//\n\n let mut file = 
File::open(\"./data\").expect(\"could not open file\");\n\n let mut input = String::new();\n\n file.read_to_string(&mut input).expect(\"could not read file\");\n\n\n\n let re_cpy = Regex::new(r\"^cpy (.*) (.*)$\").unwrap();\n\n let re_inc = Regex::new(r\"^inc (.*)$\").unwrap();\n\n let re_dec = Regex::new(r\"^dec (.*)$\").unwrap();\n\n let re_jnz = Regex::new(r\"^jnz (.*) (.*)$\").unwrap();\n\n\n\n for line in input.lines() {\n\n if re_cpy.is_match(line) {\n\n let cap = re_cpy.captures(line).unwrap();\n\n let reg2 = reg_to_idx(cap.at(2).unwrap());\n\n match cap.at(1).unwrap().parse::<i64>() {\n\n Ok(v) => ops.push(Operator::CpyV { value: v, dest: reg2 }),\n\n Err(e) => ops.push(Operator::CpyR { from: reg_to_idx(cap.at(1).unwrap()), dest: reg2 }),\n", "file_path": "2016/day12/src/main.rs", "rank": 58, "score": 84113.77836214329 }, { "content": "fn main() {\n\n //part_1();\n\n part_2();\n\n}\n", "file_path": "2016/day05/src/main.rs", "rank": 59, "score": 84113.77836214329 }, { "content": "fn main() {\n\n let winner = play_game(3014387);\n\n println!(\"Part 1: Winner is {}\", winner);\n\n\n\n //Need to improve the algorithm so it finishes in a sane amount of time\n\n let new_winner = play_new_game(3014387);\n\n println!(\"Part 2: Winner is {}\", new_winner);\n\n}\n", "file_path": "2016/day19/src/main.rs", "rank": 60, "score": 84113.77836214329 }, { "content": "fn main() {\n\n let passcode = \"rrrbmfta\";\n\n let paths: Vec<Path> = get_paths(passcode);\n\n\n\n //find the shortest size then all paths that size since min_by does not work\n\n let shortest_dist = paths.clone().into_iter().map(|p| p.len()).min().unwrap(); //.min_by(|x, y| x.len().cmp(&y.len()) );\n\n let longest_dist = paths.clone().into_iter().map(|p| p.len()).max().unwrap();\n\n\n\n let shortest_paths: Vec<Path> = paths.clone().into_iter().filter(|p| p.len() == shortest_dist).collect();\n\n //let longest_paths: Vec<Path> = paths.clone().into_iter().filter(|p| p.len() == longest_dist).collect();\n\n\n\n 
println!(\"Part 1: Shortest path {:?}\", shortest_paths[0]);\n\n println!(\"Part 2: Longest path sized {}\", longest_dist);\n\n}\n", "file_path": "2016/day17/src/main.rs", "rank": 61, "score": 84113.77836214329 }, { "content": "fn main() {\n\n let mut file = File::open(\"./data\").expect(\"could not open file\");\n\n let mut input = String::new();\n\n file.read_to_string(&mut input).expect(\"could not read file\");\n\n\n\n // Part 1\n\n print!(\"Part 1: \");\n\n let mut discs = get_discs(&input);\n\n cycle_discs(&discs);\n\n\n\n // Part 2\n\n print!(\"Part 2: \");\n\n let new_id = (discs.len() + 1) as u32;\n\n discs.push(Disc {id: new_id, pos: 11, start: 0 });\n\n cycle_discs(&discs);\n\n\n\n}\n", "file_path": "2016/day15/src/main.rs", "rank": 62, "score": 84113.77836214329 }, { "content": "fn main() {\n\n let mut file = File::open(\"./data\").expect(\"could not open file\");\n\n let mut input = String::new();\n\n file.read_to_string(&mut input).expect(\"could not read file\");\n\n\n\n part_1_2(&input);\n\n}\n", "file_path": "2016/day08/src/main.rs", "rank": 63, "score": 84113.77836214329 }, { "content": "fn main() {\n\n let mut file = File::open(\"./data\").expect(\"could not open file\");\n\n let mut input = String::new();\n\n file.read_to_string(&mut input).expect(\"could not read file\");\n\n\n\n part_1(&input);\n\n part_2(&input);\n\n}\n", "file_path": "2016/day06/src/main.rs", "rank": 64, "score": 84113.77836214329 }, { "content": "fn main() {\n\n let mut file = File::open(\"./data\").expect(\"could not open file\");\n\n let mut input = String::new();\n\n file.read_to_string(&mut input).expect(\"could not read file\");\n\n\n\n let re_bot = Regex::new(r\"bot (\\d+) gives low to (bot|output) (\\d+) and high to (bot|output) (\\d+)\").unwrap();\n\n let re_val = Regex::new(r\"value (\\d+) goes to bot (\\d+)\").unwrap();\n\n\n\n let bots: Vec<Bot> = Vec::new();\n\n\n\n for line in input.lines() {\n\n if re_bot.is_match(line) {\n\n let cap = 
re_bot.captures(line).unwrap();\n\n let bot = Bot { id: cap.at(1).unwrap().parse().unwrap(), chips: vec![], low: None, high: None };\n\n println!(\"This dude {} will give to {} and {}\", cap.at(1).unwrap(), cap.at(3).unwrap(), cap.at(5).unwrap());\n\n }\n\n else if re_val.is_match(line) {\n\n let cap = re_val.captures(line).unwrap();\n\n\n\n }\n\n }\n\n\n\n //for each line of input\n\n // either add a new bot \"node\" to the tree\n\n // or give a chip to a bot and let it propogate through the tree\n\n}\n", "file_path": "2016/day10/src/main.rs", "rank": 65, "score": 84113.77836214329 }, { "content": "fn main() {\n\n let mut file = File::open(\"./data\").expect(\"could not open file\");\n\n let mut input = String::new();\n\n file.read_to_string(&mut input).expect(\"could not read file\");\n\n\n\n part_1(&input);\n\n part_2(&input);\n\n}\n", "file_path": "2016/day07/src/main.rs", "rank": 66, "score": 84113.77836214329 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "2016/day11/src/main.rs", "rank": 67, "score": 84113.77836214329 }, { "content": "fn main() {\n\n let mut file = File::open(\"./data\").expect(\"could not open file\");\n\n let mut input = String::new();\n\n file.read_to_string(&mut input).expect(\"could not read file\");\n\n\n\n //part_1(&input);\n\n part_2(&input);\n\n}\n", "file_path": "2016/day09/src/main.rs", "rank": 68, "score": 84113.77836214329 }, { "content": "fn main() {\n\n //Filesystem Size Used Avail Use%\n\n let re_filesys = Regex::new(r\"/dev/grid/node-x(\\d+)-y(\\d+)\\s+(\\d+)T\\s+(\\d+)T\\s+(\\d+)T\\s+(\\d+)%\").unwrap();\n\n\n\n let mut file = File::open(\"./data\").expect(\"could not open file\");\n\n let mut input = String::new();\n\n file.read_to_string(&mut input).expect(\"could not read file\");\n\n\n\n let mut file_systems: Vec<FileSys> = Vec::new();\n\n\n\n for line in input.lines() {\n\n match re_filesys.captures(line) {\n\n Some(cap) => file_systems.push(FileSys {\n\n x: 
cap.at(1).unwrap().parse().unwrap(),\n\n y: cap.at(2).unwrap().parse().unwrap(),\n\n size: cap.at(3).unwrap().parse().unwrap(),\n\n used: cap.at(4).unwrap().parse().unwrap(),\n\n avail: cap.at(5).unwrap().parse().unwrap()\n\n }),\n\n None => panic!(\"Unknown data in input\"),\n", "file_path": "2016/day22/src/main.rs", "rank": 69, "score": 84113.77836214329 }, { "content": "fn main() {\n\n let mut file = File::open(\"./data\").expect(\"could not open file\");\n\n let mut input = String::new();\n\n file.read_to_string(&mut input).expect(\"could not read file\");\n\n\n\n part_1(&input);\n\n part_2(&input);\n\n}\n", "file_path": "2016/day02/src/main.rs", "rank": 70, "score": 84113.77836214329 }, { "content": "fn turn(state: &(i32, i32), direction: &String) -> (i32, i32) {\n\n match direction.as_ref() {\n\n \"R\" => { // clockwise motion\n\n (-1 * state.1, 1 * state.0)\n\n },\n\n \"L\" => { //rotate widdershins\n\n (1 * state.1, -1 * state.0)\n\n },\n\n _ => {\n\n println!(\"Invalid direction: {}\", direction);\n\n (state.0, state.1)\n\n },\n\n }\n\n}\n\n\n", "file_path": "2016/day01/src/main.rs", "rank": 71, "score": 83143.06630017898 }, { "content": "fn pass_chip(all_bots: &Vec<Bot>, bot_id: i32, value: i32) {\n\n let mut bot = all_bots.iter().find(|&b| b.id == bot_id).unwrap();\n\n bot.chips.push(value);\n\n if bot.chips.len() == 2 {\n\n pass_chip(all_bots, bot.low.unwrap(), *bot.chips.iter().clone().min().unwrap());\n\n pass_chip(all_bots, bot.high.unwrap(), *bot.chips.iter().clone().max().unwrap());\n\n }\n\n}\n\n\n", "file_path": "2016/day10/src/main.rs", "rank": 72, "score": 81205.7382877414 }, { "content": "#[test]\n\nfn test_rotate_letter() {\n\n assert_eq!(rotate_letter(\"abcd\".to_string(), 'b'), \"cdab\");\n\n assert_eq!(rotate_letter(\"abcd\".to_string(), 'b'), rotate_position(\"abcd\".to_string(), Direction::Right, 2));\n\n assert_eq!(rotate_letter(\"abcdefgh\".to_string(), 'f'), \"bcdefgha\");\n\n}\n\n\n", "file_path": "2016/day21/src/main.rs", "rank": 
73, "score": 79896.01501232316 }, { "content": "#[test]\n\nfn test_swap_letter() {\n\n assert_eq!(swap_letter(\"abcd\".to_string(), 'b', 'd'), \"adcb\");\n\n}\n\n\n", "file_path": "2016/day21/src/main.rs", "rank": 74, "score": 79896.01501232316 }, { "content": "fn recursive_step(passcode: &str, pos: Pos, path: Path) -> Vec<Path> {\n\n if pos.row == 3 && pos.col == 3 {\n\n //println!(\"{}\", path.iter().cloned().collect::<String>());\n\n return vec![path];\n\n }\n\n\n\n let mut paths: Vec<Path> = Vec::new();\n\n let mut hasher = Md5::new();\n\n hasher.input(format!(\"{}{}\", passcode, path.iter().cloned().collect::<String>()).as_bytes());\n\n let result = hasher.result_str();\n\n\n\n\n\n if pos.row > 0 && KEYS.contains(&result.clone().chars().nth(0).unwrap()) {\n\n let mut rec_path = path.clone(); rec_path.push('U');\n\n paths.append(&mut recursive_step(passcode, Pos { row: pos.row - 1, col: pos.col }, rec_path));\n\n }\n\n\n\n if pos.row + 1 < HEIGHT && KEYS.contains(&result.clone().chars().nth(1).unwrap()) {\n\n let mut rec_path = path.clone(); rec_path.push('D');\n\n paths.append(&mut recursive_step(passcode, Pos { row: pos.row + 1, col: pos.col }, rec_path));\n", "file_path": "2016/day17/src/main.rs", "rank": 75, "score": 79683.91771521755 }, { "content": "#[test]\n\nfn test_un_rotate_letter() {\n\n assert_eq!(un_rotate_letter(\"abcdefg\".to_string(), 'c'), \"abcdefg\");\n\n assert_eq!(un_rotate_letter(rotate_letter(\"abcdefg\".to_string(), 'c'), 'c'), \"abcdefg\");\n\n}", "file_path": "2016/day21/src/main.rs", "rank": 76, "score": 78032.18951040557 }, { "content": "fn tiny_step(state: &(i32, i32), pos: &(usize, usize)) -> (usize, usize) {\n\n ((pos.0 as i32 + state.0) as usize, (pos.1 as i32 + state.1) as usize)\n\n}\n\n\n", "file_path": "2016/day01/src/main.rs", "rank": 77, "score": 77024.87412345462 }, { "content": "fn reg_to_idx(name: &str) -> u8 {\n\n match name {\n\n \"a\" => 0,\n\n \"b\" => 1,\n\n \"c\" => 2,\n\n \"d\" => 3,\n\n _ => 0,\n\n }\n\n}\n\n\n", 
"file_path": "2016/day12/src/main.rs", "rank": 78, "score": 72776.19169620851 }, { "content": "fn create_grid(rows: usize) -> Grid {\n\n let first_row: Row =\n\n String::from(\".^..^....^....^^.^^.^.^^.^.....^.^..^...^^^^^^.^^^^.^.^^^^^^^.^^^^^..^.^^^.^^..^.^^.^....^.^...^^.^.\")\n\n .chars().map(|c| {\n\n match c {\n\n '^' => Tile::Trapped,\n\n _ => Tile::Safe,\n\n }\n\n }).collect::<Row>();\n\n let mut grid: Grid = vec![first_row];\n\n while grid.len() < rows {\n\n add_row(&mut grid);\n\n }\n\n grid\n\n}\n\n\n\n\n", "file_path": "2016/day18/src/main.rs", "rank": 79, "score": 72502.57365457156 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct Disc {\n\n id: u32,\n\n pos: u32,\n\n start: u32\n\n}\n\n\n", "file_path": "2016/day15/src/main.rs", "rank": 80, "score": 68062.34495964645 }, { "content": "#[derive(Debug)]\n\nstruct Room {\n\n name: String,\n\n sector: i32,\n\n check: String,\n\n}\n\n\n", "file_path": "2016/day04/src/main.rs", "rank": 81, "score": 68062.34495964645 }, { "content": "#[derive(Debug, PartialOrd, PartialEq, Eq, Ord)]\n\nstruct Range {\n\n low: u32,\n\n high: u32,\n\n}\n\n\n", "file_path": "2016/day20/src/main.rs", "rank": 82, "score": 68062.34495964645 }, { "content": "struct Bot {\n\n id: i32, //the identifier for this bot\n\n chips: Vec<i32>, //the value of the chips a bot can receive\n\n low: Option<i32>, //where to pass the low chip\n\n high: Option<i32>, //where to pass the high chip\n\n}\n\n\n", "file_path": "2016/day10/src/main.rs", "rank": 83, "score": 68062.34495964645 }, { "content": "struct Pos {\n\n row: u32,\n\n col: u32\n\n}\n\n\n\nconst WIDTH: u32 = 4;\n\nconst HEIGHT: u32 = 4;\n\n\n\nstatic KEYS: [char; 5] = ['b', 'c', 'd', 'e', 'f'];\n\n\n", "file_path": "2016/day17/src/main.rs", "rank": 84, "score": 68062.34495964645 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct FileSys {\n\n x: i32,\n\n y: i32,\n\n size: i32,\n\n used: i32,\n\n avail: i32\n\n}\n\n\n", "file_path": "2016/day22/src/main.rs", "rank": 85, 
"score": 66814.35924450002 }, { "content": "fn get_space(x: u32, y: u32, favorite_number: u32) -> Space {\n\n let total: u32 = x*x + 3*x + 2*x*y + y + y*y + favorite_number;\n\n //println!(\"{}, {}\", total, total.count_ones());\n\n if total.count_ones() % 2 == 0 { /*even*/\n\n Space::Open\n\n }\n\n else { /*odd*/\n\n Space::Wall\n\n }\n\n}\n\n\n\n// fn print_maze(width: u32, height: u32, key: u32) {\n\n// //print header\n\n// print!(\" \");\n\n// for col in 0..width {\n\n// print!(\"{}\", col % 10);\n\n// }\n\n// println!(\"\");\n\n// for row in 0..height {\n\n// print!(\"{} \", row % 10);\n", "file_path": "2016/day13/src/main.rs", "rank": 86, "score": 64838.726672185214 }, { "content": "type Grid = Vec<Row>;\n\n\n\n//Part 1\n\n\n", "file_path": "2016/day18/src/main.rs", "rank": 87, "score": 60651.54717659157 }, { "content": "type Row = Vec<Tile>;\n", "file_path": "2016/day18/src/main.rs", "rank": 88, "score": 60651.54717659157 }, { "content": "//This was mostly stolen from a github gist that I found while looking for a Rust MD5 library\n\nfn part_1() {\n\n let mut hasher = Md5::new();\n\n\n\n let key = \"reyedfim\".as_bytes();\n\n let mut answer: Vec<u8> = vec![];\n\n\n\n let mut count = 8;\n\n for i in 0..std::u64::MAX {\n\n hasher.input(key);\n\n hasher.input(i.to_string().as_bytes());\n\n\n\n let mut output = [0; 16]; //represents MD5 string\n\n hasher.result(&mut output);\n\n\n\n //check the first two and a half bytes of the output\n\n let first_five = output[0] as i32 + output[1] as i32 + (output[2] >> 4) as i32;\n\n if first_five == 0 {\n\n let output_str: String = output.iter().map(|&c| format!(\"{:x}{:x}\", c >> 4, c)).collect();\n\n println!(\"hashed {} to get {}\", i, output_str);\n\n answer.push(output[2]);\n", "file_path": "2016/day05/src/main.rs", "rank": 89, "score": 53032.42345089679 }, { "content": "#[test]\n\nfn test_between() {\n\n assert_eq!(8, between(1,10));\n\n}\n\n\n", "file_path": "2016/day20/src/main.rs", "rank": 90, "score": 
53032.42345089679 }, { "content": "fn part_2() {\n\n let mut hasher = Md5::new();\n\n\n\n let key = \"reyedfim\".as_bytes();\n\n let mut answer = [0; 8];\n\n let mut answer_mask = [true; 8];\n\n\n\n for i in 0..std::u64::MAX {\n\n hasher.input(key);\n\n hasher.input(i.to_string().as_bytes());\n\n\n\n let mut output = [0; 16]; //represents MD5 string\n\n hasher.result(&mut output);\n\n\n\n //check the first two and a half bytes of the output\n\n let first_five = output[0] as i32 + output[1] as i32 + (output[2] >> 4) as i32;\n\n if first_five == 0 {\n\n let output_str: String = output.iter().map(|&c| format!(\"{:x}{:x}\", c >> 4, c)).collect();\n\n println!(\"hashed {} to get {}\", i, output_str);\n\n //answer.push(output[2]);\n", "file_path": "2016/day05/src/main.rs", "rank": 91, "score": 53032.42345089679 }, { "content": "#[test]\n\nfn test_reverse() {\n\n assert_eq!(reverse(\"abcd\".to_string(), 1, 2), \"acbd\");\n\n}\n\n\n", "file_path": "2016/day21/src/main.rs", "rank": 92, "score": 51935.389711868615 }, { "content": "#[test]\n\nfn small_test() {\n\n let (first_good, good_count) = process_addresses(\"./small_data\", 11);\n\n assert_eq!(3, first_good);\n\n assert_eq!(4, good_count);\n\n}", "file_path": "2016/day20/src/main.rs", "rank": 93, "score": 51935.389711868615 }, { "content": "#[test]\n\nfn test_swap_position() {\n\n assert_eq!(swap_position(\"abcd\".to_string(), 0, 2), \"cbad\");\n\n}\n\n\n", "file_path": "2016/day21/src/main.rs", "rank": 94, "score": 50908.74093330605 }, { "content": "#[test]\n\nfn test_move_position() {\n\n assert_eq!(move_position(\"abcdefg\".to_string(), 1, 4), \"acdebfg\");\n\n assert_eq!(move_position(\"abcdefg\".to_string(), 4, 1), \"aebcdfg\");\n\n}\n\n\n", "file_path": "2016/day21/src/main.rs", "rank": 95, "score": 50908.74093330605 }, { "content": "#[test]\n\nfn test_rotate_position() {\n\n assert_eq!(rotate_position(\"abcd\".to_string(), Direction::Right, 2), \"cdab\");\n\n assert_eq!(rotate_position(\"abcd\".to_string(), 
Direction::Left, 1), \"bcda\");\n\n assert_eq!(rotate_position(\"abcd\".to_string(), Direction::Right, 8), \"abcd\");\n\n}\n\n\n", "file_path": "2016/day21/src/main.rs", "rank": 96, "score": 50908.74093330605 }, { "content": "#[test]\n\nfn test_undo_move_position() {\n\n assert_eq!(move_position(move_position(\"abcdefg\".to_string(), 1, 4), 4, 1), \"abcdefg\");\n\n assert_eq!(move_position(move_position(\"abcdefg\".to_string(), 4, 1), 1, 4), \"abcdefg\");\n\n}\n\n\n", "file_path": "2016/day21/src/main.rs", "rank": 97, "score": 49945.913882647255 }, { "content": "fn between(low: u32, high: u32) -> u32 {\n\n if low >= high {\n\n return 0;\n\n }\n\n high - low - 1\n\n}\n\n\n", "file_path": "2016/day20/src/main.rs", "rank": 98, "score": 43632.22688881497 }, { "content": "fn state_to_val(state: (i32, i32)) -> i32 {\n\n state.0 + 3 * state.1 + 1\n\n}\n\n\n", "file_path": "2016/day02/src/main.rs", "rank": 99, "score": 43613.86275206097 } ]
Rust
bee-snapshot/src/lib.rs
neumoxx/bee-p
2a68c3c6c17bd2c8dd36d8274ef0db3a04d50b16
pub(crate) mod constants; pub(crate) mod pruning; pub(crate) mod worker; pub mod config; pub mod event; pub mod global; pub mod local; pub mod metadata; use bee_common::shutdown_stream::ShutdownStream; use bee_common_ext::{bee_node::BeeNode, event::Bus, shutdown_tokio::Shutdown, worker::Worker}; use bee_crypto::ternary::Hash; use bee_ledger::state::LedgerState; use bee_protocol::{event::LatestSolidMilestoneChanged, tangle::tangle, MilestoneIndex}; use chrono::{offset::TimeZone, Utc}; use futures::channel::{mpsc, oneshot}; use log::{info, warn}; use tokio::spawn; use std::{path::Path, sync::Arc}; #[derive(Debug)] pub enum Error { Global(global::FileError), Local(local::FileError), Download(local::DownloadError), } pub async fn init( config: &config::SnapshotConfig, bee_node: Arc<BeeNode>, bus: Arc<Bus<'static>>, shutdown: &mut Shutdown, ) -> Result<(LedgerState, MilestoneIndex, u64), Error> { let (state, index, timestamp) = match config.load_type() { config::LoadType::Global => { info!("Loading global snapshot file {}...", config.global().path()); let snapshot = global::GlobalSnapshot::from_file(config.global().path(), MilestoneIndex(*config.global().index())) .map_err(Error::Global)?; tangle().clear_solid_entry_points(); tangle().add_solid_entry_point(Hash::zeros(), MilestoneIndex(*config.global().index())); info!( "Loaded global snapshot file from with index {} and {} balances.", *config.global().index(), snapshot.state().len() ); (snapshot.into_state(), *config.global().index(), 0) } config::LoadType::Local => { if !Path::new(config.local().path()).exists() { local::download_local_snapshot(config.local()) .await .map_err(Error::Download)?; } info!("Loading local snapshot file {}...", config.local().path()); let snapshot = local::LocalSnapshot::from_file(config.local().path()).map_err(Error::Local)?; info!( "Loaded local snapshot file from {} with index {}, {} solid entry points, {} seen milestones and \ {} balances.", Utc.timestamp(snapshot.metadata().timestamp() 
as i64, 0).to_rfc2822(), snapshot.metadata().index(), snapshot.metadata().solid_entry_points().len(), snapshot.metadata().seen_milestones().len(), snapshot.state.len() ); tangle().update_latest_solid_milestone_index(snapshot.metadata().index().into()); tangle().update_latest_milestone_index(snapshot.metadata().index().into()); tangle().update_snapshot_index(snapshot.metadata().index().into()); tangle().update_pruning_index(snapshot.metadata().index().into()); tangle().add_solid_entry_point(Hash::zeros(), MilestoneIndex(0)); for (hash, index) in snapshot.metadata().solid_entry_points() { tangle().add_solid_entry_point(*hash, MilestoneIndex(*index)); } for _seen_milestone in snapshot.metadata().seen_milestones() { } let index = snapshot.metadata().index(); let timestamp = snapshot.metadata().timestamp(); (snapshot.into_state(), index, timestamp) } }; let (snapshot_worker_tx, snapshot_worker_rx) = mpsc::unbounded(); let (snapshot_worker_shutdown_tx, snapshot_worker_shutdown_rx) = oneshot::channel(); shutdown.add_worker_shutdown( snapshot_worker_shutdown_tx, spawn(worker::SnapshotWorker::new(config.clone()).start( ShutdownStream::new(snapshot_worker_shutdown_rx, snapshot_worker_rx), bee_node, (), )), ); bus.add_listener(move |latest_solid_milestone: &LatestSolidMilestoneChanged| { if let Err(e) = snapshot_worker_tx.unbounded_send(worker::SnapshotWorkerEvent(latest_solid_milestone.0.clone())) { warn!( "Failed to send milestone {} to snapshot worker: {:?}.", *latest_solid_milestone.0.index(), e ) } }); Ok((state, MilestoneIndex(index), timestamp)) }
pub(crate) mod constants; pub(crate) mod pruning; pub(crate) mod worker; pub mod config; pub mod event; pub mod global; pub mod local; pub mod metadata; use bee_common::shutdown_stream::ShutdownStream; use bee_common_ext::{bee_node::BeeNode, event::Bus, shutdown_tokio::Shutdown, worker::Worker}; use bee_crypto::ternary::Hash; use bee_ledger::state::LedgerState; use bee_protocol::{event::LatestSolidMilestoneChanged, tangle::tangle, MilestoneIndex}; use chrono::{offset::TimeZone, Utc}; use futures::channel::{mpsc, oneshot}; use log::{info, warn}; use tokio::spawn; use std::{path::Path, sync::Arc}; #[derive(Debug)] pub enum Error { Global(global::FileError), Local(local::FileError), Download(local::DownloadError), } pub async fn init( config: &config::SnapshotConfig, bee_node: Arc<BeeNode>, bus: Arc<Bus<'static>>, shutdown: &mut Shutdown, ) -> Result<(LedgerState, MilestoneIndex, u64), Error> { let (state, index, timestamp) = match config.load_type() { config::LoadType::Global => { info!("Loading global snapshot file {}...", config.global().path()); let snapshot = global::GlobalSnapshot::from_file(config.global().path(), MilestoneIndex(*config.global().index())) .map_err(Error::Global)?; tangle().clear_solid_entry_points(); tangle().add_solid_entry_point(Hash::zeros(), MilestoneIndex(*config.global().index())); info!( "Loaded global snapshot file from with index {} and {} balances.", *config.global().index(), snapshot.state().len() ); (snapshot.into_state(), *config.global().index(), 0) } config::LoadType::Local => { if !Path::new(config.local().path()).exists() { local::download_local_snapshot(config.local()) .await .map_err(Error::Download)?; } info!("Loading local snapshot file {}...", config.local().path()); let snapshot = local::LocalSnapshot::from_file(config.local().path()).map_err(Error::Local)?; info!( "Loaded local snapshot file from {} with index {}, {} solid entry points, {} seen milestones and \ {} balances.", Utc.timestamp(snapshot.metadata().timestamp() 
as i64, 0).to_rfc2822(), snapshot.metadata().index(), snapshot.metadata().solid_entry_points().len(), snapshot.metadata().seen_milestones().len(), snapshot.state.len() ); tangle().update_latest_solid_milestone_index(snapshot.metadata().index().into()); tangle().update_latest_milestone_index(snapshot.metadata().index().into()); tangle().update_snapshot_index(snapshot.metadata().index().into()); tangle().update_pruning_index(snapshot.metadata().index().into()); tangle().add_solid_entry_point(Hash::zeros(), MilestoneIndex(0)); for (hash, index) in snapshot.metadata().solid_entry_points() { tangle().add_solid_entry_point(*hash, MilestoneIndex(*index)); } for _seen_milestone in snapshot.metadata().seen_milestones() { } let index = snapshot.metadata().index(); let timestamp = snapshot.metadata().timestamp(); (snapshot.into_state(), index, timestamp) } }; let (snapshot_worker_tx, snapshot_worker_rx) = mpsc::unbounded(); let (snapshot_worker_shutdown_tx, snapshot_worker_shutdown_rx) = oneshot::channel(); shutdown.add_worker_shutdow
n( snapshot_worker_shutdown_tx, spawn(worker::SnapshotWorker::new(config.clone()).start( ShutdownStream::new(snapshot_worker_shutdown_rx, snapshot_worker_rx), bee_node, (), )), ); bus.add_listener(move |latest_solid_milestone: &LatestSolidMilestoneChanged| { if let Err(e) = snapshot_worker_tx.unbounded_send(worker::SnapshotWorkerEvent(latest_solid_milestone.0.clone())) { warn!( "Failed to send milestone {} to snapshot worker: {:?}.", *latest_solid_milestone.0.index(), e ) } }); Ok((state, MilestoneIndex(index), timestamp)) }
function_block-function_prefixed
[ { "content": "// TODO testing\n\npub fn get_new_solid_entry_points(target_index: MilestoneIndex) -> Result<DashMap<Hash, MilestoneIndex>, Error> {\n\n let solid_entry_points = DashMap::<Hash, MilestoneIndex>::new();\n\n for index in *target_index - SOLID_ENTRY_POINT_CHECK_THRESHOLD_PAST..*target_index {\n\n let milestone_hash;\n\n\n\n // NOTE Actually we don't really need the tail, and only need one of the milestone tx.\n\n // In gohornet, we start from the tail milestone tx.\n\n if let Some(hash) = tangle().get_milestone_hash(MilestoneIndex(index)) {\n\n milestone_hash = hash;\n\n } else {\n\n return Err(Error::MilestoneNotFoundInTangle(index));\n\n }\n\n\n\n // Get all the approvees confirmed by the milestone tail.\n\n traversal::visit_parents_depth_first(\n\n tangle(),\n\n milestone_hash,\n\n |_, _, metadata| *metadata.milestone_index() >= index,\n\n |hash, _, metadata| {\n\n if metadata.flags().is_confirmed() && is_solid_entry_point(&hash).unwrap() {\n", "file_path": "bee-snapshot/src/pruning/mod.rs", "rank": 0, "score": 527072.5702499526 }, { "content": "/// Checks whether any direct approver of the given transaction was confirmed by a\n\n/// milestone which is above the target milestone.\n\npub fn is_solid_entry_point(hash: &Hash) -> Result<bool, Error> {\n\n // Check if there is any approver of the transaction was confirmed by newer milestones.\n\n let milestone_index;\n\n if let Some(metadata) = tangle().get_metadata(hash) {\n\n milestone_index = metadata.milestone_index();\n\n } else {\n\n return Err(Error::MetadataNotFound(*hash));\n\n }\n\n let mut is_solid = false;\n\n traversal::visit_children_follow_trunk(\n\n tangle(),\n\n *hash,\n\n |_, metadata| {\n\n if is_solid {\n\n return false;\n\n }\n\n // `true` when one of the current tx's approver was confirmed by a newer milestone_index.\n\n is_solid = metadata.flags().is_confirmed() && metadata.milestone_index() > milestone_index;\n\n true\n\n },\n\n |_, _, _| {},\n\n );\n\n Ok(is_solid)\n\n}\n\n\n", 
"file_path": "bee-snapshot/src/pruning/mod.rs", "rank": 1, "score": 477846.5850692907 }, { "content": "// NOTE we don't prune cache, but only prune the database.\n\npub fn prune_database(mut target_index: MilestoneIndex) -> Result<(), Error> {\n\n let target_index_max = MilestoneIndex(\n\n *tangle().get_snapshot_index() - SOLID_ENTRY_POINT_CHECK_THRESHOLD_PAST - ADDITIONAL_PRUNING_THRESHOLD - 1,\n\n );\n\n if target_index > target_index_max {\n\n target_index = target_index_max;\n\n }\n\n // Update the solid entry points in the static MsTangle.\n\n let new_solid_entry_points = get_new_solid_entry_points(target_index)?;\n\n\n\n // Clear the solid_entry_points in the static MsTangle.\n\n tangle().clear_solid_entry_points();\n\n\n\n // TODO update the whole solid_entry_points in the static MsTangle w/o looping.\n\n for (hash, milestone_index) in new_solid_entry_points.into_iter() {\n\n tangle().add_solid_entry_point(hash, milestone_index);\n\n }\n\n\n\n // We have to set the new solid entry point index.\n\n // This way we can cleanly prune even if the pruning was aborted last time.\n", "file_path": "bee-snapshot/src/pruning/mod.rs", "rank": 2, "score": 455866.3142682345 }, { "content": "// TODO get the unconfirmed trnsactions in the database.\n\npub fn get_unconfirmed_transactions(_target_index: &MilestoneIndex) -> Vec<Hash> {\n\n // NOTE If there is no specific struct for storing th unconfirmed transaction,\n\n // then we need to traverse the whole tangle to get the unconfirmed transactions (SLOW)!\n\n // TODO traverse the whole tangle through the approvers from solid entry points.\n\n unimplemented!()\n\n}\n\n\n", "file_path": "bee-snapshot/src/pruning/mod.rs", "rank": 3, "score": 391351.25995407917 }, { "content": "// TODO prunes the milestone metadata and the ledger diffs from the database for the given milestone.\n\npub fn prune_milestone(_milestone_index: MilestoneIndex) {\n\n // Delete ledger_diff for milestone with milestone_index.\n\n // Delete milestone 
storage (if we have this) for milestone with milestone_index.\n\n unimplemented!()\n\n}\n\n\n", "file_path": "bee-snapshot/src/pruning/mod.rs", "rank": 4, "score": 380601.0258652238 }, { "content": "// TODO remove the unconfirmed transactions in the database.\n\npub fn prune_unconfirmed_transactions(_purning_milestone_index: &MilestoneIndex) -> u32 {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "bee-snapshot/src/pruning/mod.rs", "rank": 5, "score": 358270.4740487877 }, { "content": "// TODO remove the confirmed transactions in the database.\n\npub fn prune_transactions(_hashes: Vec<Hash>) -> u32 {\n\n unimplemented!()\n\n}\n\n\n", "file_path": "bee-snapshot/src/pruning/mod.rs", "rank": 6, "score": 316926.7624728735 }, { "content": "pub fn load_bundle_builder<Metadata>(tangle: &Tangle<Metadata>, hash: &Hash) -> Option<IncomingBundleBuilder>\n\nwhere\n\n Metadata: Clone + Copy,\n\n{\n\n let mut bundle_builder = IncomingBundleBuilder::new();\n\n let mut done = false;\n\n\n\n visit_parents_follow_trunk(\n\n tangle,\n\n *hash,\n\n |transaction, _| {\n\n if done {\n\n return false;\n\n }\n\n if transaction.index() == transaction.last_index() {\n\n done = true;\n\n }\n\n true\n\n },\n\n |_, transaction, _| {\n\n bundle_builder.push((*(*transaction)).clone());\n\n },\n\n );\n\n\n\n match bundle_builder.len() {\n\n 0 => None,\n\n _ => Some(bundle_builder),\n\n }\n\n}\n", "file_path": "bee-tangle/src/helper.rs", "rank": 7, "score": 282021.1053956513 }, { "content": "pub fn create_random_milestone(index: MilestoneIndex) -> Milestone {\n\n Milestone::new(rand_trits_field::<Hash>(), index)\n\n}\n", "file_path": "bee-test/src/milestone.rs", "rank": 8, "score": 272204.86623557314 }, { "content": "pub fn on_all_tails<Apply: FnMut(&Hash, &TransactionRef, &TransactionMetadata)>(\n\n tangle: &Tangle<TransactionMetadata>,\n\n root: Hash,\n\n apply: Apply,\n\n) {\n\n traversal::visit_parents_depth_first(\n\n tangle,\n\n root,\n\n |_, _, metadata| !metadata.flags().is_tail(),\n\n |_, 
_, _| {},\n\n apply,\n\n |_| {},\n\n );\n\n}\n", "file_path": "bee-protocol/src/tangle/helper.rs", "rank": 9, "score": 254166.10734353334 }, { "content": "pub fn init(\n\n index: u32,\n\n state: LedgerState,\n\n coo_config: ProtocolCoordinatorConfig,\n\n bee_node: Arc<BeeNode>,\n\n bus: Arc<Bus<'static>>,\n\n shutdown: &mut Shutdown,\n\n) -> mpsc::UnboundedSender<LedgerWorkerEvent> {\n\n // TODO\n\n // if unsafe { !WHITE_FLAG.is_null() } {\n\n // warn!(\"Already initialized.\");\n\n // return;\n\n // }\n\n\n\n let (ledger_worker_tx, ledger_worker_rx) = mpsc::unbounded();\n\n let (ledger_worker_shutdown_tx, ledger_worker_shutdown_rx) = oneshot::channel();\n\n\n\n shutdown.add_worker_shutdown(\n\n ledger_worker_shutdown_tx,\n\n tokio::spawn(\n", "file_path": "bee-ledger/src/whiteflag/mod.rs", "rank": 10, "score": 250780.9226444011 }, { "content": "pub fn init() {\n\n if !INITIALIZED.compare_and_swap(false, true, Ordering::Relaxed) {\n\n TANGLE.store(Box::into_raw(MsTangle::new().into()), Ordering::Relaxed);\n\n } else {\n\n panic!(\"Tangle already initialized\");\n\n }\n\n}\n\n\n", "file_path": "bee-protocol/src/tangle/mod.rs", "rank": 11, "score": 250780.9226444011 }, { "content": "/// A Tangle walker that - given a starting vertex - visits all of its ancestors that are connected through\n\n/// the *trunk* edge. The walk continues as long as the visited vertices match a certain condition. For each\n\n/// visited vertex a customized logic can be applied. 
Each traversed vertex provides read access to its\n\n/// associated data and metadata.\n\npub fn visit_parents_follow_trunk<Metadata, Match, Apply>(\n\n tangle: &Tangle<Metadata>,\n\n mut hash: Hash,\n\n mut matches: Match,\n\n mut apply: Apply,\n\n) where\n\n Metadata: Clone + Copy,\n\n Match: FnMut(&TxRef, &Metadata) -> bool,\n\n Apply: FnMut(&Hash, &TxRef, &Metadata),\n\n{\n\n while let Some(vtx) = tangle.vertices.get(&hash) {\n\n let vtx = vtx.value();\n\n\n\n if !matches(vtx.transaction(), vtx.metadata()) {\n\n break;\n\n } else {\n\n apply(&hash, vtx.transaction(), vtx.metadata());\n\n hash = *vtx.trunk();\n\n }\n\n }\n\n}\n\n\n", "file_path": "bee-tangle/src/traversal.rs", "rank": 12, "score": 243184.0648748554 }, { "content": "/// A Tangle walker that - given a starting vertex - visits all of its children that are connected through\n\n/// the *trunk* edge. The walk continues as long as the visited vertices match a certain condition. For each\n\n/// visited vertex a customized logic can be applied. Each traversed vertex provides read access to its\n\n/// associated data and metadata.\n\npub fn visit_children_follow_trunk<Metadata, Match, Apply>(\n\n tangle: &Tangle<Metadata>,\n\n root: Hash,\n\n mut matches: Match,\n\n mut apply: Apply,\n\n) where\n\n Metadata: Clone + Copy,\n\n Match: FnMut(&TxRef, &Metadata) -> bool,\n\n Apply: FnMut(&Hash, &TxRef, &Metadata),\n\n{\n\n // TODO could be simplified like visit_parents_follow_trunk ? 
Meaning no vector ?\n\n let mut children = vec![root];\n\n\n\n while let Some(ref parent_hash) = children.pop() {\n\n if let Some(parent) = tangle.vertices.get(parent_hash) {\n\n if matches(parent.value().transaction(), parent.value().metadata()) {\n\n apply(parent_hash, parent.value().transaction(), parent.value().metadata());\n\n\n\n if let Some(parent_children) = tangle.children.get(parent_hash) {\n\n for child_hash in parent_children.value() {\n", "file_path": "bee-tangle/src/traversal.rs", "rank": 13, "score": 243184.0648748554 }, { "content": "pub fn timestamp_millis() -> u64 {\n\n let unix_time = SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"system clock error\");\n\n\n\n unix_time.as_secs() * 1000 + u64::from(unix_time.subsec_millis())\n\n}\n", "file_path": "bee-network/src/util/time.rs", "rank": 14, "score": 239196.49298195654 }, { "content": "// TODO: test\n\n/// A Tangle walker that - given a starting vertex - visits all of its decendents that are connected through\n\n/// either the *trunk* or the *branch* edge. The walk continues as long as the visited vertices match a certain\n\n/// condition. For each visited vertex customized logic can be applied depending on the availability of the\n\n/// vertex. 
Each traversed vertex provides read access to its associated data and metadata.\n\npub fn visit_children_depth_first<Metadata, Match, Apply, ElseApply>(\n\n tangle: &Tangle<Metadata>,\n\n root: Hash,\n\n matches: Match,\n\n mut apply: Apply,\n\n mut else_apply: ElseApply,\n\n) where\n\n Metadata: Clone + Copy,\n\n Match: Fn(&TxRef, &Metadata) -> bool,\n\n Apply: FnMut(&Hash, &TxRef, &Metadata),\n\n ElseApply: FnMut(&Hash),\n\n{\n\n let mut children = vec![root];\n\n let mut visited = HashSet::new();\n\n\n\n while let Some(hash) = children.last() {\n\n match tangle.vertices.get(hash) {\n\n Some(r) => {\n\n let vtx = r.value();\n\n\n", "file_path": "bee-tangle/src/traversal.rs", "rank": 15, "score": 232483.00091316662 }, { "content": "/// A Tangle walker that - given a starting vertex - visits all of its ancestors that are connected through\n\n/// either the *trunk* or the *branch* edge. The walk continues as long as the visited vertices match a certain\n\n/// condition. For each visited vertex customized logic can be applied depending on the availability of the\n\n/// vertex. 
Each traversed vertex provides read access to its associated data and metadata.\n\npub fn visit_parents_depth_first<Metadata, Match, Apply, ElseApply, MissingApply>(\n\n tangle: &Tangle<Metadata>,\n\n root: Hash,\n\n matches: Match,\n\n mut apply: Apply,\n\n mut else_apply: ElseApply,\n\n mut missing_apply: MissingApply,\n\n) where\n\n Metadata: Clone + Copy,\n\n Match: Fn(&Hash, &TxRef, &Metadata) -> bool,\n\n Apply: FnMut(&Hash, &TxRef, &Metadata),\n\n ElseApply: FnMut(&Hash, &TxRef, &Metadata),\n\n MissingApply: FnMut(&Hash),\n\n{\n\n let mut parents = Vec::new();\n\n let mut visited = HashSet::new();\n\n\n\n parents.push(root);\n\n\n\n while let Some(hash) = parents.pop() {\n", "file_path": "bee-tangle/src/traversal.rs", "rank": 16, "score": 222758.09366575957 }, { "content": "fn xx_hash(buf: &[u8]) -> u64 {\n\n let mut hasher = XxHash64::default();\n\n\n\n hasher.write(buf);\n\n hasher.finish()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_cache_insert_same_elements() {\n\n let mut cache = HashCache::new(10);\n\n\n\n let first_buf = &[1, 2, 3];\n\n let second_buf = &[1, 2, 3];\n\n\n\n assert_eq!(cache.insert(first_buf), true);\n", "file_path": "bee-protocol/src/worker/transaction/hash_cache.rs", "rank": 17, "score": 213177.79246664094 }, { "content": "#[test]\n\nfn null_balance() {\n\n assert_eq!(\n\n GlobalSnapshot::from_file(\"tests/files/global_snapshot_null_balance.txt\", MilestoneIndex(0)).err(),\n\n Some(Error::NullBalance)\n\n );\n\n}\n\n\n", "file_path": "bee-snapshot/tests/global_snapshot.rs", "rank": 18, "score": 209364.7219097258 }, { "content": "#[test]\n\nfn overflow_balance() {\n\n assert_eq!(\n\n GlobalSnapshot::from_file(\"tests/files/global_snapshot_overflow_balance.txt\", MilestoneIndex(0)).err(),\n\n Some(Error::InvalidBalance)\n\n );\n\n}\n\n\n", "file_path": "bee-snapshot/tests/global_snapshot.rs", "rank": 19, "score": 209364.7219097258 }, { "content": "#[test]\n\nfn negative_balance() {\n\n 
assert_eq!(\n\n GlobalSnapshot::from_file(\"tests/files/global_snapshot_negative_balance.txt\", MilestoneIndex(0)).err(),\n\n Some(Error::InvalidBalance)\n\n );\n\n}\n\n\n", "file_path": "bee-snapshot/tests/global_snapshot.rs", "rank": 20, "score": 209364.7219097258 }, { "content": "#[test]\n\nfn invalid_balance() {\n\n assert_eq!(\n\n GlobalSnapshot::from_file(\"tests/files/global_snapshot_invalid_balance.txt\", MilestoneIndex(0)).err(),\n\n Some(Error::InvalidBalance)\n\n );\n\n}\n\n\n", "file_path": "bee-snapshot/tests/global_snapshot.rs", "rank": 21, "score": 209364.7219097258 }, { "content": "#[test]\n\nfn file_not_found() {\n\n assert_eq!(\n\n GlobalSnapshot::from_file(\"tests/files/global_snapshot_file_not_found.txt\", MilestoneIndex(0)).err(),\n\n Some(Error::FileNotFound)\n\n );\n\n}\n\n\n", "file_path": "bee-snapshot/tests/global_snapshot.rs", "rank": 22, "score": 209253.1225273862 }, { "content": "#[allow(clippy::many_single_char_names)]\n\npub fn create_test_tangle() -> (Tangle<()>, Transactions, Hashes) {\n\n // a b\n\n // |\\ /\n\n // | c\n\n // |/|\n\n // d |\n\n // \\|\n\n // e\n\n\n\n let tangle = Tangle::new();\n\n\n\n let (a_hash, a) = create_random_tx();\n\n let (b_hash, b) = create_random_tx();\n\n let (c_hash, c) = create_random_attached_tx(a_hash.clone(), b_hash.clone());\n\n let (d_hash, d) = create_random_attached_tx(a_hash.clone(), c_hash.clone());\n\n let (e_hash, e) = create_random_attached_tx(d_hash.clone(), c_hash.clone());\n\n\n\n assert_eq!(*c.trunk(), b_hash);\n\n assert_eq!(*c.branch(), a_hash);\n\n assert_eq!(*d.trunk(), c_hash);\n", "file_path": "bee-tangle/tests/helper/mod.rs", "rank": 23, "score": 197320.60685621612 }, { "content": "enum Error {\n\n NonContiguousMilestone,\n\n MerkleProofMismatch,\n\n InvalidTailsCount,\n\n InvalidConfirmationSet(TraversalError),\n\n}\n\n\n\npub enum LedgerWorkerEvent {\n\n Confirm(Milestone),\n\n GetBalance(Address, oneshot::Sender<u64>),\n\n}\n\n\n\npub(crate) struct LedgerWorker {\n\n index: 
MilestoneIndex,\n\n pub(crate) state: LedgerState,\n\n coo_config: ProtocolCoordinatorConfig,\n\n bus: Arc<Bus<'static>>,\n\n}\n\n\n\n#[async_trait]\n", "file_path": "bee-ledger/src/whiteflag/worker.rs", "rank": 24, "score": 194658.46461353707 }, { "content": "pub fn create_random_attached_tx(branch: Hash, trunk: Hash) -> (Hash, Transaction) {\n\n let builder = TransactionBuilder::new()\n\n .with_payload(rand_trits_field::<Payload>())\n\n .with_address(rand_trits_field::<Address>())\n\n .with_value(Value::from_inner_unchecked(0))\n\n .with_obsolete_tag(rand_trits_field::<Tag>())\n\n .with_timestamp(Timestamp::from_inner_unchecked(0))\n\n .with_index(Index::from_inner_unchecked(0))\n\n .with_last_index(Index::from_inner_unchecked(0))\n\n .with_tag(rand_trits_field::<Tag>())\n\n .with_attachment_ts(Timestamp::from_inner_unchecked(0))\n\n .with_bundle(rand_trits_field::<Hash>())\n\n .with_trunk(trunk)\n\n .with_branch(branch)\n\n .with_attachment_lbts(Timestamp::from_inner_unchecked(0))\n\n .with_attachment_ubts(Timestamp::from_inner_unchecked(0))\n\n .with_nonce(rand_trits_field::<Nonce>());\n\n\n\n (rand_trits_field::<Hash>(), builder.build().unwrap())\n\n}\n", "file_path": "bee-test/src/transaction.rs", "rank": 25, "score": 193557.79508190948 }, { "content": "pub fn create_random_tx() -> (Hash, Transaction) {\n\n let builder = TransactionBuilder::new()\n\n .with_payload(Payload::zeros())\n\n .with_address(rand_trits_field::<Address>())\n\n .with_value(Value::from_inner_unchecked(0))\n\n .with_obsolete_tag(rand_trits_field::<Tag>())\n\n .with_timestamp(Timestamp::from_inner_unchecked(0))\n\n .with_index(Index::from_inner_unchecked(0))\n\n .with_last_index(Index::from_inner_unchecked(0))\n\n .with_tag(rand_trits_field::<Tag>())\n\n .with_attachment_ts(Timestamp::from_inner_unchecked(0))\n\n .with_bundle(rand_trits_field::<Hash>())\n\n .with_trunk(rand_trits_field::<Hash>())\n\n .with_branch(rand_trits_field::<Hash>())\n\n 
.with_attachment_lbts(Timestamp::from_inner_unchecked(0))\n\n .with_attachment_ubts(Timestamp::from_inner_unchecked(0))\n\n .with_nonce(rand_trits_field::<Nonce>());\n\n\n\n (rand_trits_field::<Hash>(), builder.build().unwrap())\n\n}\n\n\n", "file_path": "bee-test/src/transaction.rs", "rank": 26, "score": 189161.73191472233 }, { "content": "#[inline]\n\nfn disconnect_endpoint(epid: EndpointId, connected_endpoints: &mut ConnectedEndpointList) -> Result<bool, WorkerError> {\n\n // NOTE: removing the endpoint will drop the connection!\n\n Ok(connected_endpoints.remove(epid))\n\n}\n\n\n\n#[inline]\n\nasync fn send_event_after_delay(event: Event, mut internal_event_sender: EventSender) -> Result<(), WorkerError> {\n\n tokio::time::delay_for(Duration::from_secs(RECONNECT_INTERVAL.load(Ordering::Relaxed))).await;\n\n\n\n Ok(internal_event_sender.send(event).await?)\n\n}\n\n\n\n#[inline]\n\nasync fn send_message(\n\n receiver_epid: EndpointId,\n\n message: Vec<u8>,\n\n connected: &mut ConnectedEndpointList,\n\n) -> Result<bool, WorkerError> {\n\n Ok(connected.send_message(message, receiver_epid).await?)\n\n}\n\n\n", "file_path": "bee-network/src/endpoint/worker.rs", "rank": 27, "score": 182252.98901273028 }, { "content": "pub fn init(level_filter: log::LevelFilter) {\n\n pretty_env_logger::formatted_timed_builder()\n\n .format_indent(None)\n\n .format(|f, record| {\n\n let ts = f.timestamp();\n\n\n\n let col = match record.level() {\n\n log::Level::Trace => Color::Magenta,\n\n log::Level::Debug => Color::Blue,\n\n log::Level::Info => Color::Green,\n\n log::Level::Warn => Color::Yellow,\n\n log::Level::Error => Color::Red,\n\n };\n\n\n\n let mut level_style = f.style();\n\n level_style.set_color(col).set_bold(true);\n\n\n\n writeln!(f, \"[{} {:>7}] {}\", ts, level_style.value(record.level()), record.args())\n\n })\n\n //.format_timestamp(Some(env_logger::TimestampPrecision::Millis))\n\n .format_timestamp_secs()\n\n .filter_level(level_filter)\n\n .init();\n\n}\n\n\n", 
"file_path": "bee-network/examples/common/logger.rs", "rank": 28, "score": 180675.68139527674 }, { "content": "pub fn channel() -> (EventSender, EventReceiver) {\n\n mpsc::unbounded()\n\n}\n\n\n\n#[derive(Debug)]\n\n#[non_exhaustive]\n\npub enum Event {\n\n EndpointAdded {\n\n epid: EndpointId,\n\n },\n\n\n\n EndpointRemoved {\n\n epid: EndpointId,\n\n },\n\n\n\n ConnectionEstablished {\n\n epid: EndpointId,\n\n peer_address: SocketAddr,\n\n origin: Origin,\n\n data_sender: DataSender,\n", "file_path": "bee-network/src/event.rs", "rank": 29, "score": 180291.84305419627 }, { "content": "pub fn warn(message: &str, context: &str) {\n\n if log_enabled!(log::Level::Warn) {\n\n warn!(\"{} {}\", context, message);\n\n }\n\n}\n\n\n", "file_path": "bee-network/examples/common/logger.rs", "rank": 30, "score": 179121.98589522488 }, { "content": "pub fn info(message: &str, context: &str) {\n\n if log_enabled!(log::Level::Info) {\n\n info!(\"{} {}\", context, message);\n\n }\n\n}\n\n\n", "file_path": "bee-network/examples/common/logger.rs", "rank": 31, "score": 179121.98589522488 }, { "content": "pub fn error(message: &str, context: &str) {\n\n if log_enabled!(log::Level::Error) {\n\n error!(\"{} {}\", context, message);\n\n }\n\n}\n", "file_path": "bee-network/examples/common/logger.rs", "rank": 32, "score": 178901.632913234 }, { "content": "pub fn clone_ms(ms: &Milestone) -> Milestone {\n\n Milestone::new(*ms.hash(), ms.index())\n\n}\n\n\n", "file_path": "bee-test/src/milestone.rs", "rank": 33, "score": 177215.45215087495 }, { "content": "#[test]\n\nfn valid() {\n\n assert_eq!(\n\n GlobalSnapshot::from_file(\"tests/files/global_snapshot_valid.txt\", MilestoneIndex(0))\n\n .unwrap()\n\n .state()\n\n .len(),\n\n 3\n\n );\n\n}\n\n\n", "file_path": "bee-snapshot/tests/global_snapshot.rs", "rank": 34, "score": 167759.7573354961 }, { "content": "#[test]\n\nfn empty() {\n\n assert_eq!(\n\n GlobalSnapshot::from_file(\"tests/files/global_snapshot_empty.txt\", 
MilestoneIndex(0)).err(),\n\n Some(Error::InvalidSupply)\n\n );\n\n}\n\n\n", "file_path": "bee-snapshot/tests/global_snapshot.rs", "rank": 35, "score": 167759.7573354961 }, { "content": "fn on_latest_solid_milestone_changed(latest_solid_milestone: &LatestSolidMilestoneChanged) {\n\n debug!(\"New solid milestone {}.\", *latest_solid_milestone.0.index);\n\n tangle().update_latest_solid_milestone_index(latest_solid_milestone.0.index);\n\n\n\n let ms_sync_count = Protocol::get().config.workers.ms_sync_count;\n\n let next_ms = latest_solid_milestone.0.index + MilestoneIndex(ms_sync_count);\n\n\n\n if !tangle().is_synced() {\n\n if tangle().contains_milestone(next_ms) {\n\n Protocol::trigger_milestone_solidification(next_ms);\n\n } else {\n\n Protocol::request_milestone(next_ms, None);\n\n }\n\n }\n\n\n\n spawn(Protocol::broadcast_heartbeat(\n\n latest_solid_milestone.0.index,\n\n tangle().get_pruning_index(),\n\n tangle().get_latest_milestone_index(),\n\n ));\n\n}\n", "file_path": "bee-protocol/src/protocol/protocol.rs", "rank": 36, "score": 165680.052977674 }, { "content": "fn outer_increment(prestate: &mut PowCurlState) {\n\n for i in OUTER_INCR_START..INNER_INCR_START {\n\n let with_carry = prestate.bit_add(i);\n\n if !with_carry {\n\n break;\n\n }\n\n }\n\n}\n\n\n", "file_path": "bee-pow/src/pearldiver.rs", "rank": 37, "score": 165536.02799927164 }, { "content": "#[test]\n\nfn invalid_supply_more() {\n\n assert_eq!(\n\n GlobalSnapshot::from_file(\"tests/files/global_snapshot_invalid_supply_more.txt\", MilestoneIndex(0)).err(),\n\n Some(Error::InvalidSupply)\n\n );\n\n}\n\n\n", "file_path": "bee-snapshot/tests/global_snapshot.rs", "rank": 38, "score": 165370.17606522448 }, { "content": "#[test]\n\nfn extraneous_semicolon() {\n\n assert_eq!(\n\n GlobalSnapshot::from_file(\n\n \"tests/files/global_snapshot_extraneous_semicolon.txt\",\n\n MilestoneIndex(0)\n\n )\n\n .err(),\n\n Some(Error::ExtraneousSemicolon)\n\n );\n\n}\n\n\n", "file_path": 
"bee-snapshot/tests/global_snapshot.rs", "rank": 39, "score": 165370.17606522448 }, { "content": "#[test]\n\nfn duplicate_address() {\n\n assert_eq!(\n\n GlobalSnapshot::from_file(\"tests/files/global_snapshot_duplicate_address.txt\", MilestoneIndex(0)).err(),\n\n Some(Error::DuplicateAddress)\n\n );\n\n}\n\n\n", "file_path": "bee-snapshot/tests/global_snapshot.rs", "rank": 40, "score": 165370.17606522448 }, { "content": "#[test]\n\nfn missing_semicolon() {\n\n assert_eq!(\n\n GlobalSnapshot::from_file(\"tests/files/global_snapshot_missing_semicolon.txt\", MilestoneIndex(0)).err(),\n\n Some(Error::MissingSemicolon)\n\n );\n\n}\n\n\n", "file_path": "bee-snapshot/tests/global_snapshot.rs", "rank": 41, "score": 165370.17606522448 }, { "content": "#[test]\n\nfn additional_whitespaces() {\n\n assert_eq!(\n\n GlobalSnapshot::from_file(\n\n \"tests/files/global_snapshot_additional_whitespaces.txt\",\n\n MilestoneIndex(0)\n\n )\n\n .err(),\n\n Some(Error::InvalidAddressTryte)\n\n );\n\n}\n\n\n", "file_path": "bee-snapshot/tests/global_snapshot.rs", "rank": 42, "score": 165370.17606522448 }, { "content": "#[test]\n\nfn different_newline() {\n\n assert_eq!(\n\n GlobalSnapshot::from_file(\"tests/files/global_snapshot_different_newline.txt\", MilestoneIndex(0))\n\n .unwrap()\n\n .state()\n\n .len(),\n\n 3\n\n );\n\n}\n", "file_path": "bee-snapshot/tests/global_snapshot.rs", "rank": 43, "score": 165370.17606522448 }, { "content": "pub fn tangle() -> &'static MsTangle {\n\n let tangle = TANGLE.load(Ordering::Relaxed);\n\n if tangle.is_null() {\n\n panic!(\"Tangle cannot be null\");\n\n } else {\n\n unsafe { &*tangle }\n\n }\n\n}\n\n\n\n// #[cfg(test)]\n\n// mod tests {\n\n// use super::*;\n\n// use crate::{tangle::TransactionMetadata, MilestoneIndex};\n\n//\n\n// use bee_tangle::traversal;\n\n// use bee_test::{field::rand_trits_field, transaction::create_random_attached_tx};\n\n//\n\n// #[test]\n\n// fn confirm_transaction() {\n\n// // Example from 
https://github.com/iotaledger/protocol-rfcs/blob/master/text/0005-white-flag/0005-white-flag.md\n", "file_path": "bee-protocol/src/tangle/mod.rs", "rank": 44, "score": 164464.00391115644 }, { "content": "#[test]\n\nfn invalid_supply_less() {\n\n assert_eq!(\n\n GlobalSnapshot::from_file(\"tests/files/global_snapshot_invalid_supply_less.txt\", MilestoneIndex(0)).err(),\n\n Some(Error::InvalidSupply)\n\n );\n\n}\n\n\n", "file_path": "bee-snapshot/tests/global_snapshot.rs", "rank": 45, "score": 163062.91225775486 }, { "content": "#[test]\n\nfn invalid_address_length() {\n\n assert_eq!(\n\n GlobalSnapshot::from_file(\n\n \"tests/files/global_snapshot_invalid_address_length.txt\",\n\n MilestoneIndex(0)\n\n )\n\n .err(),\n\n Some(Error::InvalidAddressLength)\n\n );\n\n}\n\n\n", "file_path": "bee-snapshot/tests/global_snapshot.rs", "rank": 46, "score": 163062.91225775486 }, { "content": "#[test]\n\nfn invalid_address_tryte() {\n\n assert_eq!(\n\n GlobalSnapshot::from_file(\n\n \"tests/files/global_snapshot_invalid_address_tryte.txt\",\n\n MilestoneIndex(0)\n\n )\n\n .err(),\n\n Some(Error::InvalidAddressTryte)\n\n );\n\n}\n\n\n", "file_path": "bee-snapshot/tests/global_snapshot.rs", "rank": 47, "score": 163062.91225775486 }, { "content": "pub fn channel() -> (DataSender, DataReceiver) {\n\n mpsc::unbounded()\n\n}\n", "file_path": "bee-network/src/endpoint/mod.rs", "rank": 48, "score": 162227.53631825742 }, { "content": "pub trait Error: std::fmt::Debug {\n\n fn is_retryable(&self) -> bool;\n\n fn is_still_valid(&self) -> bool;\n\n fn error_msg(&self) -> Option<String>;\n\n}\n", "file_path": "bee-storage/bee-storage/src/access/mod.rs", "rank": 49, "score": 161969.09400564973 }, { "content": "fn inner_increment(prestate: &mut PowCurlState) -> Exhausted {\n\n // we have not exhausted the search space until each add\n\n // operation produces a carry\n\n for i in INNER_INCR_START..HASH_LEN {\n\n if {\n\n let with_carry = prestate.bit_add(i);\n\n !with_carry\n\n } {\n\n return 
false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "bee-pow/src/pearldiver.rs", "rank": 50, "score": 160463.67054726896 }, { "content": "/// The read state of the message handler.\n\n///\n\n/// This type is used by `MessageHandler` to decide what should be read next when handling an\n\n/// event.\n\nenum ReadState {\n\n /// `MessageHandler` should read a header.\n\n Header,\n\n /// `MessageHandler` should read a payload based on a header.\n\n Payload(Header),\n\n}\n\n\n\n/// A message handler.\n\n///\n\n/// It takes care of processing events into messages that can be processed by the workers.\n\npub(super) struct MessageHandler {\n\n events: EventHandler,\n\n // FIXME: see if we can implement `Stream` for the `MessageHandler` and use the\n\n // `ShutdownStream` type instead.\n\n shutdown: ShutdownRecv,\n\n state: ReadState,\n\n /// The address of the peer. This field is only here for logging purposes.\n\n address: SocketAddr,\n\n}\n\n\n", "file_path": "bee-protocol/src/worker/peer/message_handler.rs", "rank": 51, "score": 156445.80737210688 }, { "content": "type ShutdownRecv = future::Fuse<oneshot::Receiver<()>>;\n\n\n", "file_path": "bee-protocol/src/worker/peer/message_handler.rs", "rank": 52, "score": 139503.469239728 }, { "content": " pub(crate) seen_milestones: HashMap<Hash, u32>,\n\n}\n\n\n\nimpl LocalSnapshotMetadata {\n\n pub fn hash(&self) -> &Hash {\n\n &self.inner.hash\n\n }\n\n\n\n pub fn index(&self) -> u32 {\n\n self.inner.snapshot_index\n\n }\n\n\n\n pub fn timestamp(&self) -> u64 {\n\n self.inner.timestamp\n\n }\n\n\n\n pub fn solid_entry_points(&self) -> &HashMap<Hash, u32> {\n\n &self.solid_entry_points\n\n }\n\n\n\n pub fn seen_milestones(&self) -> &HashMap<Hash, u32> {\n\n &self.seen_milestones\n\n }\n\n}\n", "file_path": "bee-snapshot/src/local/metadata.rs", "rank": 53, "score": 136536.86937491482 }, { "content": " Ok(LocalSnapshot {\n\n metadata: LocalSnapshotMetadata {\n\n inner: SnapshotMetadata {\n\n coordinator: Hash::zeros(),\n\n 
hash,\n\n snapshot_index: index,\n\n entry_point_index: index,\n\n pruning_index: index,\n\n timestamp,\n\n },\n\n solid_entry_points,\n\n seen_milestones,\n\n },\n\n state,\n\n })\n\n }\n\n\n\n pub fn to_file(&self, path: &str) -> Result<(), Error> {\n\n let mut writer = BufWriter::new(\n\n OpenOptions::new()\n", "file_path": "bee-snapshot/src/local/file.rs", "rank": 54, "score": 136517.70297923102 }, { "content": "// Copyright 2020 IOTA Stiftung\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except in compliance with\n\n// the License. You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on\n\n// an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and limitations under the License.\n\n\n\nuse crate::metadata::SnapshotMetadata;\n\n\n\nuse bee_crypto::ternary::Hash;\n\n\n\nuse std::collections::HashMap;\n\n\n\npub struct LocalSnapshotMetadata {\n\n pub(crate) inner: SnapshotMetadata,\n\n pub(crate) solid_entry_points: HashMap<Hash, u32>,\n", "file_path": "bee-snapshot/src/local/metadata.rs", "rank": 55, "score": 136511.55446669404 }, { "content": " io::{prelude::*, BufReader},\n\n};\n\n\n\n#[derive(Eq, PartialEq, Debug)]\n\npub enum Error {\n\n FileNotFound,\n\n FailedIO,\n\n MissingSemicolon,\n\n ExtraneousSemicolon,\n\n InvalidAddressTryte,\n\n InvalidAddressLength,\n\n DuplicateAddress,\n\n InvalidBalance,\n\n NullBalance,\n\n InvalidSupply,\n\n DifferentNewline,\n\n}\n\n\n\nimpl GlobalSnapshot {\n\n pub fn from_file(path: &str, index: MilestoneIndex) -> Result<Self, Error> {\n", "file_path": "bee-snapshot/src/global/file.rs", "rank": 56, "score": 136508.68498138347 }, { "content": "// Copyright 2020 IOTA Stiftung\n\n//\n\n// 
Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except in compliance with\n\n// the License. You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on\n\n// an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and limitations under the License.\n\n\n\nuse crate::{constants::IOTA_SUPPLY, global::GlobalSnapshot};\n\n\n\nuse bee_ledger::state::LedgerState;\n\nuse bee_protocol::MilestoneIndex;\n\nuse bee_ternary::{T1B1Buf, TryteBuf};\n\nuse bee_transaction::bundled::{Address, BundledTransactionField};\n\n\n\nuse std::{\n\n fs::File,\n", "file_path": "bee-snapshot/src/global/file.rs", "rank": 57, "score": 136500.06361617346 }, { "content": " };\n\n\n\n let balance = tokens[1].parse::<u64>().map_err(|_| Error::InvalidBalance)?;\n\n\n\n if balance == 0 {\n\n return Err(Error::NullBalance);\n\n }\n\n\n\n if state.insert(address, balance).is_some() {\n\n return Err(Error::DuplicateAddress);\n\n }\n\n\n\n supply += balance;\n\n }\n\n\n\n if supply != IOTA_SUPPLY {\n\n return Err(Error::InvalidSupply);\n\n }\n\n\n\n Ok(Self { state, index })\n\n }\n\n}\n", "file_path": "bee-snapshot/src/global/file.rs", "rank": 58, "score": 136497.3411311785 }, { "content": " let file = File::open(path).map_err(|_| Error::FileNotFound)?;\n\n let reader = BufReader::new(file);\n\n\n\n let mut state = LedgerState::new();\n\n let mut supply = 0;\n\n\n\n for line in reader.lines() {\n\n let line = line.map_err(|_| Error::FailedIO)?;\n\n let tokens = line.split(\";\").collect::<Vec<&str>>();\n\n\n\n if tokens.len() < 2 {\n\n return Err(Error::MissingSemicolon);\n\n } else if tokens.len() > 2 {\n\n return Err(Error::ExtraneousSemicolon);\n\n }\n\n\n\n let address = match 
TryteBuf::try_from_str(tokens[0]) {\n\n Ok(trytes) => Address::try_from_inner(trytes.as_trits().encode::<T1B1Buf>())\n\n .map_err(|_| Error::InvalidAddressLength)?,\n\n Err(_) => return Err(Error::InvalidAddressTryte),\n", "file_path": "bee-snapshot/src/global/file.rs", "rank": 59, "score": 136494.81550760753 }, { "content": "\n\n if let Err(e) = writer.write_all(&mut (self.metadata.seen_milestones.len() as u32).to_le_bytes()) {\n\n return Err(Error::IOError(e));\n\n }\n\n\n\n // Number of balances\n\n\n\n if let Err(e) = writer.write_all(&mut (self.state.len() as u32).to_le_bytes()) {\n\n return Err(Error::IOError(e));\n\n }\n\n\n\n // Number of spent addresses\n\n\n\n if let Err(e) = writer.write_all(&mut 0u32.to_le_bytes()) {\n\n return Err(Error::IOError(e));\n\n }\n\n\n\n // Solid entry points\n\n\n\n for (hash, index) in self.metadata.solid_entry_points.iter() {\n", "file_path": "bee-snapshot/src/local/file.rs", "rank": 60, "score": 136493.74511554962 }, { "content": "\n\n // Milestone index\n\n\n\n if let Err(e) = writer.write_all(&mut self.metadata.inner.snapshot_index.to_le_bytes()) {\n\n return Err(Error::IOError(e));\n\n }\n\n\n\n // Timestamp\n\n\n\n if let Err(e) = writer.write_all(&mut self.metadata.inner.timestamp.to_le_bytes()) {\n\n return Err(Error::IOError(e));\n\n }\n\n\n\n // Number of solid entry points\n\n\n\n if let Err(e) = writer.write_all(&mut (self.metadata.solid_entry_points.len() as u32).to_le_bytes()) {\n\n return Err(Error::IOError(e));\n\n }\n\n\n\n // Number of seen milestones\n", "file_path": "bee-snapshot/src/local/file.rs", "rank": 61, "score": 136493.54769125103 }, { "content": " InvalidSeenMilestoneHash,\n\n InvalidAddress,\n\n InvalidSupply(u64, u64),\n\n}\n\nimpl LocalSnapshot {\n\n pub fn from_file(path: &str) -> Result<LocalSnapshot, Error> {\n\n let mut reader = BufReader::new(OpenOptions::new().read(true).open(path).map_err(Error::IOError)?);\n\n\n\n // Version byte\n\n\n\n let mut buf = [0u8];\n\n let version = match 
reader.read_exact(&mut buf) {\n\n Ok(_) => buf[0],\n\n Err(e) => return Err(Error::IOError(e)),\n\n };\n\n\n\n if version != VERSION {\n\n return Err(Error::InvalidVersion(version, VERSION));\n\n }\n\n\n", "file_path": "bee-snapshot/src/local/file.rs", "rank": 62, "score": 136486.90761232594 }, { "content": " let mut buf = [0u8; std::mem::size_of::<u32>()];\n\n let solid_entry_points_num = match reader.read_exact(&mut buf) {\n\n Ok(_) => u32::from_le_bytes(buf),\n\n Err(e) => return Err(Error::IOError(e)),\n\n };\n\n\n\n debug!(\"Solid entry points: {}.\", solid_entry_points_num);\n\n\n\n // Number of seen milestones\n\n\n\n let mut buf = [0u8; std::mem::size_of::<u32>()];\n\n let seen_milestones_num = match reader.read_exact(&mut buf) {\n\n Ok(_) => u32::from_le_bytes(buf),\n\n Err(e) => return Err(Error::IOError(e)),\n\n };\n\n\n\n debug!(\"Seen milestones: {}.\", seen_milestones_num);\n\n\n\n // Number of balances\n\n\n", "file_path": "bee-snapshot/src/local/file.rs", "rank": 63, "score": 136485.22171738185 }, { "content": " let mut buf_hash = [0u8; 49];\n\n let mut buf_index = [0u8; std::mem::size_of::<u32>()];\n\n let mut solid_entry_points = HashMap::with_capacity(solid_entry_points_num as usize);\n\n for _ in 0..solid_entry_points_num {\n\n let hash = match reader.read_exact(&mut buf_hash) {\n\n Ok(_) => match Trits::<T5B1>::try_from_raw(cast_slice(&buf_hash), HASH_LENGTH) {\n\n Ok(trits) => {\n\n Hash::try_from_inner(trits.encode::<T1B1Buf>()).map_err(|_| Error::InvalidSolidEntryPointHash)\n\n }\n\n Err(_) => Err(Error::InvalidSolidEntryPointHash),\n\n },\n\n Err(e) => Err(Error::IOError(e)),\n\n }?;\n\n let index = match reader.read_exact(&mut buf_index) {\n\n Ok(_) => u32::from_le_bytes(buf_index),\n\n Err(e) => return Err(Error::IOError(e)),\n\n };\n\n solid_entry_points.insert(hash, index);\n\n }\n\n\n", "file_path": "bee-snapshot/src/local/file.rs", "rank": 64, "score": 136483.3599517023 }, { "content": " let mut buf = [0u8; 
std::mem::size_of::<u32>()];\n\n let index = match reader.read_exact(&mut buf) {\n\n Ok(_) => u32::from_le_bytes(buf),\n\n Err(e) => return Err(Error::IOError(e)),\n\n };\n\n\n\n debug!(\"Index: {}.\", index);\n\n\n\n // Timestamp\n\n\n\n let mut buf = [0u8; std::mem::size_of::<u64>()];\n\n let timestamp = match reader.read_exact(&mut buf) {\n\n Ok(_) => u64::from_le_bytes(buf),\n\n Err(e) => return Err(Error::IOError(e)),\n\n };\n\n\n\n debug!(\"Timestamp: {}.\", timestamp);\n\n\n\n // Number of solid entry points\n\n\n", "file_path": "bee-snapshot/src/local/file.rs", "rank": 65, "score": 136482.50872203492 }, { "content": " // Seen milestones\n\n\n\n let mut buf_hash = [0u8; 49];\n\n let mut buf_index = [0u8; std::mem::size_of::<u32>()];\n\n let mut seen_milestones = HashMap::with_capacity(seen_milestones_num as usize);\n\n for _ in 0..seen_milestones_num {\n\n let seen_milestone = match reader.read_exact(&mut buf_hash) {\n\n Ok(_) => match Trits::<T5B1>::try_from_raw(cast_slice(&buf_hash), HASH_LENGTH) {\n\n Ok(trits) => {\n\n Hash::try_from_inner(trits.encode::<T1B1Buf>()).map_err(|_| Error::InvalidSeenMilestoneHash)\n\n }\n\n Err(_) => Err(Error::InvalidSeenMilestoneHash),\n\n },\n\n Err(e) => Err(Error::IOError(e)),\n\n }?;\n\n let index = match reader.read_exact(&mut buf_index) {\n\n Ok(_) => u32::from_le_bytes(buf_index),\n\n Err(e) => return Err(Error::IOError(e)),\n\n };\n\n seen_milestones.insert(seen_milestone, index);\n", "file_path": "bee-snapshot/src/local/file.rs", "rank": 66, "score": 136478.05772133896 }, { "content": " if let Err(e) = writer.write_all(&mut cast_slice(hash.as_trits().encode::<T5B1Buf>().as_i8_slice())) {\n\n return Err(Error::IOError(e));\n\n }\n\n if let Err(e) = writer.write_all(&mut index.to_le_bytes()) {\n\n return Err(Error::IOError(e));\n\n }\n\n }\n\n\n\n // Seen milestones\n\n\n\n for (hash, index) in self.metadata.seen_milestones.iter() {\n\n if let Err(e) = writer.write_all(&mut 
cast_slice(hash.as_trits().encode::<T5B1Buf>().as_i8_slice())) {\n\n return Err(Error::IOError(e));\n\n }\n\n if let Err(e) = writer.write_all(&mut index.to_le_bytes()) {\n\n return Err(Error::IOError(e));\n\n }\n\n }\n\n\n\n // Balances\n", "file_path": "bee-snapshot/src/local/file.rs", "rank": 67, "score": 136477.79145392566 }, { "content": "use bee_transaction::bundled::{Address, BundledTransactionField};\n\n\n\nuse bytemuck::cast_slice;\n\nuse log::debug;\n\n\n\nuse std::{\n\n collections::HashMap,\n\n fs::OpenOptions,\n\n io::{BufReader, BufWriter, Read, Write},\n\n};\n\n\n\nconst VERSION: u8 = 4;\n\n\n\n// TODO detail errors\n\n#[derive(Debug)]\n\npub enum Error {\n\n IOError(std::io::Error),\n\n InvalidVersion(u8, u8),\n\n InvalidMilestoneHash,\n\n InvalidSolidEntryPointHash,\n", "file_path": "bee-snapshot/src/local/file.rs", "rank": 68, "score": 136474.58280908313 }, { "content": " }\n\n\n\n // Balances\n\n\n\n let mut buf_address = [0u8; 49];\n\n let mut buf_value = [0u8; std::mem::size_of::<u64>()];\n\n let mut state = LedgerState::with_capacity(balances_num as usize);\n\n let mut supply: u64 = 0;\n\n for i in 0..balances_num {\n\n let address = match reader.read_exact(&mut buf_address) {\n\n Ok(_) => match Trits::<T5B1>::try_from_raw(cast_slice(&buf_address), HASH_LENGTH) {\n\n Ok(trits) => Address::try_from_inner(trits.encode::<T1B1Buf>()).map_err(|_| Error::InvalidAddress),\n\n Err(_) => Err(Error::InvalidAddress),\n\n },\n\n Err(e) => Err(Error::IOError(e)),\n\n }?;\n\n let value = match reader.read_exact(&mut buf_value) {\n\n Ok(_) => u64::from_le_bytes(buf_value),\n\n Err(e) => return Err(Error::IOError(e)),\n\n };\n", "file_path": "bee-snapshot/src/local/file.rs", "rank": 69, "score": 136473.46871326113 }, { "content": " let mut buf = [0u8; std::mem::size_of::<u32>()];\n\n let balances_num = match reader.read_exact(&mut buf) {\n\n Ok(_) => u32::from_le_bytes(buf),\n\n Err(e) => return Err(Error::IOError(e)),\n\n };\n\n\n\n debug!(\"Balances: {}.\", 
balances_num);\n\n\n\n // Number of spent addresses\n\n\n\n let mut buf = [0u8; std::mem::size_of::<u32>()];\n\n let spent_addresses_num = match reader.read_exact(&mut buf) {\n\n Ok(_) => u32::from_le_bytes(buf),\n\n Err(e) => return Err(Error::IOError(e)),\n\n };\n\n\n\n debug!(\"Spent addresses: {}.\", spent_addresses_num);\n\n\n\n // Solid entry points\n\n\n", "file_path": "bee-snapshot/src/local/file.rs", "rank": 70, "score": 136471.3020247163 }, { "content": " debug!(\"Version: {}.\", version);\n\n\n\n // Milestone hash\n\n\n\n let mut buf = [0u8; 49];\n\n let hash = match reader.read_exact(&mut buf) {\n\n Ok(_) => match Trits::<T5B1>::try_from_raw(cast_slice(&buf), HASH_LENGTH) {\n\n Ok(trits) => Hash::try_from_inner(trits.encode::<T1B1Buf>()).map_err(|_| Error::InvalidMilestoneHash),\n\n Err(_) => Err(Error::InvalidMilestoneHash),\n\n },\n\n Err(e) => Err(Error::IOError(e)),\n\n }?;\n\n\n\n debug!(\n\n \"Hash: {}.\",\n\n hash.iter_trytes().map(|trit| char::from(trit)).collect::<String>()\n\n );\n\n\n\n // Milestone index\n\n\n", "file_path": "bee-snapshot/src/local/file.rs", "rank": 71, "score": 136468.99393957085 }, { "content": "// Copyright 2020 IOTA Stiftung\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except in compliance with\n\n// the License. 
You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on\n\n// an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and limitations under the License.\n\n\n\nuse crate::{\n\n constants::IOTA_SUPPLY,\n\n local::{LocalSnapshot, LocalSnapshotMetadata},\n\n metadata::SnapshotMetadata,\n\n};\n\n\n\nuse bee_crypto::ternary::{Hash, HASH_LENGTH};\n\nuse bee_ledger::state::LedgerState;\n\nuse bee_ternary::{T1B1Buf, T5B1Buf, Trits, T5B1};\n", "file_path": "bee-snapshot/src/local/file.rs", "rank": 72, "score": 136468.71505254228 }, { "content": "\n\n for (address, balance) in self.state.iter() {\n\n if let Err(e) = writer.write_all(&mut cast_slice(address.to_inner().encode::<T5B1Buf>().as_i8_slice())) {\n\n return Err(Error::IOError(e));\n\n }\n\n if let Err(e) = writer.write_all(&mut balance.to_le_bytes()) {\n\n return Err(Error::IOError(e));\n\n }\n\n }\n\n\n\n // TODO hash ?\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "bee-snapshot/src/local/file.rs", "rank": 73, "score": 136463.45605368202 }, { "content": " .write(true)\n\n .truncate(true)\n\n .create(true)\n\n .open(path)\n\n .map_err(Error::IOError)?,\n\n );\n\n\n\n // Version byte\n\n\n\n if let Err(e) = writer.write_all(&mut [VERSION]) {\n\n return Err(Error::IOError(e));\n\n };\n\n\n\n // Milestone hash\n\n\n\n if let Err(e) = writer.write_all(&mut cast_slice(\n\n self.metadata.inner.hash.to_inner().encode::<T5B1Buf>().as_i8_slice(),\n\n )) {\n\n return Err(Error::IOError(e));\n\n }\n", "file_path": "bee-snapshot/src/local/file.rs", "rank": 74, "score": 136462.7501668912 }, { "content": "\n\n if i % 50_000 == 0 && i != 0 {\n\n debug!(\n\n \"Read {}/{} ({:.0}%) balances.\",\n\n i,\n\n balances_num,\n\n ((i * 100) as f64) / (balances_num as f64)\n\n 
);\n\n }\n\n\n\n state.insert(address, value);\n\n supply += value;\n\n }\n\n\n\n if supply != IOTA_SUPPLY {\n\n return Err(Error::InvalidSupply(supply, IOTA_SUPPLY));\n\n }\n\n\n\n // TODO hash ?\n\n\n", "file_path": "bee-snapshot/src/local/file.rs", "rank": 75, "score": 136457.91402532387 }, { "content": "}\n\n\n\nimpl PruningConfigBuilder {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n pub fn enabled(mut self, enabled: bool) -> Self {\n\n self.enabled.replace(enabled);\n\n self\n\n }\n\n\n\n pub fn delay(mut self, delay: u32) -> Self {\n\n self.delay.replace(delay);\n\n self\n\n }\n\n\n\n pub fn finish(self) -> PruningConfig {\n\n PruningConfig {\n\n enabled: self.enabled.unwrap_or(DEFAULT_ENABLED),\n", "file_path": "bee-snapshot/src/pruning/config.rs", "rank": 76, "score": 136367.47176116557 }, { "content": " delay: self.delay.unwrap_or(DEFAULT_DELAY),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct PruningConfig {\n\n enabled: bool,\n\n delay: u32,\n\n}\n\n\n\nimpl PruningConfig {\n\n pub fn build() -> PruningConfigBuilder {\n\n PruningConfigBuilder::new()\n\n }\n\n\n\n pub fn enabled(&self) -> bool {\n\n self.enabled\n\n }\n\n\n\n pub fn delay(&self) -> u32 {\n\n self.delay\n\n }\n\n}\n", "file_path": "bee-snapshot/src/pruning/config.rs", "rank": 77, "score": 136363.01911931677 }, { "content": "// Copyright 2020 IOTA Stiftung\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except in compliance with\n\n// the License. 
You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on\n\n// an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and limitations under the License.\n\n\n\nuse serde::Deserialize;\n\n\n\nconst DEFAULT_ENABLED: bool = true;\n\nconst DEFAULT_DELAY: u32 = 60480;\n\n\n\n#[derive(Default, Deserialize)]\n\npub struct PruningConfigBuilder {\n\n enabled: Option<bool>,\n\n delay: Option<u32>,\n", "file_path": "bee-snapshot/src/pruning/config.rs", "rank": 78, "score": 136360.92527809937 }, { "content": "}\n\n\n\nimpl GlobalSnapshotConfigBuilder {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n pub fn path(mut self, path: String) -> Self {\n\n self.path.replace(path);\n\n self\n\n }\n\n\n\n pub fn index(mut self, index: u32) -> Self {\n\n self.index.replace(index);\n\n self\n\n }\n\n\n\n pub fn finish(self) -> GlobalSnapshotConfig {\n\n GlobalSnapshotConfig {\n\n path: self.path.unwrap_or_else(|| DEFAULT_PATH.to_string()),\n", "file_path": "bee-snapshot/src/global/config.rs", "rank": 79, "score": 136313.8290842845 }, { "content": " index: self.index.unwrap_or(DEFAULT_INDEX),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct GlobalSnapshotConfig {\n\n path: String,\n\n index: u32,\n\n}\n\n\n\nimpl GlobalSnapshotConfig {\n\n pub fn build() -> GlobalSnapshotConfigBuilder {\n\n GlobalSnapshotConfigBuilder::new()\n\n }\n\n\n\n pub fn path(&self) -> &String {\n\n &self.path\n\n }\n\n\n\n pub fn index(&self) -> &u32 {\n\n &self.index\n\n }\n\n}\n", "file_path": "bee-snapshot/src/global/config.rs", "rank": 80, "score": 136310.09536599397 }, { "content": "// Copyright 2020 IOTA Stiftung\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file 
except in compliance with\n\n// the License. You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on\n\n// an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and limitations under the License.\n\n\n\nuse serde::Deserialize;\n\n\n\nconst DEFAULT_PATH: &str = \"./snapshots/mainnet/snapshot.txt\";\n\nconst DEFAULT_INDEX: u32 = 1050000;\n\n\n\n#[derive(Default, Deserialize)]\n\npub struct GlobalSnapshotConfigBuilder {\n\n path: Option<String>,\n\n index: Option<u32>,\n", "file_path": "bee-snapshot/src/global/config.rs", "rank": 81, "score": 136305.80668848328 }, { "content": " self\n\n }\n\n\n\n pub fn depth(mut self, depth: u32) -> Self {\n\n self.depth.replace(depth);\n\n self\n\n }\n\n\n\n pub fn interval_synced(mut self, interval_synced: u32) -> Self {\n\n self.interval_synced.replace(interval_synced);\n\n self\n\n }\n\n\n\n pub fn interval_unsynced(mut self, interval_unsynced: u32) -> Self {\n\n self.interval_unsynced.replace(interval_unsynced);\n\n self\n\n }\n\n\n\n pub fn finish(self) -> LocalSnapshotConfig {\n\n LocalSnapshotConfig {\n", "file_path": "bee-snapshot/src/local/config.rs", "rank": 82, "score": 136274.5698880723 }, { "content": "pub struct LocalSnapshotConfigBuilder {\n\n path: Option<String>,\n\n download_urls: Option<Vec<String>>,\n\n depth: Option<u32>,\n\n interval_synced: Option<u32>,\n\n interval_unsynced: Option<u32>,\n\n}\n\n\n\nimpl LocalSnapshotConfigBuilder {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n pub fn path(mut self, path: String) -> Self {\n\n self.path.replace(path);\n\n self\n\n }\n\n\n\n pub fn download_urls(mut self, download_urls: Vec<String>) -> Self {\n\n self.download_urls.replace(download_urls);\n", "file_path": 
"bee-snapshot/src/local/config.rs", "rank": 83, "score": 136273.24454177724 }, { "content": " path: self.path.unwrap_or_else(|| DEFAULT_PATH.to_string()),\n\n download_urls: self.download_urls.unwrap_or_else(|| DEFAULT_DOWNLOAD_URLS),\n\n depth: self.depth.unwrap_or(DEFAULT_DEPTH),\n\n interval_synced: self.interval_synced.unwrap_or(DEFAULT_INTERVAL_SYNCED),\n\n interval_unsynced: self.interval_unsynced.unwrap_or(DEFAULT_INTERVAL_UNSYNCED),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct LocalSnapshotConfig {\n\n path: String,\n\n download_urls: Vec<String>,\n\n depth: u32,\n\n interval_synced: u32,\n\n interval_unsynced: u32,\n\n}\n\n\n\nimpl LocalSnapshotConfig {\n\n pub fn build() -> LocalSnapshotConfigBuilder {\n", "file_path": "bee-snapshot/src/local/config.rs", "rank": 84, "score": 136268.75137072324 }, { "content": " LocalSnapshotConfigBuilder::new()\n\n }\n\n\n\n pub fn path(&self) -> &String {\n\n &self.path\n\n }\n\n\n\n pub fn download_urls(&self) -> &Vec<String> {\n\n &self.download_urls\n\n }\n\n\n\n pub fn depth(&self) -> u32 {\n\n self.depth\n\n }\n\n\n\n pub fn interval_synced(&self) -> u32 {\n\n self.interval_synced\n\n }\n\n\n\n pub fn interval_unsynced(&self) -> u32 {\n\n self.interval_unsynced\n\n }\n\n}\n", "file_path": "bee-snapshot/src/local/config.rs", "rank": 85, "score": 136267.10085579858 }, { "content": "// Copyright 2020 IOTA Stiftung\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except in compliance with\n\n// the License. 
You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on\n\n// an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and limitations under the License.\n\n\n\nuse serde::Deserialize;\n\n\n\nconst DEFAULT_PATH: &str = \"./snapshots/mainnet/export.bin\";\n\nconst DEFAULT_DOWNLOAD_URLS: Vec<String> = Vec::new();\n\nconst DEFAULT_DEPTH: u32 = 50;\n\nconst DEFAULT_INTERVAL_SYNCED: u32 = 50;\n\nconst DEFAULT_INTERVAL_UNSYNCED: u32 = 1000;\n\n\n\n#[derive(Default, Deserialize)]\n", "file_path": "bee-snapshot/src/local/config.rs", "rank": 86, "score": 136256.19686304513 }, { "content": "#[async_trait]\n\npub trait Worker<N: Node>: Any {\n\n const DEPS: &'static [TypeId] = &[];\n\n\n\n type Config;\n\n type Error;\n\n type Event;\n\n type Receiver: Stream<Item = Self::Event>;\n\n\n\n async fn start(self, receiver: Self::Receiver, node: Arc<N>, config: Self::Config) -> Result<(), Self::Error>;\n\n async fn stop(self) -> Result<(), Self::Error>\n\n where\n\n Self: Sized,\n\n {\n\n Ok(())\n\n }\n\n}\n", "file_path": "bee-common-ext/src/worker.rs", "rank": 87, "score": 136092.50540261448 }, { "content": " MilestoneIndex,\n\n};\n\nuse bee_tangle::traversal;\n\n\n\nuse dashmap::DashMap;\n\n\n\nuse log::{info, warn};\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n MilestoneNotFoundInTangle(u32),\n\n MetadataNotFound(Hash),\n\n}\n\n\n\n/// Checks whether any direct approver of the given transaction was confirmed by a\n\n/// milestone which is above the target milestone.\n", "file_path": "bee-snapshot/src/pruning/mod.rs", "rank": 88, "score": 135866.4684987237 }, { "content": " tangle().update_entry_point_index(target_index);\n\n\n\n prune_unconfirmed_transactions(&tangle().get_pruning_index());\n\n\n\n // 
Iterate through all milestones that have to be pruned.\n\n for milestone_index in *tangle().get_pruning_index() + 1..*target_index + 1 {\n\n info!(\"Pruning milestone {}...\", milestone_index);\n\n\n\n // TODO calculate the deleted tx count and visited tx count if needed\n\n let pruned_unconfirmed_transactions_count = prune_unconfirmed_transactions(&MilestoneIndex(milestone_index));\n\n\n\n // NOTE Actually we don't really need the tail, and only need one of the milestone tx.\n\n // In gohornet, we start from the tail milestone tx.\n\n let milestone_hash;\n\n if let Some(hash) = tangle().get_milestone_hash(MilestoneIndex(milestone_index)) {\n\n milestone_hash = hash;\n\n } else {\n\n warn!(\"Pruning milestone {} failed! Milestone not found!\", milestone_index);\n\n continue;\n\n }\n", "file_path": "bee-snapshot/src/pruning/mod.rs", "rank": 89, "score": 135863.3108811231 }, { "content": "// Copyright 2020 IOTA Stiftung\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except in compliance with\n\n// the License. 
You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on\n\n// an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and limitations under the License.\n\n\n\nmod config;\n\n\n\npub use config::{PruningConfig, PruningConfigBuilder};\n\n\n\nuse crate::constants::{ADDITIONAL_PRUNING_THRESHOLD, SOLID_ENTRY_POINT_CHECK_THRESHOLD_PAST};\n\n\n\nuse bee_crypto::ternary::Hash;\n\nuse bee_protocol::{\n\n tangle::{helper, tangle},\n", "file_path": "bee-snapshot/src/pruning/mod.rs", "rank": 90, "score": 135862.533819593 }, { "content": "\n\n let mut transactions_to_prune: Vec<Hash> = Vec::new();\n\n\n\n // Traverse the approvees of the milestone transaction.\n\n traversal::visit_parents_depth_first(\n\n tangle(),\n\n milestone_hash,\n\n |_, _, _| {\n\n // NOTE everything that was referenced by that milestone can be pruned\n\n // (even transactions of older milestones)\n\n true\n\n },\n\n |hash, _, _| transactions_to_prune.push(hash.clone()),\n\n |_, _, _| {},\n\n |_| {},\n\n );\n\n\n\n // NOTE The metadata of solid entry points can be deleted from the database,\n\n // because we only need the hashes of them, and don't have to trace their parents.\n\n let transactions_to_prune_count = transactions_to_prune.len();\n", "file_path": "bee-snapshot/src/pruning/mod.rs", "rank": 91, "score": 135862.5224610928 }, { "content": " // Find all tails.\n\n helper::on_all_tails(tangle(), *hash, |hash, _tx, metadata| {\n\n solid_entry_points.insert(hash.clone(), metadata.milestone_index());\n\n });\n\n }\n\n },\n\n |_, _, _| {},\n\n |_| {},\n\n );\n\n }\n\n Ok(solid_entry_points)\n\n}\n\n\n", "file_path": "bee-snapshot/src/pruning/mod.rs", "rank": 92, "score": 135862.0948284547 }, { "content": " let 
pruned_transactions_count = prune_transactions(transactions_to_prune);\n\n\n\n prune_milestone(MilestoneIndex(milestone_index));\n\n\n\n tangle().update_pruning_index(MilestoneIndex(milestone_index));\n\n info!(\n\n \"Pruning milestone {}. Pruned {}/{} confirmed transactions. Pruned {} unconfirmed transactions.\",\n\n milestone_index,\n\n pruned_transactions_count,\n\n transactions_to_prune_count,\n\n pruned_unconfirmed_transactions_count\n\n );\n\n // TODO trigger pruning milestone index changed event if needed.\n\n // notify peers about our new pruning milestone index by\n\n // broadcast_heartbeat()\n\n }\n\n Ok(())\n\n}\n", "file_path": "bee-snapshot/src/pruning/mod.rs", "rank": 93, "score": 135851.76743979537 }, { "content": "// Copyright 2020 IOTA Stiftung\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use this file except in compliance with\n\n// the License. You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on\n\n// an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and limitations under the License.\n\n\n\nmod config;\n\nmod file;\n\n\n\npub use config::{GlobalSnapshotConfig, GlobalSnapshotConfigBuilder};\n\npub use file::Error as FileError;\n\n\n\nuse bee_ledger::state::LedgerState;\n\nuse bee_protocol::MilestoneIndex;\n\n\n", "file_path": "bee-snapshot/src/global/mod.rs", "rank": 94, "score": 135809.92289558132 }, { "content": " hash: Hash::zeros(),\n\n snapshot_index: index,\n\n entry_point_index: index,\n\n pruning_index: index,\n\n timestamp: 0,\n\n },\n\n solid_entry_points: HashMap::new(),\n\n seen_milestones: HashMap::new(),\n\n },\n\n state: LedgerState::new(),\n\n };\n\n\n\n let file = path.to_string() + \"_tmp\";\n\n\n\n if let 
Err(e) = ls.to_file(&file) {\n\n error!(\"Failed to write local snapshot to file {}: {:?}.\", file, e);\n\n }\n\n\n\n info!(\"Created local snapshot at index {}.\", index);\n\n\n\n Ok(())\n\n}\n", "file_path": "bee-snapshot/src/local/mod.rs", "rank": 95, "score": 135797.77018897654 }, { "content": "pub struct GlobalSnapshot {\n\n index: MilestoneIndex,\n\n state: LedgerState,\n\n}\n\n\n\nimpl GlobalSnapshot {\n\n pub fn index(&self) -> &MilestoneIndex {\n\n &self.index\n\n }\n\n\n\n pub fn state(&self) -> &LedgerState {\n\n &self.state\n\n }\n\n\n\n pub fn into_state(self) -> LedgerState {\n\n self.state\n\n }\n\n}\n", "file_path": "bee-snapshot/src/global/mod.rs", "rank": 96, "score": 135794.86814206312 }, { "content": "pub use metadata::LocalSnapshotMetadata;\n\n\n\nuse crate::metadata::SnapshotMetadata;\n\n\n\nuse bee_crypto::ternary::Hash;\n\nuse bee_ledger::state::LedgerState;\n\n\n\nuse log::{error, info};\n\n\n\nuse std::collections::HashMap;\n\n\n\npub struct LocalSnapshot {\n\n pub(crate) metadata: LocalSnapshotMetadata,\n\n pub(crate) state: LedgerState,\n\n}\n\n\n\nimpl LocalSnapshot {\n\n pub fn metadata(&self) -> &LocalSnapshotMetadata {\n\n &self.metadata\n\n }\n", "file_path": "bee-snapshot/src/local/mod.rs", "rank": 97, "score": 135776.26914562285 }, { "content": "\n\n pub fn state(&self) -> &LedgerState {\n\n &self.state\n\n }\n\n\n\n pub fn into_state(self) -> LedgerState {\n\n self.state\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub(crate) enum Error {}\n\n\n\npub(crate) fn snapshot(path: &str, index: u32) -> Result<(), Error> {\n\n info!(\"Creating local snapshot at index {}...\", index);\n\n\n\n let ls = LocalSnapshot {\n\n metadata: LocalSnapshotMetadata {\n\n inner: SnapshotMetadata {\n\n coordinator: Hash::zeros(),\n", "file_path": "bee-snapshot/src/local/mod.rs", "rank": 98, "score": 135775.73362697876 }, { "content": "// Copyright 2020 IOTA Stiftung\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\"); you may not use 
this file except in compliance with\n\n// the License. You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on\n\n// an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and limitations under the License.\n\n\n\nmod config;\n\nmod download;\n\nmod file;\n\nmod metadata;\n\n\n\npub(crate) use download::{download_local_snapshot, Error as DownloadError};\n\n\n\npub use config::{LocalSnapshotConfig, LocalSnapshotConfigBuilder};\n\npub use file::Error as FileError;\n", "file_path": "bee-snapshot/src/local/mod.rs", "rank": 99, "score": 135772.03097476877 } ]
Rust
xtask/src/main.rs
dureuill/yew-sse-example
c1bbc94bbcbd91cb7a4b7ab3a3361514983ff1bf
use std::ops::Index; use cargo_metadata::camino::Utf8PathBuf; use cargo_metadata::Metadata; use clap::crate_version; use clap::Clap; use color_eyre::eyre::Context; use color_eyre::eyre::ContextCompat; use color_eyre::eyre::Result; use color_eyre::owo_colors::colors::xterm::UserGreen; use color_eyre::owo_colors::colors::xterm::UserYellow; use color_eyre::owo_colors::OwoColorize; use color_eyre::Help; #[derive(Clap)] #[clap(version=crate_version!())] struct Opts { #[clap(subcommand)] cmd: SubCommand, } #[derive(Clap)] enum SubCommand { Dist(Dist), Run(Run), Install(Install), } #[derive(Clap)] struct Dist { #[clap(long)] release: bool, } #[derive(Clap)] struct Run { #[clap(long)] release: bool, } impl Run { fn to_dist(&self) -> Dist { Dist { release: self.release, } } } #[derive(Clap)] struct Install { #[clap(short, long)] output: Option<Utf8PathBuf>, } impl Install { fn to_dist(&self) -> Dist { Dist { release: true, } } } fn main() -> Result<()> { color_eyre::install()?; let opts: Opts = Opts::parse(); match opts.cmd { SubCommand::Dist(config) => dist(config)?, SubCommand::Run(config) => run(config)?, SubCommand::Install(config) => install(config)?, } Ok(()) } fn dist_path(metadata: &Metadata, is_release: bool) -> Utf8PathBuf { metadata .target_directory .join("static/dist") .join(if is_release { "release" } else { "debug" }) } fn backend_path(metadata: &Metadata, is_release: bool) -> Utf8PathBuf { metadata .target_directory .join(if is_release { "release" } else { "debug" }) .join("backend") } fn output_default_path(metadata: &Metadata) -> Utf8PathBuf { metadata.workspace_root.join("output") } fn dist(config: Dist) -> Result<()> { let cmd = cargo_metadata::MetadataCommand::new(); let metadata = cmd.exec()?; let frontend = metadata .workspace_members .iter() .find(|pkg| metadata[pkg].name == "frontend") .wrap_err("Could not find package 'frontend'")?; let html_path = metadata .index(frontend) .manifest_path .with_file_name("index.html"); let dist_path = 
dist_path(&metadata, config.release); println!( "- Distributing frontend in {}", dist_path.bold().fg::<UserGreen>() ); std::fs::create_dir_all(&dist_path).wrap_err("Could not write to the target directory")?; let trunk_version = duct::cmd!("trunk", "--version") .read() .wrap_err("Could not find `trunk`") .note("`trunk` is required for the build") .suggestion("Install `trunk` with `cargo install trunk`")?; println!("- Using {}", trunk_version.bold().fg::<UserGreen>()); let release = if config.release { Some("--release") } else { None }; let args = IntoIterator::into_iter(["build", "--dist", dist_path.as_str(), html_path.as_str()]) .chain(release); duct::cmd("trunk", args).run()?; Ok(()) } fn run(config: Run) -> Result<()> { dist(config.to_dist())?; let cmd = cargo_metadata::MetadataCommand::new(); let metadata = cmd.exec()?; let dist_path = dist_path(&metadata, config.release); let release = if config.release { Some("--release") } else { None }; let args = IntoIterator::into_iter(["run", "-p", "backend"]).chain(release); duct::cmd("cargo", args) .env("ROCKET_DIST", dist_path) .run() .wrap_err("Could not run server")?; Ok(()) } fn install(config: Install) -> Result<()> { dist(config.to_dist())?; let cmd = cargo_metadata::MetadataCommand::new(); let metadata = cmd.exec()?; let dist_path = dist_path(&metadata, true); let backend_path = backend_path(&metadata, true); let output_dir = config.output.unwrap_or_else(|| output_default_path(&metadata)); println!( "- Building backend in {}", backend_path.bold().fg::<UserGreen>() ); let args = IntoIterator::into_iter(["build", "--release", "-p", "backend"]); duct::cmd("cargo", args) .run() .wrap_err("Could not build backend")?; println!( "- Copying frontend to {}", output_dir.join("static/dist").bold().fg::<UserGreen>() ); std::fs::create_dir_all(&output_dir).wrap_err("Cannot create output dir")?; std::fs::remove_dir_all(&output_dir).wrap_err("Error while cleaning output directory")?; 
std::fs::create_dir_all(&output_dir.join("static")).wrap_err("Cannot create output dir")?; let errors = copy_dir::copy_dir(&dist_path, &output_dir.join("static/dist")) .wrap_err("Could not copy dist dir to output dir")?; if !errors.is_empty() { eprintln!( "{} Copy succeeded, but the following errors occurred during the copy:", "WARNING:".bold().fg::<UserYellow>() ); for error in errors { eprintln!("\t{}", error.fg::<UserYellow>()) } } println!( "- Copying backend to {}", output_dir.join("backend").bold().fg::<UserGreen>() ); std::fs::copy(backend_path, output_dir.join("backend")) .wrap_err("Copying the backend failed")?; Ok(()) }
use std::ops::Index; use cargo_metadata::camino::Utf8PathBuf; use cargo_metadata::Metadata; use clap::crate_version; use clap::Clap; use color_eyre::eyre::Context; use color_eyre::eyre::ContextCompat; use color_eyre::eyre::Result; use color_eyre::owo_colors::colors::xterm::UserGreen; use color_eyre::owo_colors::colors::xterm::UserYellow; use color_eyre::owo_colors::OwoColorize; use color_eyre::Help; #[derive(Clap)] #[clap(version=crate_version!())] struct Opts { #[clap(subcommand)] cmd: SubCommand, } #[derive(Clap)] enum SubCommand { Dist(Dist), Run(Run), Install(Install), } #[derive(Clap)] struct Dist { #[clap(long)] release: bool, } #[derive(Clap)] struct Run { #[clap(long)] release: bool, } impl Run { fn to_dist(&self) -> Dist { Dist { release: self.release, } } } #[derive(Clap)] struct Install { #[clap(short, long)] output: Option<Utf8PathBuf>, } impl Install { fn to_dist(&self) -> Dist { Dist { release: true, } } } fn main() -> Result<()> { color_eyre::install()?; let opts: Opts = Opts::parse(); match opts.cmd { SubCommand::Dist(config) => dist(config)?, SubCommand::Run(config) => run(config)?, SubCommand::Install(config) => install(config)?, } Ok(()) } fn dist_path(metadata: &Metadata, is_release: bool) -> Utf8PathBuf { metadata .target_directory .join("static/dist") .join(if is_release { "release" } else { "debug" }) } fn backend_path(metadata: &Metadata, is_release: bool) -> Utf8PathBuf { metadata .target_directory .join(if is_release { "release" } else { "debug" }) .join("backend") } fn output_default_path(metadata: &Metadata) -> Utf8PathBuf { metadata.workspace_root.join("output") } fn dist(config: Dist) -> Result<()> { let cmd = cargo_metadata::MetadataCommand::new(); let metadata = cmd.exec()?; let frontend = metadata .workspace_members .iter() .find(|pkg| metadata[pkg].name == "frontend") .wrap_err("Could not find package 'frontend'")?; let html_path = metadata .index(frontend) .manifest_path .with_file_name("index.html"); let dist_path = 
dist_path(&metadata, config.release); println!( "- Distributing frontend in {}", dist_path.bold().fg::<UserGreen>() ); std::fs::create_dir_all(&dist_path).wrap_err("Could not write to the target directory")?; let trunk_version = duct::cmd!("trunk", "--version") .read() .wrap_err("Could not find `trunk`") .note("`trunk` is required for the build") .suggestion("Install `trunk` with `cargo install trunk`")?; println!("- Using {}", trunk_version.bold().fg::<UserGreen>()); let release = if config.release { Some("--release") } else { None }; let args = IntoIterator::into_iter(["build", "--dist", dist_path.as_str(), html_path.as_str()]) .chain(release); duct::cmd("trunk", args).run()?; Ok(()) } fn run(config: Run) -> Result<()> { dist(config.to_dist())?; let cmd = cargo_metadata::MetadataCommand::new(); let metadata = cmd.exec()?; let dist_path = dist_path(&metadata, config.release); let release = if config.release { Some("--release") } else { None }; let args = IntoIterator::into_iter(["run", "-p", "backend"]).chain(release); duct::cmd("cargo", args) .env("ROCKET_DIST", dist_path) .run() .wrap_err("Could not run server")?; Ok(()) } fn install(config: Install) -> Result<()> { dist(config.to_dist())?; let cmd = cargo_metadata::MetadataCommand::new(); let metadata = cmd.exec()?; let dist_path = dist_path(&metadata, true); let backend_path = backend_path(&metadata, true); let output_dir = config.output.unwrap_or_else(|| output_default_path(&metadata)); println!( "- Building backend in {}", backend_path.bold().fg::<UserGreen>() ); let args = IntoIterator::into_iter(["build", "--release", "-p", "backend"]); duct::cmd("cargo", args) .run() .wrap_err("Could not build backend")?; println!( "- Copying frontend to {}", output_dir.join("static/dist").bold().fg::<UserGreen>() ); std::fs::create_dir_all(&output_dir).wrap_err("Cannot create output dir")?; std::fs::remove_dir_all(&output_dir).wrap_err("Error while cleaning output directory")?; 
std::fs::create_dir_all(&output_dir.join("static")).wrap_err("Cannot create output dir")?; let errors = copy_dir::copy_dir(&dist_path, &output_dir.join("static/dist")) .wrap_err("Could not copy dist dir to output dir")?;
println!( "- Copying backend to {}", output_dir.join("backend").bold().fg::<UserGreen>() ); std::fs::copy(backend_path, output_dir.join("backend")) .wrap_err("Copying the backend failed")?; Ok(()) }
if !errors.is_empty() { eprintln!( "{} Copy succeeded, but the following errors occurred during the copy:", "WARNING:".bold().fg::<UserYellow>() ); for error in errors { eprintln!("\t{}", error.fg::<UserYellow>()) } }
if_condition
[ { "content": "fn main() {\n\n yew::start_app::<Model>();\n\n}", "file_path": "frontend/src/main.rs", "rank": 9, "score": 69425.33094278068 }, { "content": "#[derive(Debug, Clone)]\n\nenum Message {\n\n Message(String),\n\n}\n\n\n", "file_path": "backend/src/main.rs", "rank": 10, "score": 55528.74999435581 }, { "content": "struct Context {\n\n sender: rocket::tokio::sync::broadcast::Sender<Message>,\n\n}\n\n\n\nimpl Context {\n\n fn new() -> Self {\n\n let (sender, _) = rocket::tokio::sync::broadcast::channel(1000);\n\n Self { sender }\n\n }\n\n}\n\n\n\n#[post(\"/msg\", data = \"<msg>\")]\n\nasync fn msg(msg: String, context: &State<Context>) {\n\n context.sender.send(Message::Message(msg)).unwrap_or(0);\n\n}\n\n\n", "file_path": "backend/src/main.rs", "rank": 13, "score": 54270.30834090999 }, { "content": "struct Model {}\n\nimpl Component for Model {\n\n type Message = ();\n\n type Properties = ();\n\n\n\n fn create(_props: Self::Properties, _link: yew::ComponentLink<Self>) -> Self {\n\n Self {}\n\n }\n\n\n\n fn update(&mut self, _msg: Self::Message) -> yew::ShouldRender {\n\n true\n\n }\n\n\n\n fn change(&mut self, _props: Self::Properties) -> yew::ShouldRender {\n\n false\n\n }\n\n\n\n fn view(&self) -> yew::Html {\n\n html! 
{\n\n <div>\n\n <div><event_send::Model/></div>\n\n <div><event_stream::Model/></div>\n\n </div>\n\n }\n\n }\n\n\n\n}\n\n\n", "file_path": "frontend/src/main.rs", "rank": 14, "score": 53228.615540293715 }, { "content": "#[launch]\n\nfn rocket() -> _ {\n\n #[derive(Deserialize)]\n\n #[serde(crate = \"rocket::serde\")]\n\n struct Config {\n\n dist: PathBuf,\n\n }\n\n\n\n impl Default for Config {\n\n fn default() -> Self {\n\n Self {\n\n dist: \"static/dist\".into(),\n\n }\n\n }\n\n }\n\n\n\n let rocket = rocket::build();\n\n let config: Config = rocket.figment().extract().unwrap_or_default();\n\n\n\n rocket\n\n .manage(Context::new())\n\n .mount(\"/\", routes![events, msg])\n\n .mount(\"/\", FileServer::from(config.dist))\n\n .attach(Cors)\n\n}\n", "file_path": "backend/src/main.rs", "rank": 16, "score": 50826.29096923845 }, { "content": "mod cors;\n\n\n\nuse rocket::{get, launch, post, routes};\n\n\n\nuse rocket::serde::Deserialize;\n\n\n\nuse std::path::PathBuf;\n\nuse std::sync::atomic::AtomicU64;\n\n\n\nuse rocket::fs::FileServer;\n\nuse rocket::response::stream::{Event, EventStream};\n\nuse rocket::tokio::select;\n\nuse rocket::State;\n\n\n\nuse rocket::Shutdown;\n\n\n\nuse crate::cors::Cors;\n\n\n\n/// Returns an infinite stream of server-sent events. Each event is a message\n\n/// pulled from a broadcast queue sent by the `post` handler.\n", "file_path": "backend/src/main.rs", "rank": 17, "score": 24844.195811250534 }, { "content": "#[get(\"/events\")]\n\nasync fn events(mut end: Shutdown, context: &State<Context>) -> EventStream![] {\n\n let mut receiver = context.sender.subscribe();\n\n let i: AtomicU64 = AtomicU64::new(0);\n\n EventStream! {\n\n loop {\n\n select! 
{\n\n msg = receiver.recv() => {\n\n let msg = msg.unwrap();\n\n match msg {\n\n Message::Message(msg) => {\n\n let i = i.fetch_add(1, std::sync::atomic::Ordering::Relaxed);\n\n\n\n yield Event::data(msg).id(format!(\"{}\", i));}\n\n }\n\n }\n\n _ = &mut end => break,\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n", "file_path": "backend/src/main.rs", "rank": 18, "score": 24842.263310860562 }, { "content": " }\n\n }\n\n\n\n fn update(&mut self, msg: Self::Message) -> ShouldRender {\n\n match msg {\n\n Msg::Response(res) => {\n\n if let Err(err) = res {\n\n self.last_error = Some(err)\n\n } else {\n\n self.last_error = None;\n\n self.message.clear();\n\n self.current_task = None;\n\n }\n\n true\n\n }\n\n Msg::SendMessage(msg) => {\n\n let post_request = fetch::Request::post(\"http://localhost:8000/msg\")\n\n .body(Ok(msg))\n\n .expect(\"Could not build that request.\");\n\n let callback = self.link.callback(\n", "file_path": "frontend/src/main.rs", "rank": 19, "score": 23737.684634219568 }, { "content": " }\n\n }\n\n }\n\n\n\n pub enum Msg {\n\n Response(Result<(), anyhow::Error>),\n\n UpdateMessage(String),\n\n SendMessage(String),\n\n }\n\n\n\n impl Component for Model {\n\n type Message = Msg;\n\n type Properties = ();\n\n\n\n fn create(_props: Self::Properties, link: ComponentLink<Self>) -> Self {\n\n Self {\n\n current_task: None,\n\n message: Default::default(),\n\n link,\n\n last_error: None,\n", "file_path": "frontend/src/main.rs", "rank": 20, "score": 23737.13471074831 }, { "content": " // previously received properties.\n\n // This component has no properties so we will always return \"false\".\n\n false\n\n }\n\n\n\n fn view(&self) -> Html {\n\n let last_event = match &self.last_event {\n\n Some(event) => html! {\n\n {render_event_source_update(event)}\n\n },\n\n None => html! {<pre>{\"No last event\"}</pre>},\n\n };\n\n match &self.task {\n\n Ok(_) => {\n\n html! 
{\n\n <div>\n\n <h1>{\"Events\"}</h1>\n\n <ul>\n\n { for self.events.iter().map(render_event_item) }\n\n </ul>\n", "file_path": "frontend/src/main.rs", "rank": 21, "score": 23734.934064495133 }, { "content": "use yew::{Component, html};\n\n\n\nmod event_send {\n\n use yew::{prelude::*, services::fetch};\n\n\n\n pub struct Model {\n\n link: ComponentLink<Self>,\n\n current_task: Option<fetch::FetchTask>,\n\n message: String,\n\n last_error: Option<anyhow::Error>,\n\n }\n\n\n\n impl Model {\n\n fn view_last_error(&self) -> Html {\n\n if let Some(err) = self.last_error.as_ref() {\n\n html! {\n\n <p>{format!(\"Sending failed: {}\", err)}</p>\n\n }\n\n } else {\n\n html! {<p></p>}\n", "file_path": "frontend/src/main.rs", "rank": 22, "score": 23734.41366016118 }, { "content": " |response: fetch::Response<Result<String, anyhow::Error>>| {\n\n Msg::Response(response.into_body().map(|_| ()))\n\n },\n\n );\n\n let mut fetch_service = fetch::FetchService::new();\n\n // 3. pass the request and callback to the fetch service\n\n let task = fetch_service\n\n .fetch(post_request, callback)\n\n .expect(\"failed to start request\");\n\n // 4. store the task so it isn't canceled immediately\n\n self.current_task = Some(task);\n\n // we want to redraw so that the page displays a 'fetching...' message to the user\n\n // so return 'true'\n\n self.message.clear();\n\n true\n\n }\n\n Msg::UpdateMessage(msg) => {\n\n self.message = msg;\n\n true\n\n }\n", "file_path": "frontend/src/main.rs", "rank": 23, "score": 23733.306085643362 }, { "content": " }\n\n\n\n pub enum Msg {\n\n PushEvent(Event),\n\n ReceivedUpdate(EventSourceUpdate),\n\n }\n\n\n\n pub struct Model {\n\n events: Vec<Event>,\n\n task: Result<EventSourceTask, String>,\n\n last_event: Option<EventSourceUpdate>,\n\n }\n\n\n\n fn render_event_source_update(update: &EventSourceUpdate) -> Html {\n\n match update {\n\n EventSourceUpdate::Error => html! { <pre>{\"error\"}</pre>},\n\n EventSourceUpdate::Open => html! 
{ <pre>{\"open\"}</pre>},\n\n }\n\n }\n\n\n", "file_path": "frontend/src/main.rs", "rank": 24, "score": 23733.17323843504 }, { "content": " }\n\n }\n\n\n\n fn update(&mut self, msg: Self::Message) -> ShouldRender {\n\n match msg {\n\n Msg::PushEvent(event) => {\n\n self.events.push(event);\n\n // the value has changed so we need to\n\n // re-render for it to appear on the page\n\n true\n\n }\n\n Msg::ReceivedUpdate(event) => {\n\n self.last_event = Some(event);\n\n true\n\n }\n\n }\n\n }\n\n\n\n fn change(&mut self, _props: Self::Properties) -> ShouldRender {\n\n // Should only return \"true\" if new properties are different to\n", "file_path": "frontend/src/main.rs", "rank": 25, "score": 23732.792173654976 }, { "content": " impl Component for Model {\n\n type Message = Msg;\n\n type Properties = ();\n\n\n\n fn create(_props: Self::Properties, link: ComponentLink<Self>) -> Self {\n\n let service = services::EventSourceService::new();\n\n let callback = link.callback(|(msg0, msg1)| {\n\n Msg::PushEvent(Event {\n\n title: msg0,\n\n description: msg1,\n\n })\n\n });\n\n let updates = link.callback(|update| Msg::ReceivedUpdate(update));\n\n let task = service\n\n .open(\"http://localhost:8000/events\", callback, updates)\n\n .map_err(Into::into);\n\n Self {\n\n events: vec![],\n\n task,\n\n last_event: None,\n", "file_path": "frontend/src/main.rs", "rank": 26, "score": 23732.609270467143 }, { "content": " <input type=\"text\" id=\"msg\" name=\"msg\" placeholder=\"Send message\" value={msg.clone()} oninput={self.link.callback(|e : InputData| Msg::UpdateMessage(e.value))}/>\n\n <input type=\"submit\" value=\"Send\" disabled={msg.is_empty()} onclick={self.link.callback(move |_| Msg::SendMessage(msg.clone()))}/>\n\n {self.view_last_error()}\n\n </div>\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nmod event_stream {\n\n use yew::prelude::*;\n\n use yew_sse::services::{\n\n self,\n\n sse::{EventSourceTask, EventSourceUpdate},\n\n };\n\n\n\n pub struct Event {\n\n title: String,\n\n 
description: String,\n", "file_path": "frontend/src/main.rs", "rank": 27, "score": 23731.80524337817 }, { "content": " {last_event}\n\n </div>\n\n }\n\n }\n\n Err(err) => {\n\n html! { <div>{format!(\"Error: {}\", err)}</div> }\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn render_event_item(event: &Event) -> Html {\n\n html! {\n\n <li>\n\n {format!(\"{} => {}\", event.title, event.description)}\n\n </li>\n\n }\n\n }\n\n}\n\n\n", "file_path": "frontend/src/main.rs", "rank": 28, "score": 23730.809264615702 }, { "content": " }\n\n }\n\n\n\n fn change(&mut self, _props: Self::Properties) -> ShouldRender {\n\n // Should only return \"true\" if new properties are different to\n\n // previously received properties.\n\n // This component has no properties so we will always return \"false\".\n\n false\n\n }\n\n\n\n fn view(&self) -> Html {\n\n if let Some(_) = self.current_task {\n\n html! {\n\n <div>{\"Sending message...\"}</div>\n\n }\n\n } else {\n\n let msg = self.message.clone();\n\n html! {\n\n <div>\n\n <label for=\"msg\">{\"Message:\"}</label>\n", "file_path": "frontend/src/main.rs", "rank": 29, "score": 23730.68866527829 }, { "content": "use rocket::http::Header;\n\nuse rocket::{Request, Response};\n\nuse rocket::fairing::{Fairing, Info, Kind};\n\n\n\npub struct Cors;\n\n\n\n#[rocket::async_trait]\n\nimpl Fairing for Cors {\n\n fn info(&self) -> Info {\n\n Info {\n\n name: \"Add CORS headers to responses\",\n\n kind: Kind::Response\n\n }\n\n }\n\n\n\n async fn on_response<'r>(&self, _req: &'r Request<'_>, response: &mut Response<'r>) {\n\n response.set_header(Header::new(\"Access-Control-Allow-Origin\", \"*\"));\n\n response.set_header(Header::new(\"Access-Control-Allow-Methods\", \"POST, GET, PATCH, OPTIONS\"));\n\n response.set_header(Header::new(\"Access-Control-Allow-Headers\", \"*\"));\n\n response.set_header(Header::new(\"Access-Control-Allow-Credentials\", \"true\"));\n\n }\n\n}", "file_path": "backend/src/cors.rs", "rank": 30, "score": 17689.47748576521 }, { 
"content": "yew-sse-example\n\n===============\n\n\n\nThis repository is a proof of concept of:\n\n\n\n- Serving a [yew](https://yew.rs) frontend application from a [rocket](https://rocket.rs) backend\n\n- Using the [yew-sse](https://github.com/liquidnya/yew-sse) library to send messages and receive them from several connected clients.\n\n\n\nThis example is not meant to be *minimal*. Rather, it's meant to be *fun*, and so it uses:\n\n\n\n- The [cargo xtask](https://github.com/matklad/cargo-xtask) pattern to distribute and serve the frontend application from the backend.\n\n- [trunk](https://trunkrs.dev) to perform the \"distribution\" step of the frontend\n\n- [clap](https://clap.rs/) for ergonomic CLI definition\n\n- [color-eyre](https://github.com/yaahc/color-eyre) for nice, colored errors :sparkles:\n\n- [duct](https://github.com/oconnor663/duct.rs), for easy scripting in Rust\n\n\n\n\n\nWhat it does **not** demonstrate though, is the vastness of my skills at CSS :stuck_out_tongue:.\n\n\n\n## How to use\n\n\n\n### Prerequisites\n\n\n\nYou'll need Rust installed, with the `wasm32-unknown-unknown` target available, as well as the [trunk](https://trunkrs.dev) tool.\n\n\n\nTo install `trunk`, just run:\n\n\n\n```\n\ncargo install trunk\n\n```\n\n\n\nTo install the `wasm32-unknown-unknown` target:\n\n\n\n```\n\nrustup target add wasm32-unknown-unknown\n\n```\n\n\n\n### Serving the example\n\n\n\nFrom the repository's root directory, run:\n\n\n\n```\n\ncargo xtask run\n\n```\n\n\n\n(add the `--release` flag to build everything in `--release` mode)\n\n\n\nAfter the build, once rocket is started, open [http://127.0.0.1:8000](http://127.0.0.1:8000) in your browser to use the yew application.\n\nOpen several times the same page to exchange messages! Very convenient :sweat_smile:\n", "file_path": "README.md", "rank": 35, "score": 13.356104045856572 } ]
Rust
ndless-async/src/keypad.rs
jkcoxson/ndless-rs
6242bd3e08c8d6aeac419e19431acc91ba9c0ff1
use alloc::rc::{Rc, Weak}; use core::cell::{Ref, RefCell}; use core::future::Future; use core::mem; use core::pin::Pin; use core::task::{Context, Poll}; use core::time::Duration; use crossbeam_queue::ArrayQueue; use futures_util::{stream::Stream, task::AtomicWaker, StreamExt}; use ignore_result::Ignore; use ndless::alloc::vec::Vec; use ndless::input::{iter_keys, Key}; use ndless::prelude::*; use ndless::timer::{get_ticks, Ticks, TICKS_PER_SECOND}; use crate::timer::TimerListener; #[derive(Eq, PartialEq, Copy, Clone, Debug, Hash)] pub enum KeyState { Pressed, Released, } #[derive(Eq, PartialEq, Copy, Clone, Debug, Hash)] pub struct KeyEvent { pub key: Key, pub state: KeyState, pub tick_at: u32, } struct SharedKeyQueue { queue: ArrayQueue<KeyEvent>, waker: AtomicWaker, } #[derive(Default)] struct KeypadListenerInner { queues: RefCell<Vec<Rc<SharedKeyQueue>>>, keys: RefCell<Vec<Key>>, } impl KeypadListenerInner { fn poll(&self) { let mut queues = self.queues.borrow_mut(); queues.retain(|queue| Rc::strong_count(queue) > 1); if queues.is_empty() { return; } let mut keys = self.keys.borrow_mut(); let mut retain_i = 0; let mut change = false; iter_keys().for_each(|key| { if let Some((i, _)) = keys.iter().enumerate().find(|(_, other)| key == **other) { if i > retain_i { let (beginning, end) = keys.split_at_mut(i); mem::swap(&mut beginning[retain_i], &mut end[0]); } retain_i += 1; } else { change = true; keys.push(key); let tick_at = get_ticks(); queues.iter_mut().for_each(|queue| { queue .queue .push(KeyEvent { key, state: KeyState::Pressed, tick_at, }) .ignore() }); if keys.len() > retain_i + 1 { let (last, beginning) = keys.split_last_mut().unwrap(); mem::swap(&mut beginning[retain_i], last); } retain_i += 1; } }); let tick_at = get_ticks(); for _ in retain_i..keys.len() { change = true; let key = keys.pop().unwrap(); queues.iter_mut().for_each(|queue| { queue .queue .push(KeyEvent { key, state: KeyState::Released, tick_at, }) .ignore() }); } if change { 
queues.iter_mut().for_each(|queue| queue.waker.wake()); } } } pub struct KeypadListener<'a> { timer_listener: Option<&'a TimerListener>, rate: u32, interval: RefCell<Weak<RefCell<dyn Future<Output = ()> + Unpin>>>, inner: Rc<KeypadListenerInner>, } impl<'a> KeypadListener<'a> { pub fn new(timer_listener: &'a TimerListener) -> Self { Self::new_with_hz(timer_listener, 30) } pub fn new_with_hz(timer_listener: &'a TimerListener, hz: u32) -> Self { Self::new_with_ticks(timer_listener, TICKS_PER_SECOND / hz) } pub fn new_with_ms(timer_listener: &'a TimerListener, dur: u32) -> Self { Self::new_with_rate(timer_listener, Duration::from_millis(dur as u64)) } pub fn new_with_rate(timer_listener: &'a TimerListener, dur: Duration) -> Self { Self::new_with_ticks(timer_listener, dur.as_ticks()) } pub fn new_with_ticks(timer_listener: &'a TimerListener, ticks: u32) -> Self { Self { timer_listener: Some(timer_listener), rate: ticks, interval: RefCell::new(Weak::<RefCell<futures_util::future::Ready<()>>>::new()), inner: Default::default(), } } pub fn new_manually_polled() -> Self { Self { timer_listener: None, rate: 0, interval: RefCell::new(Weak::<RefCell<futures_util::future::Ready<()>>>::new()), inner: Default::default(), } } fn interval(&self) -> Rc<RefCell<dyn Future<Output = ()> + Unpin>> { if let Some(interval) = self.interval.borrow().upgrade() { return interval; } let listener = self.inner.clone(); let interval: Rc<RefCell<dyn Future<Output = ()> + Unpin>> = if let Some(timer_listener) = self.timer_listener { Rc::new(RefCell::new( timer_listener.every_ticks(self.rate).for_each(move |_| { listener.poll(); futures_util::future::ready(()) }), )) } else { Rc::new(RefCell::new(futures_util::future::pending())) }; self.interval.replace(Rc::downgrade(&interval)); interval } pub fn poll(&self) { self.inner.poll(); } pub fn stream(&self) -> KeyStream { let mut queues = self.inner.queues.borrow_mut(); let queue = Rc::new(SharedKeyQueue { queue: ArrayQueue::new(100), waker: 
AtomicWaker::new(), }); queues.push(queue.clone()); KeyStream { queue, interval: self.interval(), } } pub fn stream_with_buffer(&self, size: usize) -> KeyStream { let mut queues = self.inner.queues.borrow_mut(); let queue = Rc::new(SharedKeyQueue { queue: ArrayQueue::new(size), waker: AtomicWaker::new(), }); queues.push(queue.clone()); KeyStream { queue, interval: self.interval(), } } pub fn list_keys(&self) -> Ref<Vec<Key>> { self.inner.keys.borrow() } } pub struct KeyStream { queue: Rc<SharedKeyQueue>, interval: Rc<RefCell<dyn Future<Output = ()> + Unpin>>, } impl Stream for KeyStream { type Item = KeyEvent; fn poll_next(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> { let mut interval = self.interval.borrow_mut(); let _ = Pin::new(&mut *interval).poll(cx); self.queue.waker.register(cx.waker()); if let Ok(key) = self.queue.queue.pop() { Poll::Ready(Some(key)) } else { Poll::Pending } } }
use alloc::rc::{Rc, Weak}; use core::cell::{Ref, RefCell}; use core::future::Future; use core::mem; use core::pin::Pin; use core::task::{Context, Poll}; use core::time::Duration; use crossbeam_queue::ArrayQueue; use futures_util::{stream::Stream, task::AtomicWaker, StreamExt}; use ignore_result::Ignore; use ndless::alloc::vec::Vec; use ndless::input::{iter_keys, Key}; use ndless::prelude::*; use ndless::timer::{get_ticks, Ticks, TICKS_PER_SECOND}; use crate::timer::TimerListener; #[derive(Eq, PartialEq, Copy, Clone, Debug, Hash)] pub enum KeyState { Pressed, Released, } #[derive(Eq, PartialEq, Copy, Clone, Debug, Hash)] pub struct KeyEvent { pub key: Key, pub state: KeyState, pub tick_at: u32, } struct SharedKeyQueue { queue: ArrayQueue<KeyEvent>, waker: AtomicWaker, } #[derive(Default)] struct KeypadListenerInner { queues: RefCell<Vec<Rc<SharedKeyQueue>>>, keys: RefCell<Vec<Key>>, } impl KeypadListenerInner { fn poll(&self) { let mut queues = self.queues.borrow_mut(); queues.retain(|queue| Rc::strong_count(queue) > 1); if queues.is_empty() { return; } let mut keys = self.keys.borrow_mut(); let mut retain_i = 0; let mut change = false; iter_keys().for_each(|key| { if let Some((i, _)) = keys.iter().enumerate().find(|(_, other)| key == **other) { if i > retain_i { let (beginning, end) = keys.split_at_mut(i); mem::swap(&mut beginning[retain_i], &mut end[0]); } retain_i += 1; } else { change = true; keys.push(key); let tick_at = get_ticks(); queues.iter_mut().for_each(|queue| { queue .queue .push(KeyEvent { key, state: KeyState::Pressed, tick_at, }) .ignore() }); if keys.len() > retain_i + 1 { let (last, beginning) = keys.split_last_mut().unwrap(); mem::swap(&mut beginning[retain_i], last); } retain_i += 1; } }); let tick_at = get_ticks(); for _ in retain_i..keys.len() { change = true; let key = keys.pop().unwrap(); queues.iter_mut().for_each(|queue| { queue .queue .push(KeyEvent { key, state: KeyState::Released, tick_at, }) .ignore() }); } if change { 
queues.iter_mut().for_each(|queue| queue.waker.wake()); } } } pub struct KeypadListener<'a> { timer_listener: Option<&'a TimerListener>, rate: u32, interval: RefCell<Weak<RefCell<dyn Future<Output = ()> + Unpin>>>, inner: Rc<KeypadListenerInner>, } impl<'a> KeypadListener<'a> { pub fn new(timer_listener: &'a TimerListener) -> Self { Self::new_with_hz(timer_listener, 30) } pub fn new_with_hz(timer_listener: &'a TimerListener, hz: u32) -> Self { Self::new_with_ticks(timer_listener, TICKS_PER_SECOND / hz) } pub fn new_with_ms(timer_listener: &'a TimerListener, dur: u32) -> Self { Self::new_with_rate(timer_listener, Duration::from_millis(dur as u64)) } pub fn new_with_rate(timer_listener: &'a TimerListener, dur: Duration) -> Self { Self::new_with_ticks(timer_listener, dur.as_ticks()) } pub fn new_with_ticks(timer_listener: &'a TimerListener, ticks: u32) -> Self { Self { timer_listener: Some(timer_listener), rate: ticks, interval: RefCell::new(Weak::<RefCell<futures_util::future::Ready<()>>>::new()), inner: Default::default(), } } pub fn new_manually_polled() -> Self { Self { timer_listener: None, rate: 0, interval: RefCell::new(Weak::<RefCell<futures_util::future::Ready<()>>>::new()), inner: Default::default(), } } fn interval(&self) -> Rc<RefCell<dyn Future<Output = ()> + Unpin>> { if let Some(interval) = self.interval.borrow().upgrade() { return interval; } let listener = self.inner.clone(); let interval: Rc<RefCell<dyn Future<Output = ()> + Unpin>> = if let Some(timer_listener) = self.timer_listener { Rc::new(RefCell::new( timer_listener.every_ticks(self.rate).for_each(move |_| { listener.poll(); futures_util::future::ready(()) }), )) } else { Rc::new(RefCell::new(futures_util::future::pending())) }; self.interval.replace(Rc::downgrade(&interval)); interval } pub fn poll(&self) { self.inner.poll(); }
pub fn stream_with_buffer(&self, size: usize) -> KeyStream { let mut queues = self.inner.queues.borrow_mut(); let queue = Rc::new(SharedKeyQueue { queue: ArrayQueue::new(size), waker: AtomicWaker::new(), }); queues.push(queue.clone()); KeyStream { queue, interval: self.interval(), } } pub fn list_keys(&self) -> Ref<Vec<Key>> { self.inner.keys.borrow() } } pub struct KeyStream { queue: Rc<SharedKeyQueue>, interval: Rc<RefCell<dyn Future<Output = ()> + Unpin>>, } impl Stream for KeyStream { type Item = KeyEvent; fn poll_next(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> { let mut interval = self.interval.borrow_mut(); let _ = Pin::new(&mut *interval).poll(cx); self.queue.waker.register(cx.waker()); if let Ok(key) = self.queue.queue.pop() { Poll::Ready(Some(key)) } else { Poll::Pending } } }
pub fn stream(&self) -> KeyStream { let mut queues = self.inner.queues.borrow_mut(); let queue = Rc::new(SharedKeyQueue { queue: ArrayQueue::new(100), waker: AtomicWaker::new(), }); queues.push(queue.clone()); KeyStream { queue, interval: self.interval(), } }
function_block-full_function
[ { "content": "/// Returns true if the specific key is pressed.\n\n/// Note that you may pass either an owned [`Key`] or a borrowed [`&Key`][Key].\n\npub fn is_key_pressed(key: impl Borrow<Key>) -> bool {\n\n\tKEY_MAPPING\n\n\t\t.iter()\n\n\t\t.find(|(_, other)| other == key.borrow())\n\n\t\t.map_or(\n\n\t\t\tfalse,\n\n\t\t\t|(raw_key, _key)| unsafe { ndless_sys::isKeyPressed(raw_key) } == 1,\n\n\t\t)\n\n}\n\n\n", "file_path": "ndless/src/bindings/input.rs", "rank": 0, "score": 308078.37026206695 }, { "content": "/// Returns the number of ticks since the program started, based on\n\n/// a 32768Hz timer (i.e. 32768 ticks per second).\n\npub fn get_ticks() -> u32 {\n\n\tunsafe {\n\n\t\tif has_colors() {\n\n\t\t\tlet value = 0x900C0004 as *mut u32;\n\n\t\t\tSTART_VALUE.wrapping_sub(read_volatile(value))\n\n\t\t} else {\n\n\t\t\tlet value = 0x900C0000 as *mut u32;\n\n\t\t\tTICK_SUM += read_volatile(value);\n\n\t\t\twrite_volatile(value, 0);\n\n\t\t\tTICK_SUM\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "ndless/src/bindings/timer.rs", "rank": 1, "score": 259583.6962539169 }, { "content": "/// A more efficient way to get keys being pressed than [`get_keys`],\n\n/// as `iter_keys` does not allocate.\n\n///\n\n/// However, it must be used immediately, as each iteration of the\n\n/// loop checks if the key is being pressed at that time. 
For example:\n\n///\n\n/// ```\n\n/// use ndless::prelude::*;\n\n/// use ndless::input::iter_keys;\n\n/// for key in iter_keys() {\n\n/// println!(\"Key {:?} is being pressed.\", key);\n\n/// }\n\n/// ```\n\n///\n\n/// Additionally, it may be used like any other [`Iterator`] in Rust:\n\n///\n\n/// ```\n\n/// // Print all keys except escape\n\n/// use ndless::prelude::*;\n\n/// use ndless::input::{iter_keys, Key};\n\n/// iter_keys()\n\n/// .filter(|key| key != Key::Esc)\n\n/// .for_each(|key| println!(\"Key {:?} is being pressed.\", key));\n\n/// ```\n\npub fn iter_keys() -> impl Iterator<Item = Key> + 'static {\n\n\tKEY_MAPPING\n\n\t\t.iter()\n\n\t\t.filter(|(raw_key, _key)| unsafe { ndless_sys::isKeyPressed(raw_key) } == 1)\n\n\t\t.map(|(_, key)| *key)\n\n}\n\n\n", "file_path": "ndless/src/bindings/input.rs", "rank": 2, "score": 251398.55363854923 }, { "content": "/// Puts the current thread to sleep for at least the specified amount of time.\n\n///\n\n/// The thread may sleep longer than the duration specified due to scheduling\n\n/// specifics or platform-dependent functionality. It will never sleep less.\n\n///\n\n/// Problems will occur when sleeping for more than 2^31/32768 seconds, which\n\n/// is about 18 hours.\n\n///\n\n/// This function has a resolution of 30 μs.\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// use core::time;\n\n/// use ndless::thread;\n\n///\n\n/// let ten_millis = time::Duration::from_millis(10);\n\n///\n\n/// thread::sleep(ten_millis);\n\n/// ```\n\npub fn sleep(dur: Duration) {\n\n\tlet ticks = dur.as_ticks();\n\n\tlet wanted_time = get_ticks().wrapping_add(ticks);\n\n\tconfigure_sleep(ticks);\n\n\twhile !has_time_passed(wanted_time) {\n\n\t\tidle();\n\n\t}\n\n\tdisable_sleep();\n\n}\n", "file_path": "ndless/src/bindings/thread.rs", "rank": 3, "score": 251397.47687220824 }, { "content": "/// Prepares the system for sleep. 
[`idle`][crate::hw::idle] must be\n\n/// called to actually sleep.\n\npub fn configure_sleep(ticks: u32) {\n\n\tunsafe {\n\n\t\tinit_sleep();\n\n\t\tif has_colors() {\n\n\t\t\tlet control = 0x900D0008 as *mut u32;\n\n\t\t\tlet load = 0x900D0000 as *mut u32;\n\n\t\t\tlet _value = 0x900D0004 as *mut u32;\n\n\t\t\twrite_volatile(control, 0);\n\n\t\t\twrite_volatile(control, 0b01100011);\n\n\t\t\twrite_volatile(control, 0b11100011);\n\n\t\t\twrite_volatile(load, ticks);\n\n\t\t} else {\n\n\t\t\tlet timer = 0x900D0000 as *mut u32;\n\n\t\t\tlet control = 0x900D0008 as *mut u32;\n\n\t\t\tlet divider = 0x900D0004 as *mut u32;\n\n\t\t\twrite_volatile(control, 0);\n\n\t\t\twrite_volatile(divider, 1);\n\n\t\t\twrite_volatile(timer, ticks.max(2u32.pow(16) - 1));\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "ndless/src/bindings/timer.rs", "rank": 4, "score": 247110.06019667754 }, { "content": "pub fn get_key_state() -> Vec<(Key, bool)> {\n\n\tlet mut num = 0;\n\n\tlet data = unsafe { ll::SDL_GetKeyState(&mut num) };\n\n\tlet mut i = -1isize;\n\n\n\n\tlet buf = data as *const u8;\n\n\tlet buf = unsafe { slice::from_raw_parts(buf, num as usize) };\n\n\tbuf.iter()\n\n\t\t.filter_map(|&state| {\n\n\t\t\ti += 1;\n\n\n\n\t\t\tmatch wrap_key(i as ll::SDLKey) {\n\n\t\t\t\tSome(key) => Some((key, state == 1)),\n\n\t\t\t\tNone => None,\n\n\t\t\t}\n\n\t\t})\n\n\t\t.collect()\n\n}\n\n\n", "file_path": "ndless-sdl/src/event.rs", "rank": 5, "score": 244653.84373483434 }, { "content": "/// Copies the entire contents of a reader into a writer.\n\n///\n\n/// This function will continuously read data from `reader` and then\n\n/// write it into `writer` in a streaming fashion until `reader`\n\n/// returns EOF.\n\n///\n\n/// On success, the total number of bytes that were copied from\n\n/// `reader` to `writer` is returned.\n\n///\n\n/// If you’re wanting to copy the contents of one file to another and you’re\n\n/// working with filesystem paths, see the [`fs::copy`] function.\n\n///\n\n/// 
[`fs::copy`]: ../fs/fn.copy.html\n\n///\n\n/// # Errors\n\n///\n\n/// This function will return an error immediately if any call to `read` or\n\n/// `write` returns an error. All instances of `ErrorKind::Interrupted` are\n\n/// handled by this function and the underlying operation is retried.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use std::io;\n\n///\n\n/// fn main() -> io::Result<()> {\n\n/// let mut reader: &[u8] = b\"hello\";\n\n/// let mut writer: Vec<u8> = vec![];\n\n///\n\n/// io::copy(&mut reader, &mut writer)?;\n\n///\n\n/// assert_eq!(&b\"hello\"[..], &writer[..]);\n\n/// Ok(())\n\n/// }\n\n/// ```\n\npub fn copy<R: ?Sized, W: ?Sized>(reader: &mut R, writer: &mut W) -> io::Result<u64>\n\nwhere\n\n\tR: Read,\n\n\tW: Write,\n\n{\n\n\tlet mut buf = MaybeUninit::<[u8; super::DEFAULT_BUF_SIZE]>::uninit();\n\n\t// FIXME: #42788\n\n\t//\n\n\t// - This creates a (mut) reference to a slice of _uninitialized_ integers,\n\n\t// which is **undefined behavior**\n\n\t//\n\n\t// - Only the standard library gets to soundly \"ignore\" this, based on its\n\n\t// privileged knowledge of unstable rustc internals;\n\n\tunsafe {\n\n\t\treader.initializer().initialize(buf.assume_init_mut());\n\n\t}\n\n\n\n\tlet mut written = 0;\n\n\tloop {\n\n\t\tlet len = match reader.read(unsafe { buf.assume_init_mut() }) {\n", "file_path": "ndless/src/file_io/io/util.rs", "rank": 6, "score": 236704.9486385936 }, { "content": "/// Suspends the program until [`any_key_pressed`] returns true.\n\npub fn wait_key_pressed() {\n\n\tunsafe { ndless_sys::wait_key_pressed() }\n\n}\n\n\n", "file_path": "ndless/src/bindings/input.rs", "rank": 7, "score": 236578.25404690983 }, { "content": "/// Suspends the program until [`any_key_pressed`] returns false.\n\npub fn wait_no_key_pressed() {\n\n\tunsafe { ndless_sys::wait_no_key_pressed() }\n\n}\n\n\n\npub mod touchpad {\n\n\tpub use ndless_sys::touchpad_info_t as touchpad_info;\n\n\tuse ndless_sys::touchpad_report_t as 
touchpad_report;\n\n\n\n\tuse super::Key;\n\n\n\n\t#[derive(Eq, PartialEq, Debug, Clone, Copy, Hash)]\n\n\tpub struct TouchpadReport {\n\n\t\tpub contact: bool,\n\n\t\tpub proximity: u8,\n\n\t\tpub x: u16,\n\n\t\tpub y: u16,\n\n\t\tpub x_vel: u8,\n\n\t\tpub y_vel: u8,\n\n\t\tpub pressed: bool,\n\n\t\tpub arrow: Option<Key>,\n", "file_path": "ndless/src/bindings/input.rs", "rank": 8, "score": 236572.90342233807 }, { "content": "/// Detects if the number of ticks has passed yet\n\npub fn has_time_passed(at_tick: u32) -> bool {\n\n\t// https://arduino.stackexchange.com/a/12588/3134\n\n\tlet half_max = 2u32.pow(31);\n\n\tget_ticks().wrapping_sub(at_tick).wrapping_add(half_max) >= half_max\n\n}\n\n\n", "file_path": "ndless/src/bindings/timer.rs", "rank": 9, "score": 236295.50913205397 }, { "content": "/// Returns true if the \"On\" key is currently pressed.\n\npub fn key_on_pressed() -> bool {\n\n\tunsafe { ndless_sys::on_key_pressed() > 0 }\n\n}\n\n\n", "file_path": "ndless/src/bindings/input.rs", "rank": 10, "score": 229898.64820117268 }, { "content": "/// Returns true if any buttons are currently pressed, including pushing the\n\n/// touchpad.\n\npub fn any_key_pressed() -> bool {\n\n\tunsafe { ndless_sys::any_key_pressed() > 0 }\n\n}\n\n\n", "file_path": "ndless/src/bindings/input.rs", "rank": 11, "score": 229892.72099040137 }, { "content": "pub fn get_key_name(key: Key) -> String {\n\n\tunsafe {\n\n\t\tlet cstr = ll::SDL_GetKeyName(key as ll::SDLKey);\n\n\n\n\t\tstr::from_utf8(CStr::from_ptr(mem::transmute_copy(&cstr)).to_bytes())\n\n\t\t\t.unwrap()\n\n\t\t\t.to_string()\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/event.rs", "rank": 12, "score": 221278.44866491458 }, { "content": "/// Returns a [`Vec`] of pressed keys.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use ndless::input::{get_keys, Key};\n\n///\n\n/// let keys = get_keys();\n\n/// if keys.len() == 0 { /* No keys currently pressed */ }\n\n/// ```\n\npub fn get_keys() -> Vec<Key> 
{\n\n\titer_keys().collect()\n\n}\n\n\n", "file_path": "ndless/src/bindings/input.rs", "rank": 13, "score": 216400.3621125339 }, { "content": "pub fn hw_subtype() -> u32 {\n\n\tunsafe { ndless_sys::nl_hwsubtype() }\n\n}\n\n\n", "file_path": "ndless/src/bindings/hw.rs", "rank": 14, "score": 215890.76999838933 }, { "content": "pub fn copy(from: &Path, to: &Path) -> io::Result<u64> {\n\n\tlet (mut reader, reader_metadata) = open_from(from)?;\n\n\tlet (mut writer, _) = open_to_and_set_permissions(to, reader_metadata)?;\n\n\n\n\tio::copy(&mut reader, &mut writer)\n\n}\n", "file_path": "ndless/src/file_io/sys/fs.rs", "rank": 15, "score": 206896.41154561457 }, { "content": "/// Spawns a task and blocks until the future resolves, returning its result.\n\npub fn block_on<T>(listeners: &AsyncListeners, task: impl Future<Output = T>) -> T {\n\n\tlet wake_marker = Arc::new(TaskWaker {\n\n\t\twake_marker: AtomicBool::new(true),\n\n\t});\n\n\tlet waker = Waker::from(wake_marker.clone());\n\n\tlet mut context = Context::from_waker(&waker);\n\n\tpin_mut!(task);\n\n\tlet mut task = task;\n\n\tloop {\n\n\t\tlisteners.timer.poll();\n\n\t\tlisteners.yielder.poll();\n\n\t\twhile wake_marker.wake_marker.load(Ordering::Relaxed) {\n\n\t\t\tmatch task.as_mut().poll(&mut context) {\n\n\t\t\t\tPoll::Ready(val) => {\n\n\t\t\t\t\tdisable_sleep();\n\n\t\t\t\t\treturn val;\n\n\t\t\t\t}\n\n\t\t\t\tPoll::Pending => {\n\n\t\t\t\t\twake_marker.wake_marker.store(false, Ordering::Relaxed);\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tlisteners.timer.poll();\n\n\t\t\tlisteners.yielder.poll();\n\n\t\t}\n\n\t\tlisteners.timer.config_sleep();\n\n\t\tidle();\n\n\t\tdisable_sleep();\n\n\t}\n\n}\n\n\n", "file_path": "ndless-async/src/task.rs", "rank": 16, "score": 203958.567878414 }, { "content": "pub fn copy(from: &Path, to: &Path) -> io::Result<u64> {\n\n\tif !from.is_file() {\n\n\t\treturn Err(Error::new(\n\n\t\t\tErrorKind::InvalidInput,\n\n\t\t\t\"the source path is not an existing regular 
file\",\n\n\t\t));\n\n\t}\n\n\n\n\tlet mut reader = fs::File::open(from)?;\n\n\tlet mut writer = fs::File::create(to)?;\n\n\n\n\tlet ret = io::copy(&mut reader, &mut writer)?;\n\n\tOk(ret)\n\n}\n\n\n", "file_path": "ndless/src/file_io/sys_common/fs.rs", "rank": 17, "score": 203552.00176282154 }, { "content": "pub fn enable_key_repeat(delay: RepeatDelay, interval: RepeatInterval) -> bool {\n\n\tlet delay = match delay {\n\n\t\tRepeatDelay::Default => 500,\n\n\t\tRepeatDelay::Custom(delay) => delay,\n\n\t};\n\n\tlet interval = match interval {\n\n\t\tRepeatInterval::Default => 30,\n\n\t\tRepeatInterval::Custom(interval) => interval,\n\n\t};\n\n\n\n\tunsafe { ll::SDL_EnableKeyRepeat(delay as c_int, interval as c_int) == 0 as c_int }\n\n}\n\n\n\n// get_mouse_state, get_relative_mouse_state, start_text_input, stop_text_input,\n\n// set_text_input_rect\n", "file_path": "ndless-sdl/src/event.rs", "rank": 18, "score": 200002.13807034356 }, { "content": "pub fn assert_ndless_rev(required_version: u32) {\n\n\tunsafe { ndless_sys::assert_ndless_rev(required_version) }\n\n}\n\n\n", "file_path": "ndless/src/bindings/ndless.rs", "rank": 19, "score": 198623.49308042036 }, { "content": "pub fn set_event_state(ty: EventType, state: bool) {\n\n\tunsafe {\n\n\t\tll::SDL_EventState(ty as u8, state as c_int);\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/event.rs", "rank": 20, "score": 193251.99677694342 }, { "content": "pub fn set_mod_state(states: &[Mod]) {\n\n\tunsafe {\n\n\t\tll::SDL_SetModState(\n\n\t\t\tstates\n\n\t\t\t\t.iter()\n\n\t\t\t\t.fold(0u32, |states, &state| states | state as ll::SDLMod),\n\n\t\t);\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/event.rs", "rank": 21, "score": 186721.3584952669 }, { "content": "#[doc(hidden)]\n\npub trait AsInnerMut<Inner: ?Sized> {\n\n\tfn as_inner_mut(&mut self) -> &mut Inner;\n\n}\n\n\n\n/// A trait for extracting representations from std types\n", "file_path": "ndless/src/file_io/sys_common.rs", "rank": 22, "score": 
186429.36677099473 }, { "content": "pub fn poll_event() -> Event {\n\n\tpump_events();\n\n\n\n\tlet mut raw = null_event();\n\n\tlet have = unsafe { ll::SDL_PollEvent(&mut raw) };\n\n\n\n\tif have != 1 {\n\n\t\treturn Event::None;\n\n\t}\n\n\n\n\twrap_event(raw)\n\n}\n\n\n\n// TODO: set_event_filter, get_event_filter\n\n\n", "file_path": "ndless-sdl/src/event.rs", "rank": 23, "score": 186236.9975537776 }, { "content": "pub fn get_ticks() -> usize {\n\n\tunsafe { ll::SDL_GetTicks() as usize }\n\n}\n", "file_path": "ndless-sdl/src/sdl.rs", "rank": 24, "score": 186224.32153433232 }, { "content": "pub fn get_app_state() -> Vec<AppState> {\n\n\tlet bitflags = unsafe { ll::SDL_GetAppState() };\n\n\n\n\twrap_app_state(bitflags)\n\n}\n\n\n", "file_path": "ndless-sdl/src/event.rs", "rank": 25, "score": 184009.89728838482 }, { "content": "pub fn copy<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> io::Result<u64> {\n\n\tfs_imp::copy(from.as_ref(), to.as_ref())\n\n}\n\n\n\n/// Creates a new hard link on the filesystem.\n\n///\n\n/// The `dst` path will be a link pointing to the `src` path. 
Note that systems\n\n/// often require these two paths to both be located on the same filesystem.\n\n///\n\n/// # Platform-specific behavior\n\n///\n\n/// This function currently corresponds to the `link` function on Unix\n\n/// and the `CreateHardLink` function on Windows.\n\n/// Note that, this [may change in the future][changes].\n\n///\n\n/// [changes]: ../io/index.html#platform-specific-behavior\n\n///\n\n/// # Errors\n\n///\n\n/// This function will return an error in the following situations, but is not\n", "file_path": "ndless/src/file_io/fs.rs", "rank": 26, "score": 176759.74610536447 }, { "content": "pub fn rustup_component(name: impl AsRef<OsStr>) -> Result<()> {\n\n\tlet mut cmd = Command::new(\"rustup\");\n\n\tlet name = name.as_ref();\n\n\tcmd.arg(\"component\").arg(\"add\").arg(name);\n\n\tcmd.status().map_err(Into::into).and_then(|status| {\n\n\t\tif status.success() {\n\n\t\t\tdebug!(\n\n\t\t\t\t\"Successfully installed {} via Rustup\",\n\n\t\t\t\tAsRef::<Path>::as_ref(name).display()\n\n\t\t\t);\n\n\t\t\tOk(())\n\n\t\t} else {\n\n\t\t\tmatch status.code() {\n\n\t\t\t\tSome(code) => bail!(\"Failed to run rustup: error code {}\", code),\n\n\t\t\t\tNone => bail!(\"Failed to run rustup\"),\n\n\t\t\t}\n\n\t\t}\n\n\t})\n\n}\n", "file_path": "cargo-ndless/src/install.rs", "rank": 27, "score": 175852.887023181 }, { "content": "pub fn get_mod_state() -> Vec<Mod> {\n\n\tunsafe { wrap_mod_state(ll::SDL_GetModState()) }\n\n}\n\n\n", "file_path": "ndless-sdl/src/event.rs", "rank": 28, "score": 174644.06312043694 }, { "content": "#[inline]\n\nfn slice_write(pos_mut: &mut u64, slice: &mut [u8], buf: &[u8]) -> io::Result<usize> {\n\n\tlet pos = cmp::min(*pos_mut, slice.len() as u64);\n\n\tlet amt = (&mut slice[(pos as usize)..]).write(buf)?;\n\n\t*pos_mut += amt as u64;\n\n\tOk(amt)\n\n}\n\n\n", "file_path": "ndless/src/file_io/io/cursor.rs", "rank": 29, "score": 174166.81796569692 }, { "content": "// Resizing write implementation\n\nfn vec_write(pos_mut: &mut 
u64, vec: &mut Vec<u8>, buf: &[u8]) -> io::Result<usize> {\n\n\tlet pos: usize = (*pos_mut).try_into().map_err(|_| {\n\n\t\tError::new(\n\n\t\t\tErrorKind::InvalidInput,\n\n\t\t\t\"cursor position exceeds maximum possible vector length\",\n\n\t\t)\n\n\t})?;\n\n\t// Make sure the internal buffer is as least as big as where we\n\n\t// currently are\n\n\tlet len = vec.len();\n\n\tif len < pos {\n\n\t\t// use `resize` so that the zero filling is as efficient as possible\n\n\t\tvec.resize(pos, 0);\n\n\t}\n\n\t// Figure out what bytes will be used to overwrite what's currently\n\n\t// there (left), and what will be appended on the end (right)\n\n\t{\n\n\t\tlet space = vec.len() - pos;\n\n\t\tlet (left, right) = buf.split_at(cmp::min(space, buf.len()));\n\n\t\tvec[pos..pos + left.len()].copy_from_slice(left);\n\n\t\tvec.extend_from_slice(right);\n\n\t}\n\n\n\n\t// Bump us forward\n\n\t*pos_mut = (pos + buf.len()) as u64;\n\n\tOk(buf.len())\n\n}\n\n\n", "file_path": "ndless/src/file_io/io/cursor.rs", "rank": 32, "score": 169815.2377238868 }, { "content": "pub fn load_file(file: impl Into<String>) -> Result<Surface, String> {\n\n\tlet cfile = ndless::cstr!(file.into());\n\n\tunsafe {\n\n\t\tlet raw = ll::IMG_Load(cfile.as_ptr());\n\n\n\n\t\tif raw.is_null() {\n\n\t\t\tErr(get_error())\n\n\t\t} else {\n\n\t\t\tOk(Surface { raw, owned: true })\n\n\t\t}\n\n\t}\n\n}\n\n\n\nmacro_rules! load_typed {\n\n\t($name: ident, $function: ident) => {\n\n\t\tpub fn $name(buffer: &[u8]) -> Result<Surface, String> {\n\n\t\t\tunsafe {\n\n\t\t\t\tlet mem =\n\n\t\t\t\t\tSDL_RWFromConstMem(buffer.as_ptr() as *const cty::c_void, buffer.len() as i32);\n\n\t\t\t\tif mem.is_null() {\n", "file_path": "ndless-sdl/src/image.rs", "rank": 33, "score": 168015.17792896126 }, { "content": "#[derive(Copy, Clone, PartialEq, PartialOrd, Debug)]\n\nenum State {\n\n\tPrefix = 0,\n\n\t// c:\n\n\tStartDir = 1,\n\n\t// / or . 
or nothing\n\n\tBody = 2,\n\n\t// foo/bar/baz\n\n\tDone = 3,\n\n}\n\n\n\n/// A structure wrapping a Windows path prefix as well as its unparsed string\n\n/// representation.\n\n///\n\n/// In addition to the parsed [`Prefix`] information returned by [`kind`],\n\n/// `PrefixComponent` also holds the raw and unparsed [`OsStr`] slice,\n\n/// returned by [`as_os_str`].\n\n///\n\n/// Instances of this `struct` can be obtained by matching against the\n\n/// [`Prefix` variant] on [`Component`].\n\n///\n", "file_path": "ndless/src/file_io/path.rs", "rank": 34, "score": 167243.80512938736 }, { "content": "pub fn get_event_state(ty: EventType) -> bool {\n\n\tunsafe { ll::SDL_EventState(ty as u8, ll::SDL_QUERY as c_int) == ll::SDL_ENABLE as u8 }\n\n}\n\n\n", "file_path": "ndless-sdl/src/event.rs", "rank": 35, "score": 164580.512371097 }, { "content": "struct Queue<T> {\n\n\tqueue: ArrayQueue<T>,\n\n\twaker: AtomicWaker,\n\n}\n\n\n", "file_path": "ndless-async/src/mpsc.rs", "rank": 36, "score": 158258.94106364128 }, { "content": "pub fn cvt_r<T, F>(mut f: F) -> crate::io::Result<T>\n\nwhere\n\n\tT: IsMinusOne,\n\n\tF: FnMut() -> T,\n\n{\n\n\tloop {\n\n\t\tmatch cvt(f()) {\n\n\t\t\tErr(ref e) if e.kind() == ErrorKind::Interrupted => {}\n\n\t\t\tother => return other,\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "ndless/src/file_io/sys.rs", "rank": 37, "score": 157078.97132133672 }, { "content": "#[doc(hidden)]\n\npub trait IntoInner<Inner> {\n\n\tfn into_inner(self) -> Inner;\n\n}\n\n\n\n/// A trait for creating std types from internal representations\n", "file_path": "ndless/src/file_io/sys_common.rs", "rank": 38, "score": 154997.01719634177 }, { "content": "#[doc(hidden)]\n\npub trait FromInner<Inner> {\n\n\tfn from_inner(inner: Inner) -> Self;\n\n}\n", "file_path": "ndless/src/file_io/sys_common.rs", "rank": 39, "score": 154997.01719634177 }, { "content": "// This uses an adaptive system to extend the vector when it fills. 
We want to\n\n// avoid paying to allocate and zero a huge chunk of memory if the reader only\n\n// has 4 bytes while still making large reads if the reader does have a ton\n\n// of data to return. Simply tacking on an extra DEFAULT_BUF_SIZE space every\n\n// time is 4,500 times (!) slower than a default reservation size of 32 if the\n\n// reader has a very small amount of data to return.\n\n//\n\n// Because we're extending the buffer with uninitialized data for trusted\n\n// readers, we need to make sure to truncate that if any of this panics.\n\nfn read_to_end<R: Read + ?Sized>(r: &mut R, buf: &mut Vec<u8>) -> Result<usize> {\n\n\tread_to_end_with_reservation(r, buf, 32)\n\n}\n\n\n", "file_path": "ndless/src/file_io/io.rs", "rank": 40, "score": 154281.92003120508 }, { "content": "/// ## WARNING\n\n///\n\n/// This **will** leak memory without careful planning, as it does not run any\n\n/// destructors! You need to make sure that all scopes end before calling this!\n\n/// You can use\n\n///\n\n/// ```rust\n\n/// fn main() {\n\n/// {\n\n/// // Main code\n\n/// let a = vec![5];\n\n/// }\n\n/// ndless::process::abort();\n\n/// }\n\n/// ```\n\n/// to ensure that no memory leaks.\n\npub fn abort() -> ! {\n\n\tunsafe { ndless_sys::abort() }\n\n}\n\n\n", "file_path": "ndless/src/bindings/process.rs", "rank": 41, "score": 153760.4494786179 }, { "content": "/// Trigger a breakpoint. If no debugger is connected (i.e. a physical\n\n/// calculator), the calculator will reset. 
This function will do nothing if\n\n/// compiled in release mode, allowing you to leave this in when compiling for\n\n/// an actual calculator.\n\npub fn bkpt() {\n\n\tif cfg!(debug_assertions) {\n\n\t\tunsafe { asm!(\".long 0xE1212374\") }\n\n\t}\n\n}\n\n\n", "file_path": "ndless/src/bindings/ndless.rs", "rank": 42, "score": 153743.4696506116 }, { "content": "#[doc(hidden)]\n\npub fn __init() {\n\n\tunsafe {\n\n\t\tif has_colors() {\n\n\t\t\tlet value = 0x900C0004 as *mut u32;\n\n\t\t\tlet control = 0x900C0008 as *mut u32;\n\n\t\t\tlet clock_source = 0x900C0080 as *mut u32;\n\n\t\t\twrite_volatile(clock_source, 0xA);\n\n\t\t\twrite_volatile(control, 0b10000010);\n\n\t\t\tSTART_VALUE = read_volatile(value);\n\n\t\t} else {\n\n\t\t\tlet value = 0x900C0000 as *mut u32;\n\n\t\t\tlet control = 0x900C0008 as *mut u32;\n\n\t\t\tlet divider = 0x900C0004 as *mut u32;\n\n\t\t\twrite_volatile(divider, 1);\n\n\t\t\twrite_volatile(control, 0b00001111);\n\n\t\t\twrite_volatile(value, 0);\n\n\t\t}\n\n\t\tinit_sleep();\n\n\t}\n\n}\n\n\n", "file_path": "ndless/src/bindings/timer.rs", "rank": 43, "score": 153743.4696506116 }, { "content": "/// Go to sleep until an interrupt occurs\n\npub fn idle() {\n\n\tunsafe { ndless_sys::idle() }\n\n}\n", "file_path": "ndless/src/bindings/hw.rs", "rank": 44, "score": 153743.4696506116 }, { "content": "pub fn quit() {\n\n\tunsafe {\n\n\t\tll::IMG_Quit();\n\n\t}\n\n}\n", "file_path": "ndless-sdl/src/image.rs", "rank": 45, "score": 153743.4696506116 }, { "content": "pub fn quit() {\n\n\tunsafe {\n\n\t\tll::SDL_Quit();\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/sdl.rs", "rank": 46, "score": 153743.4696506116 }, { "content": "/// Resets the sleep timer so it may be used normally.\n\npub fn disable_sleep() {\n\n\tunsafe {\n\n\t\tif has_colors() {\n\n\t\t\tlet control = 0x900D0008 as *mut u32;\n\n\t\t\tlet load = 0x900D0000 as *mut u32;\n\n\t\t\tlet _value = 0x900D0004 as *mut u32;\n\n\t\t\twrite_volatile(control, 
0);\n\n\t\t\twrite_volatile(control, ORIG_CONTROL & 0b01111111);\n\n\t\t\twrite_volatile(load, ORIG_LOAD);\n\n\t\t\twrite_volatile(control, ORIG_CONTROL);\n\n\t\t} else {\n\n\t\t\tlet timer = 0x900D0000 as *mut u32;\n\n\t\t\tlet control = 0x900D0008 as *mut u32;\n\n\t\t\tlet divider = 0x900D0004 as *mut u32;\n\n\t\t\twrite_volatile(control, ORIG_CONTROL);\n\n\t\t\twrite_volatile(divider, ORIG_DIVIDER);\n\n\t\t\twrite_volatile(timer, 32);\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "ndless/src/bindings/timer.rs", "rank": 47, "score": 151432.81309411195 }, { "content": "/// Must be called at the end of a program that creates or deletes files,\n\n/// to update the OS document browser.\n\npub fn refresh_documents() {\n\n\tunsafe { ndless_sys::refresh_osscr() }\n\n}\n\n\n", "file_path": "ndless/src/bindings/ndless.rs", "rank": 48, "score": 151432.73874410967 }, { "content": "/// flush the data cache and invalidate the instruction and data caches of the\n\n/// processor. Should be called before loading code dynamically, after a code\n\n/// patch or with self-modifying code.\n\npub fn clear_cache() {\n\n\tunsafe { ndless_sys::clear_cache() }\n\n}\n\n\n\npub mod screen {\n\n\t/// Returned by [`lcd_type`]\n\n\t#[derive(Eq, PartialEq, Debug, Clone, Copy, Hash)]\n\n\tpub enum Screen {\n\n\t\t/// 4bit grayscale. Native on classic calcs.\n\n\t\tScreen320x240x4,\n\n\t\t/// 8bit paletted mode.\n\n\t\tScreen320x240x8,\n\n\t\t/// RGB444\n\n\t\tScreen320x240x16,\n\n\t\t/// RGB565. Native on CX before HW-W\n\n\t\tScreen320x240x565,\n\n\t\t/// RGB565. 
Native on CX HW-W\n\n\t\tScreen240x320x565,\n\n\t\tScreen320x240x555,\n\n\t\tScreen240x320x555,\n", "file_path": "ndless/src/bindings/hw.rs", "rank": 49, "score": 151432.42984682688 }, { "content": "/// Creates a dialog box with two numerical inputs\n\npub fn msg_2numeric(\n\n\ttitle: &str,\n\n\tsubtitle: &str,\n\n\tmsg1: &str,\n\n\trange1: (i32, i32),\n\n\tmsg2: &str,\n\n\trange2: (i32, i32),\n\n) -> Option<(i32, i32)> {\n\n\tlet title = cstr!(title);\n\n\tlet subtitle = cstr!(subtitle);\n\n\tlet msg1 = cstr!(msg1);\n\n\tlet msg2 = cstr!(msg2);\n\n\tlet mut num1 = 0i32;\n\n\tlet mut num2 = 0i32;\n\n\tmatch unsafe {\n\n\t\tndless_sys::show_2numeric_input(\n\n\t\t\ttitle.as_ptr(),\n\n\t\t\tsubtitle.as_ptr(),\n\n\t\t\tmsg1.as_ptr(),\n\n\t\t\t&mut num1,\n", "file_path": "ndless/src/bindings/msg.rs", "rank": 50, "score": 151427.63776476777 }, { "content": "pub fn pump_events() {\n\n\tunsafe {\n\n\t\tll::SDL_PumpEvents();\n\n\t}\n\n}\n\n\n\n// TODO: peep_events (a tricky one but doable)\n\n\n", "file_path": "ndless-sdl/src/event.rs", "rank": 51, "score": 151427.63776476777 }, { "content": "pub fn clear_error() {\n\n\tunsafe {\n\n\t\tll::SDL_ClearError();\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/sdl.rs", "rank": 52, "score": 151427.63776476777 }, { "content": "pub fn iconify_window() {\n\n\tunsafe {\n\n\t\tll::SDL_WM_IconifyWindow();\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/wm.rs", "rank": 53, "score": 151427.63776476777 }, { "content": "pub fn swap_buffers() {\n\n\tunsafe {\n\n\t\tll::SDL_GL_SwapBuffers();\n\n\t}\n\n}\n\n\n\n// TODO: YUV\n", "file_path": "ndless-sdl/src/video.rs", "rank": 54, "score": 151427.63776476777 }, { "content": "pub fn swap_buffers() {\n\n\tunsafe {\n\n\t\tll::SDL_GL_SwapBuffers();\n\n\t}\n\n}\n", "file_path": "ndless-sdl/src/gl.rs", "rank": 55, "score": 151427.63776476777 }, { "content": "/// See\n\n/// [Hackspire](https://hackspire.org/index.php/Ndless_features_and_limitations#Resident_programs)\n\npub fn set_resident() 
{\n\n\tunsafe {\n\n\t\tif ndless_static_vars::PROGRAM_STATE == ndless_static_vars::ProgramState::Normal {\n\n\t\t\tndless_sys::nl_set_resident();\n\n\t\t\tndless_static_vars::ARGUMENTS = None;\n\n\t\t\tndless_static_vars::PROGRAM_STATE = ndless_static_vars::ProgramState::Resident;\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "ndless/src/bindings/ndless.rs", "rank": 56, "score": 151427.63776476777 }, { "content": "pub fn get_file(filename: impl AsRef<Path>, wanted_contents: &[u8]) -> Result<(bool, GenericPath)> {\n\n\tlet filename = filename.as_ref();\n\n\tif let Some(mut path) = dirs::home_dir() {\n\n\t\tpath.push(\".ndless\");\n\n\t\tpath.push(filename);\n\n\t\tif let Ok(mut file) = File::open(&path) {\n\n\t\t\tlet mut contents = vec![];\n\n\t\t\tif file.read_to_end(&mut contents).is_ok() && contents == wanted_contents {\n\n\t\t\t\treturn Ok((false, Box::new(path)));\n\n\t\t\t}\n\n\t\t\tdebug!(\"Updating {}...\", filename.display());\n\n\t\t}\n\n\t\tif create_dir_all(path.parent().unwrap()).is_ok() {\n\n\t\t\tif let Ok(mut file) = File::create(&path) {\n\n\t\t\t\tif file.write_all(wanted_contents).is_ok() {\n\n\t\t\t\t\treturn Ok((true, Box::new(path)));\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t\tdebug!(\n", "file_path": "cargo-ndless/src/files.rs", "rank": 57, "score": 150467.46081775596 }, { "content": "pub fn is_video_mode_ok(\n\n\tw: isize,\n\n\th: isize,\n\n\tbpp: isize,\n\n\tsurface_flags: &[SurfaceFlag],\n\n\tvideo_flags: &[VideoFlag],\n\n) -> Option<isize> {\n\n\tlet flags = surface_flags\n\n\t\t.iter()\n\n\t\t.fold(0u32, |flags, &flag| flags | flag as u32);\n\n\tlet flags = video_flags\n\n\t\t.iter()\n\n\t\t.fold(flags, |flags, &flag| flags | flag as u32);\n\n\n\n\tunsafe {\n\n\t\tlet bpp = ll::SDL_VideoModeOK(w as c_int, h as c_int, bpp as c_int, flags);\n\n\n\n\t\tif bpp == 0 {\n\n\t\t\tNone\n\n\t\t} else {\n", "file_path": "ndless-sdl/src/video.rs", "rank": 58, "score": 149227.35301594157 }, { "content": "pub fn toggle_grab_input() {\n\n\tunsafe {\n\n\t\tif 
ll::SDL_WM_GrabInput(GrabMode::Query as i32) == GrabMode::On as i32 {\n\n\t\t\tll::SDL_WM_GrabInput(GrabMode::Off as i32);\n\n\t\t} else {\n\n\t\t\tll::SDL_WM_GrabInput(GrabMode::On as i32);\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/wm.rs", "rank": 59, "score": 149227.35301594157 }, { "content": "pub fn set_video_mode(\n\n\tw: isize,\n\n\th: isize,\n\n\tbpp: isize,\n\n\tsurface_flags: &[SurfaceFlag],\n\n\tvideo_flags: &[VideoFlag],\n\n) -> Result<Surface, String> {\n\n\tlet flags = surface_flags\n\n\t\t.iter()\n\n\t\t.fold(0u32, |flags, &flag| flags | flag as u32);\n\n\tlet flags = video_flags\n\n\t\t.iter()\n\n\t\t.fold(flags, |flags, &flag| flags | flag as u32);\n\n\n\n\tunsafe {\n\n\t\tlet raw = ll::SDL_SetVideoMode(w as c_int, h as c_int, bpp as c_int, flags);\n\n\n\n\t\tif raw.is_null() {\n\n\t\t\tErr(get_error())\n\n\t\t} else {\n\n\t\t\tOk(wrap_surface(raw, false))\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/video.rs", "rank": 60, "score": 149227.35301594157 }, { "content": "pub fn toggle_cursor_visible() {\n\n\tunsafe {\n\n\t\tif ll::SDL_ShowCursor(ll::SDL_QUERY) == ll::SDL_ENABLE {\n\n\t\t\tll::SDL_ShowCursor(ll::SDL_DISABLE);\n\n\t\t} else {\n\n\t\t\tll::SDL_ShowCursor(ll::SDL_ENABLE);\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/mouse.rs", "rank": 61, "score": 149227.35301594157 }, { "content": "pub fn parse_prefix(_: &OsStr) -> Option<Prefix<'_>> {\n\n\tNone\n\n}\n\n\n\npub const MAIN_SEP_STR: &str = \"/\";\n\npub const MAIN_SEP: char = '/';\n", "file_path": "ndless/src/file_io/sys/path.rs", "rank": 62, "score": 148023.96417485102 }, { "content": "#[doc(hidden)]\n\npub trait AsInner<Inner: ?Sized> {\n\n\tfn as_inner(&self) -> &Inner;\n\n}\n\n\n\n/// A trait for viewing representations from std types\n", "file_path": "ndless/src/file_io/sys_common.rs", "rank": 63, "score": 147584.0514745536 }, { "content": "/// Returns the arguments which this program was started with.\n\n///\n\n/// The first element is 
traditionally the path of the executable, but it can be\n\n/// set to arbitrary text, and may not even exist. This means this property\n\n/// should not be relied upon for security purposes.\n\n///\n\n/// On Unix systems shell usually expands unquoted arguments with glob patterns\n\n/// (such as `*` and `?`).\n\n///\n\n/// # Panics\n\n///\n\n/// The returned iterator will panic during iteration if any argument to the\n\n/// process is not valid unicode.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use ndless::env;\n\n///\n\n/// // Prints each argument on a separate line\n\n/// for argument in env::args() {\n\n/// println!(\"{}\", argument);\n\n/// }\n\n/// ```\n\npub fn args() -> Args {\n\n\tunsafe { &crate::ARGUMENTS }\n\n\t\t.map(|args| {\n\n\t\t\targs.iter()\n\n\t\t\t\t.map(|arg| {\n\n\t\t\t\t\tunsafe { CStr::from_ptr(*arg) }\n\n\t\t\t\t\t\t.to_str()\n\n\t\t\t\t\t\t.unwrap()\n\n\t\t\t\t\t\t.to_string()\n\n\t\t\t\t})\n\n\t\t\t\t.collect::<Vec<_>>()\n\n\t\t})\n\n\t\t.unwrap_or_default()\n\n\t\t.into_iter()\n\n}\n\n\n", "file_path": "ndless/src/bindings/env.rs", "rank": 64, "score": 144756.00283040575 }, { "content": "/// TRUE on a TI-Nspire Touchpad or on a TI-Nspire CX.\n\npub fn has_touchpad() -> bool {\n\n\tunsafe { ndless_sys::_is_touchpad() > 0 }\n\n}\n\n\n", "file_path": "ndless/src/bindings/hw.rs", "rank": 65, "score": 144753.0947430896 }, { "content": "/// since Ndless v3.1. TRUE if the device has a screen in color.\n\npub fn has_colors() -> bool {\n\n\t!is_classic()\n\n}\n\n\n", "file_path": "ndless/src/bindings/hw.rs", "rank": 66, "score": 144753.0947430896 }, { "content": "/// since Ndless v3.1 r863. TRUE on TI-Nspire CM/CM-C.\n\npub fn is_cm() -> bool {\n\n\thw_subtype() == 1\n\n}\n\n\n", "file_path": "ndless/src/bindings/hw.rs", "rank": 67, "score": 144752.93211966494 }, { "content": "/// since Ndless v3.1. 
TRUE on classic TI-Nspire.\n\n///\n\n/// This is the preferred way to check CX/CM-specific features.\n\npub fn is_classic() -> bool {\n\n\tunsafe { ndless_sys::hwtype() < 1 }\n\n}\n\n\n", "file_path": "ndless/src/bindings/hw.rs", "rank": 68, "score": 144752.7302183217 }, { "content": "pub fn is_startup() -> bool {\n\n\tunsafe { ndless_sys::nl_isstartup() > 0 }\n\n}\n\n\n", "file_path": "ndless/src/bindings/ndless.rs", "rank": 69, "score": 144747.80064133878 }, { "content": "pub fn hw_type() -> Type {\n\n\tmatch unsafe { ndless_sys::hwtype() } {\n\n\t\t0 => Type::Nspire,\n\n\t\t1 => Type::NspireCX,\n\n\t\tfuture => Type::Future(future),\n\n\t}\n\n}\n\n\n", "file_path": "ndless/src/bindings/hw.rs", "rank": 70, "score": 142547.5158925126 }, { "content": "pub fn stdout() -> Stdout {\n\n\tStdout {\n\n\t\tinner: super::sys::stdio::Stdout::new(),\n\n\t}\n\n}\n\n\n\nimpl Write for Stdout {\n\n\tfn write(&mut self, buf: &[u8]) -> Result<usize> {\n\n\t\tself.inner.write(buf)\n\n\t}\n\n\n\n\tfn flush(&mut self) -> Result<()> {\n\n\t\tself.inner.flush()\n\n\t}\n\n}\n\n\n", "file_path": "ndless/src/file_io/io.rs", "rank": 71, "score": 142547.5158925126 }, { "content": "pub fn get_error() -> String {\n\n\tunsafe {\n\n\t\tlet cstr = ll::SDL_GetError() as *const cty::c_char;\n\n\t\tlet slice = CStr::from_ptr(cstr).to_bytes();\n\n\n\n\t\tstr::from_utf8(slice).unwrap().to_string()\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/sdl.rs", "rank": 72, "score": 142547.5158925126 }, { "content": "pub fn wait_event() -> Event {\n\n\tlet mut raw = null_event();\n\n\tlet success = unsafe { ll::SDL_WaitEvent(&mut raw) == 1 as c_int };\n\n\n\n\tif success {\n\n\t\twrap_event(raw)\n\n\t} else {\n\n\t\tEvent::None\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/event.rs", "rank": 73, "score": 142547.5158925126 }, { "content": "pub fn is_unicode_enabled() -> bool {\n\n\tunsafe { ll::SDL_EnableUNICODE(-1 as c_int) == 1 }\n\n}\n\n\n", "file_path": "ndless-sdl/src/event.rs", "rank": 74, "score": 
142547.5158925126 }, { "content": "pub fn get_cursor() -> Cursor {\n\n\tunsafe { wrap_cursor(ll::SDL_GetCursor(), false) }\n\n}\n\n\n", "file_path": "ndless-sdl/src/mouse.rs", "rank": 75, "score": 142547.5158925126 }, { "content": "pub fn is_cursor_visible() -> bool {\n\n\tunsafe { ll::SDL_ShowCursor(ll::SDL_QUERY) == ll::SDL_ENABLE }\n\n}\n", "file_path": "ndless-sdl/src/mouse.rs", "rank": 76, "score": 142547.5158925126 }, { "content": "pub fn is_grabbing_input() -> bool {\n\n\tunsafe { ll::SDL_WM_GrabInput(GrabMode::Query as i32) == GrabMode::On as i32 }\n\n}\n\n\n\n// TODO: get_wm_info\n", "file_path": "ndless-sdl/src/wm.rs", "rank": 77, "score": 142547.5158925126 }, { "content": "fn wrap_key(i: ll::SDLKey) -> Option<Key> {\n\n\tFromPrimitive::from_usize(i as usize)\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Copy, Clone)]\n\npub enum Mod {\n\n\tNone = 0x0000,\n\n\tLShift = 0x0001,\n\n\tRShift = 0x0002,\n\n\tLCtrl = 0x0040,\n\n\tRCtrl = 0x0080,\n\n\tLAlt = 0x0100,\n\n\tRAlt = 0x0200,\n\n\tLMeta = 0x0400,\n\n\tRMeta = 0x0800,\n\n\tNum = 0x1000,\n\n\tCaps = 0x2000,\n\n\tMode = 0x4000,\n\n\tReserved = 0x8000,\n\n}\n\n\n", "file_path": "ndless-sdl/src/event.rs", "rank": 78, "score": 142270.3186655884 }, { "content": "/// Constructs a new handle to an empty reader.\n\n///\n\n/// All reads from the returned reader will return [`Ok`]`(0)`.\n\n///\n\n/// [`Ok`]: ../result/enum.Result.html#variant.Ok\n\n///\n\n/// # Examples\n\n///\n\n/// A slightly sad example of not reading anything into a buffer:\n\n///\n\n/// ```\n\n/// use std::io::{self, Read};\n\n///\n\n/// let mut buffer = String::new();\n\n/// io::empty().read_to_string(&mut buffer).unwrap();\n\n/// assert!(buffer.is_empty());\n\n/// ```\n\npub fn empty() -> Empty {\n\n\tEmpty { _priv: () }\n\n}\n\n\n\nimpl Read for Empty {\n\n\t#[inline]\n\n\tfn read(&mut self, _buf: &mut [u8]) -> io::Result<usize> {\n\n\t\tOk(0)\n\n\t}\n\n\n\n\t#[inline]\n\n\tunsafe fn initializer(&self) -> Initializer 
{\n\n\t\tInitializer::nop()\n\n\t}\n\n}\n\nimpl BufRead for Empty {\n\n\t#[inline]\n\n\tfn fill_buf(&mut self) -> io::Result<&[u8]> {\n\n\t\tOk(&[])\n\n\t}\n", "file_path": "ndless/src/file_io/io/util.rs", "rank": 79, "score": 140479.14625356553 }, { "content": "/// Creates an instance of a writer which will successfully consume all data.\n\n///\n\n/// All calls to `write` on the returned instance will return `Ok(buf.len())`\n\n/// and the contents of the buffer will not be inspected.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// use std::io::{self, Write};\n\n///\n\n/// let buffer = vec![1, 2, 3, 5, 8];\n\n/// let num_bytes = io::sink().write(&buffer).unwrap();\n\n/// assert_eq!(num_bytes, 5);\n\n/// ```\n\npub fn sink() -> Sink {\n\n\tSink { _priv: () }\n\n}\n\n\n\nimpl Write for Sink {\n\n\t#[inline]\n\n\tfn write(&mut self, buf: &[u8]) -> io::Result<usize> {\n\n\t\tOk(buf.len())\n\n\t}\n\n\n\n\t#[inline]\n\n\tfn write_vectored(&mut self, bufs: &[IoSlice<'_>]) -> io::Result<usize> {\n\n\t\tlet total_len = bufs.iter().map(|b| b.len()).sum();\n\n\t\tOk(total_len)\n\n\t}\n\n\n\n\t#[inline]\n\n\tfn flush(&mut self) -> io::Result<()> {\n\n\t\tOk(())\n\n\t}\n", "file_path": "ndless/src/file_io/io/util.rs", "rank": 80, "score": 140472.14269264604 }, { "content": "/// return true if a third-party Launcher was used to boot the OS, such as\n\n/// nLaunch/nLaunchy\n\npub fn third_party_loader() -> bool {\n\n\tunsafe { ndless_sys::nl_loaded_by_3rd_party_loader() > 0 }\n\n}\n", "file_path": "ndless/src/bindings/ndless.rs", "rank": 81, "score": 140469.55330490274 }, { "content": "/// Returns the platform-specific value of errno\n\npub fn errno() -> i32 {\n\n\tunsafe { (*errno_location()) as i32 }\n\n}\n\n\n", "file_path": "ndless/src/file_io/sys/os.rs", "rank": 82, "score": 140459.7345952555 }, { "content": "pub fn get_video_info() -> VideoInfo {\n\n\tlet raw = unsafe { ll::SDL_GetVideoInfo() };\n\n\tVideoInfo {\n\n\t\tflags: wrap_video_info_flags(unsafe { 
(*raw).flags } as u32),\n\n\t\twidth: unsafe { (*raw).current_w } as isize,\n\n\t\theight: unsafe { (*raw).current_h } as isize,\n\n\t\tformat: wrap_pixel_format(unsafe { (*raw).vfmt }),\n\n\t}\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub enum PaletteType {\n\n\tLogical = 1,\n\n\tPhysical,\n\n}\n\n\n", "file_path": "ndless-sdl/src/video.rs", "rank": 83, "score": 138460.64072342025 }, { "content": "pub trait Error: Debug + Display {\n\n\t/// **This method is soft-deprecated.**\n\n\t///\n\n\t/// Although using it won’t cause compilation warning,\n\n\t/// new code should use [`Display`] instead\n\n\t/// and new `impl`s can omit it.\n\n\t///\n\n\t/// To obtain error description as a string, use `to_string()`.\n\n\t///\n\n\t/// # Examples\n\n\t///\n\n\t/// ```\n\n\t/// match \"xc\".parse::<u32>() {\n\n\t/// Err(e) => {\n\n\t/// // Print `e` itself, not `e.description()`.\n\n\t/// println!(\"Error: {}\", e);\n\n\t/// }\n\n\t/// _ => println!(\"No error\"),\n\n\t/// }\n\n\t/// ```\n", "file_path": "ndless/src/file_io/error.rs", "rank": 84, "score": 137548.83151686948 }, { "content": "/// ## WARNING\n\n///\n\n/// This **will** leak memory without careful planning, as it does not run any\n\n/// destructors! You need to make sure that all scopes end before calling this!\n\n/// You can either use\n\n///\n\n/// ```rust\n\n/// fn main() {\n\n/// {\n\n/// // Main code\n\n/// let a = vec![5];\n\n/// }\n\n/// ndless::process::exit(1);\n\n/// }\n\n/// ```\n\n/// or\n\n/// ```rust\n\n/// fn main() {\n\n/// ndless::process::exit({\n\n/// // Main code\n\n/// \t let a = vec![5];\n\n/// 0\n\n/// });\n\n/// }\n\n/// ```\n\n/// to ensure that no memory leaks.\n\npub fn exit(code: i32) -> ! 
{\n\n\tunsafe { ndless_sys::exit(code) }\n\n}\n\n\n", "file_path": "ndless/src/bindings/process.rs", "rank": 85, "score": 137162.16462318288 }, { "content": "struct Dir(*mut libc::c_void);\n\n\n\nunsafe impl Send for Dir {}\n\n\n\nunsafe impl Sync for Dir {}\n\n\n\npub struct DirEntry {\n\n\tname: CString,\n\n\tdir: ReadDir,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct OpenOptions {\n\n\t// generic\n\n\tread: bool,\n\n\twrite: bool,\n\n\tappend: bool,\n\n\ttruncate: bool,\n\n\tcreate: bool,\n\n\tcreate_new: bool,\n", "file_path": "ndless/src/file_io/sys/fs.rs", "rank": 86, "score": 136748.35662506692 }, { "content": "pub fn set_cursor(cursor: &Cursor) {\n\n\tunsafe {\n\n\t\tll::SDL_SetCursor(cursor.raw);\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/mouse.rs", "rank": 87, "score": 135051.010791081 }, { "content": "pub fn get_caption() -> (String, String) {\n\n\tlet mut title_buf = ptr::null_mut();\n\n\tlet mut icon_buf = ptr::null_mut();\n\n\tlet mut title = String::new();\n\n\tlet mut icon = String::new();\n\n\n\n\tunsafe {\n\n\t\tll::SDL_WM_GetCaption(&mut title_buf, &mut icon_buf);\n\n\n\n\t\tif !title_buf.is_null() {\n\n\t\t\tlet slice = CStr::from_ptr(mem::transmute_copy(&title_buf)).to_bytes();\n\n\t\t\ttitle = str::from_utf8(slice).unwrap().to_string();\n\n\t\t}\n\n\n\n\t\tif !icon_buf.is_null() {\n\n\t\t\tlet slice = CStr::from_ptr(mem::transmute_copy(&icon_buf)).to_bytes();\n\n\t\t\ticon = str::from_utf8(slice).unwrap().to_string();\n\n\t\t}\n\n\n\n\t\t(title, icon)\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/wm.rs", "rank": 88, "score": 135051.010791081 }, { "content": "pub fn set_error(err: &str) {\n\n\tunsafe {\n\n\t\tll::SDL_SetError(CString::new(err.as_bytes()).unwrap().as_ptr());\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/sdl.rs", "rank": 89, "score": 135051.010791081 }, { "content": "pub fn enable_unicode(enable: bool) {\n\n\tunsafe {\n\n\t\tll::SDL_EnableUNICODE(enable as c_int);\n\n\t}\n\n}\n\n\n", "file_path": 
"ndless-sdl/src/event.rs", "rank": 90, "score": 135051.010791081 }, { "content": "pub fn quit_subsystem(flags: &[InitFlag]) {\n\n\tlet flags = flags\n\n\t\t.iter()\n\n\t\t.fold(0u32, |flags, &flag| flags | flag as ll::SDL_InitFlag);\n\n\n\n\tunsafe {\n\n\t\tll::SDL_QuitSubSystem(flags);\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/sdl.rs", "rank": 91, "score": 133057.31048745813 }, { "content": "pub fn set_cursor_visible(visible: bool) {\n\n\tunsafe {\n\n\t\tll::SDL_ShowCursor(visible as c_int);\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/mouse.rs", "rank": 92, "score": 133057.31048745813 }, { "content": "pub fn set_errno(e: i32) {\n\n\tunsafe { *errno_location() = e as c_int }\n\n}\n\n\n", "file_path": "ndless/src/file_io/sys/os.rs", "rank": 93, "score": 133057.31048745813 }, { "content": "pub fn grab_input(mode: GrabMode) {\n\n\tunsafe {\n\n\t\tll::SDL_WM_GrabInput(mode as i32);\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/wm.rs", "rank": 94, "score": 133057.31048745813 }, { "content": "pub fn set_error_from_code(err: Error) {\n\n\tunsafe { ll::SDL_Error(err as ll::SDL_errorcode) }\n\n}\n\n\n", "file_path": "ndless-sdl/src/sdl.rs", "rank": 95, "score": 133057.31048745813 }, { "content": "pub fn is_separator(c: char) -> bool {\n\n\tc.is_ascii() && is_sep_byte(c as u8)\n\n}\n\n\n\n/// The primary separator of path components for the current platform.\n\n///\n\n/// For example, `/` on Unix and `\\` on Windows.\n\n\n\npub const MAIN_SEPARATOR: char = crate::file_io::sys::path::MAIN_SEP;\n\n\n\n////////////////////////////////////////////////////////////////////////////////\n\n// Misc helpers\n\n////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "ndless/src/file_io/path.rs", "rank": 96, "score": 130590.19256671018 }, { "content": "struct TaskWaker {\n\n\twake_marker: AtomicBool,\n\n}\n\n\n\nimpl TaskWaker {\n\n\tfn wake_task(&self) {\n\n\t\tself.wake_marker.store(true, 
Ordering::Relaxed);\n\n\t}\n\n}\n\n\n\nimpl Wake for TaskWaker {\n\n\tfn wake(self: Arc<Self>) {\n\n\t\tself.wake_task();\n\n\t}\n\n\n\n\tfn wake_by_ref(self: &Arc<Self>) {\n\n\t\tself.wake_task();\n\n\t}\n\n}\n\n\n", "file_path": "ndless-async/src/task.rs", "rank": 97, "score": 129489.71215171836 }, { "content": "pub fn init(flags: &[InitFlag]) -> bool {\n\n\tunsafe {\n\n\t\tll::SDL_Init(\n\n\t\t\tflags\n\n\t\t\t\t.iter()\n\n\t\t\t\t.fold(0u32, |flags, &flag| flags | flag as ll::SDL_InitFlag),\n\n\t\t) == 0\n\n\t}\n\n}\n\n\n", "file_path": "ndless-sdl/src/sdl.rs", "rank": 98, "score": 128596.49226308732 }, { "content": "struct WakerData {\n\n\twaker: AtomicWaker,\n\n\tdone: Cell<bool>,\n\n}\n\n\n\n#[derive(Default)]\n\npub(crate) struct YieldListener {\n\n\twakers: RefCell<Vec<Rc<WakerData>>>,\n\n}\n\n\n\nimpl YieldListener {\n\n\tpub(crate) fn poll(&self) {\n\n\t\tlet mut wakers = self.wakers.borrow_mut();\n\n\t\twakers.retain(|waker| Rc::strong_count(waker) > 1);\n\n\t\twakers.iter_mut().for_each(|waker| {\n\n\t\t\twaker.done.set(true);\n\n\t\t\twaker.waker.wake();\n\n\t\t})\n\n\t}\n\n\tpub(crate) fn yield_now(&self) -> Yield {\n", "file_path": "ndless-async/src/yield_now.rs", "rank": 99, "score": 127137.83714828768 } ]
Rust
reql/src/cmd/do_.rs
kid/rethinkdb-rs
945e161aec8288fa4f915bc59bc0575438a1d4ae
use super::args::Args; use crate::{cmd, Command, Func}; use ql2::term::TermType; use serde::Serialize; pub trait Arg { fn arg(self, parent: Option<Command>) -> cmd::Arg<()>; } impl Arg for Command { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { let cmd = Command::new(TermType::Funcall).with_arg(self); match parent { Some(parent) => cmd.with_arg(parent).into_arg(), None => cmd.into_arg(), } } } impl<T> Arg for T where T: Serialize, { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { Command::from_json(self).arg(parent) } } impl Arg for Args<(Command, Command)> { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { let Args((arg, expr)) = self; expr.arg(parent).with_arg(arg) } } #[allow(array_into_iter)] #[allow(clippy::into_iter_on_ref)] impl<const N: usize> Arg for Args<([Command; N], Command)> { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { let Args((args, expr)) = self; let mut cmd = expr.arg(parent); for arg in args.into_iter().cloned() { cmd = cmd.with_arg(arg); } cmd } } #[allow(array_into_iter)] #[allow(clippy::into_iter_on_ref)] impl<T, const N: usize> Arg for Args<([T; N], Command)> where T: Serialize + Clone, { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { let Args((args, expr)) = self; let mut cmd = expr.arg(parent); for arg in args.into_iter().cloned() { let arg = Command::from_json(arg); cmd = cmd.with_arg(arg); } cmd } } impl Arg for Args<(Command, Func)> { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { let Args((arg, Func(func))) = self; func.arg(parent).with_arg(arg) } } #[allow(array_into_iter)] #[allow(clippy::into_iter_on_ref)] impl<const N: usize> Arg for Args<([Command; N], Func)> { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { let Args((args, Func(func))) = self; let mut cmd = func.arg(parent); for arg in args.into_iter().cloned() { cmd = cmd.with_arg(arg); } cmd } } #[allow(array_into_iter)] #[allow(clippy::into_iter_on_ref)] impl<T, const N: usize> Arg for Args<([T; N], Func)> where T: 
Serialize + Clone, { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { let Args((args, Func(func))) = self; let mut cmd = func.arg(parent); for arg in args.into_iter().cloned() { let arg = Command::from_json(arg); cmd = cmd.with_arg(arg); } cmd } } impl Arg for Func { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { let Func(func) = self; func.arg(parent) } } #[cfg(test)] mod tests { use crate::{self as reql, cmd, func, r}; #[test] fn r_do() { let counter = crate::current_counter(); let query = r.do_(r.args(([10, 20], func!(|x, y| x + y)))); let serialised = cmd::serialise(&query); let expected = format!( r#"[64,[[69,[[2,[2,3]],[24,[[10,[{}]],[10,[{}]]]]]],10,20]]"#, counter, counter + 1 ); assert_eq!(serialised, expected); } #[test] fn r_db_table_get_do() { let counter = crate::current_counter(); let query = r .db("mydb") .table("table1") .get("[email protected]") .do_(func!(|doc| r .db("mydb") .table("table2") .get(doc.get_field("id")))); let serialised = cmd::serialise(&query); let expected = format!( r#"[64,[[69,[[2,[1]],[16,[[15,[[14,["mydb"]],"table2"]],[31,[[10,[{}]],"id"]]]]]],[16,[[15,[[14,["mydb"]],"table1"]],"[email protected]"]]]]"#, counter ); assert_eq!(serialised, expected); } }
use super::args::Args; use crate::{cmd, Command, Func}; use ql2::term::TermType; use serde::Serialize; pub trait Arg { fn arg(self, parent: Option<Command>) -> cmd::Arg<()>; } impl Arg for Command { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { let cmd = Command::new(TermType::Funcall).with_arg(self);
} } impl<T> Arg for T where T: Serialize, { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { Command::from_json(self).arg(parent) } } impl Arg for Args<(Command, Command)> { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { let Args((arg, expr)) = self; expr.arg(parent).with_arg(arg) } } #[allow(array_into_iter)] #[allow(clippy::into_iter_on_ref)] impl<const N: usize> Arg for Args<([Command; N], Command)> { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { let Args((args, expr)) = self; let mut cmd = expr.arg(parent); for arg in args.into_iter().cloned() { cmd = cmd.with_arg(arg); } cmd } } #[allow(array_into_iter)] #[allow(clippy::into_iter_on_ref)] impl<T, const N: usize> Arg for Args<([T; N], Command)> where T: Serialize + Clone, { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { let Args((args, expr)) = self; let mut cmd = expr.arg(parent); for arg in args.into_iter().cloned() { let arg = Command::from_json(arg); cmd = cmd.with_arg(arg); } cmd } } impl Arg for Args<(Command, Func)> { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { let Args((arg, Func(func))) = self; func.arg(parent).with_arg(arg) } } #[allow(array_into_iter)] #[allow(clippy::into_iter_on_ref)] impl<const N: usize> Arg for Args<([Command; N], Func)> { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { let Args((args, Func(func))) = self; let mut cmd = func.arg(parent); for arg in args.into_iter().cloned() { cmd = cmd.with_arg(arg); } cmd } } #[allow(array_into_iter)] #[allow(clippy::into_iter_on_ref)] impl<T, const N: usize> Arg for Args<([T; N], Func)> where T: Serialize + Clone, { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { let Args((args, Func(func))) = self; let mut cmd = func.arg(parent); for arg in args.into_iter().cloned() { let arg = Command::from_json(arg); cmd = cmd.with_arg(arg); } cmd } } impl Arg for Func { fn arg(self, parent: Option<Command>) -> cmd::Arg<()> { let Func(func) = self; func.arg(parent) } } #[cfg(test)] mod tests { 
use crate::{self as reql, cmd, func, r}; #[test] fn r_do() { let counter = crate::current_counter(); let query = r.do_(r.args(([10, 20], func!(|x, y| x + y)))); let serialised = cmd::serialise(&query); let expected = format!( r#"[64,[[69,[[2,[2,3]],[24,[[10,[{}]],[10,[{}]]]]]],10,20]]"#, counter, counter + 1 ); assert_eq!(serialised, expected); } #[test] fn r_db_table_get_do() { let counter = crate::current_counter(); let query = r .db("mydb") .table("table1") .get("[email protected]") .do_(func!(|doc| r .db("mydb") .table("table2") .get(doc.get_field("id")))); let serialised = cmd::serialise(&query); let expected = format!( r#"[64,[[69,[[2,[1]],[16,[[15,[[14,["mydb"]],"table2"]],[31,[[10,[{}]],"id"]]]]]],[16,[[15,[[14,["mydb"]],"table1"]],"[email protected]"]]]]"#, counter ); assert_eq!(serialised, expected); } }
match parent { Some(parent) => cmd.with_arg(parent).into_arg(), None => cmd.into_arg(), }
if_condition
[ { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Or).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/or.rs", "rank": 0, "score": 216228.98313413086 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::ForEach).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/for_each.rs", "rank": 1, "score": 216228.98313413086 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::During).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/during.rs", "rank": 2, "score": 216228.98313413086 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for cmd::Arg<()> {\n\n fn arg(self) -> cmd::Arg<()> {\n\n self\n\n }\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::And).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/and.rs", "rank": 4, "score": 216228.98313413086 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Not).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/not.rs", "rank": 5, "score": 216228.98313413086 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Branch).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl Arg for Args<(Command, Command, Command)> {\n\n fn arg(self) -> cmd::Arg<()> {\n\n let Args((test, true_action, false_action)) = self;\n\n test.arg().with_arg(true_action).with_arg(false_action)\n\n 
}\n\n}\n\n\n\n#[allow(array_into_iter)]\n\n#[allow(clippy::into_iter_on_ref)]\n\nimpl<const N: usize> Arg for Args<([(Command, Command); N], Command)> {\n", "file_path": "reql/src/cmd/branch.rs", "rank": 6, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n", "file_path": "reql/src/cmd/skip.rs", "rank": 7, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for cmd::Arg<()> {\n\n fn arg(self) -> cmd::Arg<()> {\n\n self\n\n }\n\n}\n\n\n\nimpl<T> Arg for T\n\nwhere\n\n T: Serialize,\n\n{\n\n fn arg(self) -> cmd::Arg<()> {\n\n let arg = Command::from_json(self);\n\n Command::new(TermType::Eq).with_arg(arg).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/eq.rs", "rank": 8, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::OrderBy).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl Arg for Desc {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Command::new(TermType::OrderBy).with_arg(self.0).into_arg()\n\n }\n\n}\n\n\n\nimpl Arg for Asc {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Command::new(TermType::OrderBy).with_arg(self.0).into_arg()\n\n }\n", "file_path": "reql/src/cmd/order_by.rs", "rank": 9, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for cmd::Arg<()> {\n\n fn arg(self) -> cmd::Arg<()> {\n\n self\n\n }\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Append).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/append.rs", "rank": 10, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Pluck).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": 
"reql/src/cmd/pluck.rs", "rank": 11, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl<T> Arg for T\n\nwhere\n\n T: Into<String>,\n\n{\n\n fn arg(self) -> cmd::Arg<()> {\n\n let cmd = Command::from_json(self.into());\n\n Command::from_json(Inner { index: Query(&cmd) }).into_arg()\n\n }\n\n}\n\n\n\nimpl Arg for Asc {\n\n fn arg(self) -> cmd::Arg<()> {\n\n let Asc(index) = self;\n\n Command::from_json(Inner {\n\n index: Query(&index),\n\n })\n", "file_path": "reql/src/cmd/index.rs", "rank": 12, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn into_run_opts(self) -> Result<(Connection, Options)>;\n\n}\n\n\n\nimpl Arg for &Session {\n\n fn into_run_opts(self) -> Result<(Connection, Options)> {\n\n let conn = self.connection()?;\n\n Ok((conn, Default::default()))\n\n }\n\n}\n\n\n\nimpl Arg for Connection {\n\n fn into_run_opts(self) -> Result<(Connection, Options)> {\n\n Ok((self, Default::default()))\n\n }\n\n}\n\n\n\nimpl Arg for Args<(&Session, Options)> {\n\n fn into_run_opts(self) -> Result<(Connection, Options)> {\n\n let Args((session, options)) = self;\n", "file_path": "reql/src/cmd/run.rs", "rank": 13, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Slice).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/slice.rs", "rank": 14, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<Options>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<Options> {\n\n Self::new(TermType::GetAll).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl<T> Arg for T\n\nwhere\n\n T: Into<String>,\n\n{\n\n fn arg(self) -> cmd::Arg<Options> {\n\n Command::from_json(self.into()).arg()\n\n }\n\n}\n\n\n\nimpl Arg for Args<(&str, Options)> {\n", "file_path": "reql/src/cmd/get_all.rs", "rank": 15, "score": 
212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::InsertAt).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/insert_at.rs", "rank": 16, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Grant).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/grant.rs", "rank": 17, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::OffsetsOf).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/offsets_of.rs", "rank": 18, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Min).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/min.rs", "rank": 19, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Ceil).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/ceil.rs", "rank": 20, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Difference).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/difference.rs", "rank": 21, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Sample).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": 
"reql/src/cmd/sample.rs", "rank": 22, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Default).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/default.rs", "rank": 23, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Reduce).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/reduce.rs", "rank": 24, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Max).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/max.rs", "rank": 25, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Merge).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/merge.rs", "rank": 26, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Literal).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/literal.rs", "rank": 27, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<Options>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<Options> {\n\n Self::new(TermType::Table).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl<T> Arg for T\n\nwhere\n\n T: Into<String>,\n\n{\n\n fn arg(self) -> cmd::Arg<Options> {\n\n Command::from_json(self.into()).arg()\n\n }\n\n}\n\n\n\nimpl Arg for Args<(Command, Options)> {\n", "file_path": "reql/src/cmd/table.rs", "rank": 28, "score": 
212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Binary).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl Arg for Binary {\n\n fn arg(self) -> cmd::Arg<()> {\n\n r.expr(self).arg()\n\n }\n\n}\n\n\n\nimpl Arg for &[u8] {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Binary::new(self).arg()\n\n }\n", "file_path": "reql/src/cmd/binary.rs", "rank": 29, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Iso8601).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/iso8601.rs", "rank": 30, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Round).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/round.rs", "rank": 31, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Command::new(TermType::Get).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl<T> Arg for T\n\nwhere\n\n T: Serialize,\n\n{\n\n fn arg(self) -> cmd::Arg<()> {\n\n Command::from_json(self).arg()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "reql/src/cmd/get.rs", "rank": 32, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Javascript).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/js.rs", "rank": 33, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n 
Self::new(TermType::CoerceTo).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/coerce_to.rs", "rank": 34, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Lt).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/lt.rs", "rank": 35, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Desc).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl<T> Arg for T\n\nwhere\n\n T: Into<String>,\n\n{\n\n fn arg(self) -> cmd::Arg<()> {\n\n Command::from_json(self.into()).arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/desc.rs", "rank": 36, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<Options>;\n\n}\n\n\n\nimpl Arg for () {\n\n fn arg(self) -> cmd::Arg<Options> {\n\n Command::new(TermType::Changes)\n\n .mark_change_feed()\n\n .into_arg()\n\n }\n\n}\n\n\n\nimpl Arg for Options {\n\n fn arg(self) -> cmd::Arg<Options> {\n\n ().arg().with_opts(self)\n\n }\n\n}\n", "file_path": "reql/src/cmd/changes.rs", "rank": 37, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Line).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/line.rs", "rank": 38, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Floor).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/floor.rs", "rank": 39, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> 
{\n\n Self::new(TermType::IsEmpty).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/is_empty.rs", "rank": 40, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Downcase).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/downcase.rs", "rank": 41, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::SpliceAt).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/splice_at.rs", "rank": 42, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Error).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl<T> Arg for T\n\nwhere\n\n T: Into<String>,\n\n{\n\n fn arg(self) -> cmd::Arg<()> {\n\n Command::from_json(self.into()).arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/error.rs", "rank": 43, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for cmd::Arg<()> {\n\n fn arg(self) -> cmd::Arg<()> {\n\n self\n\n }\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Asc).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl<T> Arg for T\n\nwhere\n\n T: Into<String>,\n\n{\n", "file_path": "reql/src/cmd/asc.rs", "rank": 44, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl<T> Arg for T\n\nwhere\n\n T: Serialize,\n\n{\n\n fn arg(self) -> cmd::Arg<()> {\n\n let arg = Command::from_json(self);\n\n Command::new(TermType::HasFields).with_arg(arg).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/has_fields.rs", "rank": 45, "score": 212490.54844385578 }, { "content": "pub 
trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Intersects).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/intersects.rs", "rank": 46, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for cmd::Arg<()> {\n\n fn arg(self) -> cmd::Arg<()> {\n\n self\n\n }\n\n}\n\n\n\nimpl Arg for () {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Command::new(TermType::Uuid).into_arg()\n\n }\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n ().arg().with_arg(self)\n\n }\n", "file_path": "reql/src/cmd/uuid.rs", "rank": 47, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Distinct).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/distinct.rs", "rank": 48, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Fold).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/fold.rs", "rank": 49, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Sum).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/sum.rs", "rank": 50, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Split).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/split.rs", "rank": 51, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) 
-> cmd::Arg<()> {\n\n Self::new(TermType::Polygon).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/polygon.rs", "rank": 52, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Range).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/range.rs", "rank": 53, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl<T> Arg for T\n\nwhere\n\n T: Serialize,\n\n{\n\n fn arg(self) -> cmd::Arg<()> {\n\n let arg = Command::from_json(self);\n\n Command::new(TermType::Bracket).with_arg(arg).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/bracket.rs", "rank": 54, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::BitAnd).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl<T> BitAnd<T> for Command\n\nwhere\n\n T: Arg,\n\n{\n\n type Output = Self;\n\n\n\n fn bitand(self, arg: T) -> Self {\n\n arg.arg().with_parent(self).into_cmd()\n\n }\n\n}\n", "file_path": "reql/src/cmd/bit_and.rs", "rank": 55, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl<T> Arg for T\n\nwhere\n\n T: Serialize,\n\n{\n\n fn arg(self) -> cmd::Arg<()> {\n\n Command::from_json(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/expr.rs", "rank": 56, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for cmd::Arg<()> {\n\n fn arg(self) -> cmd::Arg<()> {\n\n self\n\n }\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Command::new(TermType::Add).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl<T> Arg for T\n\nwhere\n\n T: Serialize,\n\n{\n", "file_path": "reql/src/cmd/add.rs", "rank": 57, 
"score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Distance).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/distance.rs", "rank": 58, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Mod).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl<T> Rem<T> for Command\n\nwhere\n\n T: Arg,\n\n{\n\n type Output = Self;\n\n\n\n fn rem(self, arg: T) -> Self {\n\n arg.arg().with_parent(self).into_cmd()\n\n }\n\n}\n", "file_path": "reql/src/cmd/rem.rs", "rank": 59, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::InTimezone).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/in_timezone.rs", "rank": 60, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Object).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/object.rs", "rank": 61, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Nth).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl Arg for isize {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Command::from_json(self).arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/nth.rs", "rank": 62, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Ge).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": 
"reql/src/cmd/ge.rs", "rank": 63, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for cmd::Arg<()> {\n\n fn arg(self) -> cmd::Arg<()> {\n\n self\n\n }\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Command::new(TermType::Map).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl Arg for Func {\n\n fn arg(self) -> cmd::Arg<()> {\n\n let Func(func) = self;\n\n func.arg()\n", "file_path": "reql/src/cmd/map.rs", "rank": 64, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Limit).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl Arg for isize {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Command::from_json(self).arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/limit.rs", "rank": 65, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Includes).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/includes.rs", "rank": 66, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Json).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/json.rs", "rank": 67, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Random).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/random.rs", "rank": 68, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n 
Self::new(TermType::Count).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/count.rs", "rank": 69, "score": 212490.54844385578 }, { "content": "/// The arguments accepted by [crate::r::connect]\n\npub trait Arg {\n\n type ToAddrs: AsyncToSocketAddrs;\n\n\n\n fn into_connect_opts(self) -> (Option<Self::ToAddrs>, Options);\n\n}\n\n\n\nimpl Arg for () {\n\n type ToAddrs = SocketAddr;\n\n\n\n fn into_connect_opts(self) -> (Option<Self::ToAddrs>, Options) {\n\n (None, Default::default())\n\n }\n\n}\n\n\n\nimpl Arg for Options {\n\n type ToAddrs = SocketAddr;\n\n\n\n fn into_connect_opts(self) -> (Option<Self::ToAddrs>, Options) {\n\n (None, self)\n\n }\n", "file_path": "reql/src/cmd/connect.rs", "rank": 70, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Db).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl<T> Arg for T\n\nwhere\n\n T: Into<String>,\n\n{\n\n fn arg(self) -> cmd::Arg<()> {\n\n Command::from_json(self.into()).arg()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "reql/src/cmd/db.rs", "rank": 71, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Circle).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/circle.rs", "rank": 72, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Div).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl<T> Div<T> for Command\n\nwhere\n\n T: Arg,\n\n{\n\n type Output = Self;\n\n\n\n fn div(self, arg: T) -> Self {\n\n arg.arg().with_parent(self).into_cmd()\n\n }\n\n}\n", "file_path": "reql/src/cmd/div.rs", "rank": 73, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn 
arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Info).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/info.rs", "rank": 74, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Sub).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl<T> Sub<T> for Command\n\nwhere\n\n T: Arg,\n\n{\n\n type Output = Self;\n\n\n\n fn sub(self, arg: T) -> Self {\n\n arg.arg().into_cmd().with_parent(self)\n\n }\n\n}\n", "file_path": "reql/src/cmd/sub.rs", "rank": 75, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<Options>;\n\n}\n\n\n\nimpl Arg for cmd::Arg<Options> {\n\n fn arg(self) -> cmd::Arg<Options> {\n\n self\n\n }\n\n}\n\n\n\nimpl Arg for () {\n\n fn arg(self) -> cmd::Arg<Options> {\n\n Command::new(TermType::Delete).into_arg()\n\n }\n\n}\n\n\n\nimpl Arg for Options {\n\n fn arg(self) -> cmd::Arg<Self> {\n\n ().arg().with_opts(self)\n\n }\n\n}\n", "file_path": "reql/src/cmd/delete.rs", "rank": 76, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Le).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/le.rs", "rank": 77, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n", "file_path": "reql/src/cmd/with_fields.rs", "rank": 78, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<Options>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<Options> {\n\n Self::new(TermType::Filter).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl Arg for Args<(Command, Options)> {\n\n fn arg(self) -> cmd::Arg<Options> {\n\n let Args((arg, opts)) = self;\n\n 
arg.arg().with_opts(opts)\n\n }\n\n}\n\n\n\nimpl Arg for Func {\n\n fn arg(self) -> cmd::Arg<Options> {\n\n let Func(arg) = self;\n", "file_path": "reql/src/cmd/filter.rs", "rank": 79, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Ne).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/ne.rs", "rank": 80, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::BitOr).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl<T> BitOr<T> for Command\n\nwhere\n\n T: Arg,\n\n{\n\n type Output = Self;\n\n\n\n fn bitor(self, arg: T) -> Self {\n\n arg.arg().with_parent(self).into_cmd()\n\n }\n\n}\n", "file_path": "reql/src/cmd/bit_or.rs", "rank": 81, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Reconfigure).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/reconfigure.rs", "rank": 82, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Prepend).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/prepend.rs", "rank": 83, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::DeleteAt).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl Arg for i64 {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Command::from_json(self).arg()\n\n }\n\n}\n\n\n\nimpl Arg for Args<[i64; 2]> {\n\n fn arg(self) -> cmd::Arg<()> {\n\n let Args([offset, end_offset]) = self;\n\n 
Command::from_json(offset)\n\n .arg()\n\n .with_arg(Command::from_json(end_offset))\n\n }\n\n}\n", "file_path": "reql/src/cmd/delete_at.rs", "rank": 84, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Http).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/http.rs", "rank": 85, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Time).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/time.rs", "rank": 86, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Point).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/point.rs", "rank": 87, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::ChangeAt).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/change_at.rs", "rank": 88, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<Options>;\n\n}\n\n\n\nimpl Arg for cmd::Arg<Options> {\n\n fn arg(self) -> cmd::Arg<Options> {\n\n self\n\n }\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<Options> {\n\n Command::new(TermType::Replace).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl<T> Arg for T\n\nwhere\n\n T: Serialize,\n\n{\n", "file_path": "reql/src/cmd/replace.rs", "rank": 89, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Avg).with_arg(self).into_arg()\n\n }\n\n}\n", 
"file_path": "reql/src/cmd/avg.rs", "rank": 90, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Mul).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl<T> Mul<T> for Command\n\nwhere\n\n T: Arg,\n\n{\n\n type Output = Self;\n\n\n\n fn mul(self, arg: T) -> Self {\n\n arg.arg().with_parent(self).into_cmd()\n\n }\n\n}\n", "file_path": "reql/src/cmd/mul.rs", "rank": 91, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Match).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/match_.rs", "rank": 92, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Group).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/group.rs", "rank": 93, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Gt).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/gt.rs", "rank": 94, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Contains).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/contains.rs", "rank": 95, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn noreply_wait(self) -> bool;\n\n}\n\n\n\nimpl Arg for () {\n\n fn noreply_wait(self) -> bool {\n\n true\n\n }\n\n}\n\n\n\nimpl Arg for SkipNoreplyWait {\n\n fn noreply_wait(self) -> bool {\n\n false\n\n }\n\n}\n", "file_path": "reql/src/cmd/close.rs", "rank": 96, "score": 
212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Geojson).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/geojson.rs", "rank": 97, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<Options>;\n\n}\n\n\n\nimpl Arg for cmd::Arg<Options> {\n\n fn arg(self) -> cmd::Arg<Options> {\n\n self\n\n }\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<Options> {\n\n Command::new(TermType::Insert).with_arg(self).into_arg()\n\n }\n\n}\n\n\n\nimpl<T> Arg for T\n\nwhere\n\n T: Serialize,\n\n{\n", "file_path": "reql/src/cmd/insert.rs", "rank": 98, "score": 212490.54844385578 }, { "content": "pub trait Arg {\n\n fn arg(self) -> cmd::Arg<()>;\n\n}\n\n\n\nimpl Arg for Command {\n\n fn arg(self) -> cmd::Arg<()> {\n\n Self::new(TermType::Wait).with_arg(self).into_arg()\n\n }\n\n}\n", "file_path": "reql/src/cmd/wait.rs", "rank": 99, "score": 212490.54844385578 } ]
Rust
cranelift/codegen/src/isa/x64/abi.rs
jgouly/wasmtime-old
715ca4112a1bd187a32e8238e3bce2b5a2c9d635
#![allow(dead_code)] #![allow(non_snake_case)] use crate::ir; use crate::ir::types; use crate::ir::types::*; use crate::ir::StackSlot; use crate::ir::Type; use crate::isa; use crate::isa::x64::inst::*; use crate::isa::x64::*; use crate::machinst::*; use alloc::vec::Vec; use regalloc::{RealReg, Reg, RegClass, Set, SpillSlot, Writable}; #[derive(Clone, Debug)] enum ABIArg { Reg(RealReg), Stack, } #[derive(Clone, Debug)] enum ABIRet { Reg(RealReg), Mem, } pub struct X64ABIBody { args: Vec<ABIArg>, rets: Vec<ABIRet>, stackslots: Vec<usize>, stackslots_size: usize, clobbered: Set<Writable<RealReg>>, spillslots: Option<usize>, spill_area_sizeB: Option<usize>, call_conv: isa::CallConv, } fn in_int_reg(ty: types::Type) -> bool { match ty { types::I8 | types::I16 | types::I32 | types::I64 => true, types::B1 | types::B8 | types::B16 | types::B32 | types::B64 => true, _ => false, } } fn get_intreg_for_arg_ELF(idx: usize) -> Option<Reg> { match idx { 0 => Some(reg_RDI()), 1 => Some(reg_RSI()), 2 => Some(reg_RDX()), 3 => Some(reg_RCX()), 4 => Some(reg_R8()), 5 => Some(reg_R9()), _ => None, } } fn get_intreg_for_retval_ELF(idx: usize) -> Option<Reg> { match idx { 0 => Some(reg_RAX()), 1 => Some(reg_RDX()), _ => None, } } fn is_callee_save_ELF(r: RealReg) -> bool { match r.get_class() { RegClass::I64 => match r.get_hw_encoding() as u8 { ENC_RBX | ENC_RBP | ENC_R12 | ENC_R13 | ENC_R14 | ENC_R15 => true, _ => false, }, _ => unimplemented!(), } } fn get_callee_saves(regs: Vec<Writable<RealReg>>) -> Vec<Writable<RealReg>> { regs.into_iter() .filter(|r| is_callee_save_ELF(r.to_reg())) .collect() } impl X64ABIBody { pub fn new(f: &ir::Function) -> Self { println!("X64 ABI: func signature {:?}", f.signature); let mut args = vec![]; let mut next_int_arg = 0; for param in &f.signature.params { match param.purpose { ir::ArgumentPurpose::Normal => { if in_int_reg(param.value_type) { if let Some(reg) = get_intreg_for_arg_ELF(next_int_arg) { args.push(ABIArg::Reg(reg.to_real_reg())); } else { 
unimplemented!("passing arg on the stack"); } next_int_arg += 1; } else { unimplemented!("non int normal register") } } ir::ArgumentPurpose::VMContext => { debug_assert!(f.signature.call_conv.extends_baldrdash()); args.push(ABIArg::Reg(reg_R14().to_real_reg())); } _ => unimplemented!("other parameter purposes"), } } let mut rets = vec![]; let mut next_int_retval = 0; for ret in &f.signature.returns { match ret.purpose { ir::ArgumentPurpose::Normal => { if in_int_reg(ret.value_type) { if let Some(reg) = get_intreg_for_retval_ELF(next_int_retval) { rets.push(ABIRet::Reg(reg.to_real_reg())); } else { unimplemented!("passing return on the stack"); } next_int_retval += 1; } else { unimplemented!("returning non integer normal value"); } } _ => { unimplemented!("non normal argument purpose"); } } } let mut stack_offset: usize = 0; let mut stackslots = vec![]; for (stackslot, data) in f.stack_slots.iter() { let off = stack_offset; stack_offset += data.size as usize; stack_offset = (stack_offset + 7) & !7usize; assert_eq!(stackslot.as_u32() as usize, stackslots.len()); stackslots.push(off); } Self { args, rets, stackslots, stackslots_size: stack_offset, clobbered: Set::empty(), spillslots: None, spill_area_sizeB: None, call_conv: f.signature.call_conv.clone(), } } } impl ABIBody<Inst> for X64ABIBody { fn num_args(&self) -> usize { unimplemented!() } fn num_retvals(&self) -> usize { unimplemented!() } fn num_stackslots(&self) -> usize { unimplemented!() } fn liveins(&self) -> Set<RealReg> { let mut set: Set<RealReg> = Set::empty(); for arg in &self.args { if let &ABIArg::Reg(r) = arg { set.insert(r); } } println!("X64 ABI: liveins {:?}", set); set } fn liveouts(&self) -> Set<RealReg> { let mut set: Set<RealReg> = Set::empty(); for ret in &self.rets { if let &ABIRet::Reg(r) = ret { set.insert(r); } } println!("X64 ABI: liveouts {:?}", set); set } fn gen_copy_arg_to_reg(&self, idx: usize, to_reg: Writable<Reg>) -> Inst { match &self.args[idx] { ABIArg::Reg(from_reg) => { if 
from_reg.get_class() == RegClass::I32 || from_reg.get_class() == RegClass::I64 { return i_Mov_R_R(/*is64=*/ true, from_reg.to_reg(), to_reg); } unimplemented!("moving from non-int arg to vreg"); } ABIArg::Stack => unimplemented!("moving from stack arg to vreg"), } } fn gen_copy_reg_to_retval(&self, idx: usize, from_reg: Reg) -> Inst { match &self.rets[idx] { ABIRet::Reg(to_reg) => { if to_reg.get_class() == RegClass::I32 || to_reg.get_class() == RegClass::I64 { return i_Mov_R_R( /*is64=*/ true, from_reg, Writable::<Reg>::from_reg(to_reg.to_reg()), ); } unimplemented!("moving from vreg to non-int return value"); } ABIRet::Mem => { panic!("moving from vreg to memory return value"); } } } fn gen_ret(&self) -> Inst { i_Ret() } fn gen_epilogue_placeholder(&self) -> Inst { i_epilogue_placeholder() } fn set_num_spillslots(&mut self, slots: usize) { self.spillslots = Some(slots); } fn set_clobbered(&mut self, clobbered: Set<Writable<RealReg>>) { self.clobbered = clobbered; } fn load_stackslot( &self, _slot: StackSlot, _offset: usize, _ty: Type, _into_reg: Writable<Reg>, ) -> Inst { unimplemented!() } fn store_stackslot(&self, _slot: StackSlot, _offset: usize, _ty: Type, _from_reg: Reg) -> Inst { unimplemented!() } fn load_spillslot(&self, _slot: SpillSlot, _ty: Type, _into_reg: Writable<Reg>) -> Inst { unimplemented!() } fn store_spillslot(&self, _slot: SpillSlot, _ty: Type, _from_reg: Reg) -> Inst { unimplemented!() } fn gen_prologue(&mut self) -> Vec<Inst> { let total_stacksize = self.stackslots_size + 8 * self.spillslots.unwrap(); let total_stacksize = (total_stacksize + 15) & !15; let r_rbp = reg_RBP(); let r_rsp = reg_RSP(); let w_rbp = Writable::<Reg>::from_reg(r_rbp); let w_rsp = Writable::<Reg>::from_reg(r_rsp); let mut insts = vec![]; if !self.call_conv.extends_baldrdash() { insts.push(i_Push64(ip_RMI_R(r_rbp))); insts.push(i_Mov_R_R(true, r_rsp, w_rbp)); } let mut callee_saved_used = 0; let clobbered = get_callee_saves(self.clobbered.to_vec()); for reg in 
clobbered { let r_reg = reg.to_reg(); match r_reg.get_class() { RegClass::I64 => { insts.push(i_Push64(ip_RMI_R(r_reg.to_reg()))); callee_saved_used += 8; } _ => unimplemented!(), } } let mut spill_area_sizeB = total_stacksize; match callee_saved_used % 16 { 0 => spill_area_sizeB += 0, 8 => spill_area_sizeB += 8, _ => panic!("gen_prologue(x86): total_stacksize is not 8-aligned"), } if spill_area_sizeB > 0x7FFF_FFFF { panic!("gen_prologue(x86): total_stacksize >= 2G"); } if spill_area_sizeB > 0 { insts.push(i_Alu_RMI_R( true, RMI_R_Op::Sub, ip_RMI_I(spill_area_sizeB as u32), w_rsp, )); } debug_assert!(self.spill_area_sizeB.is_none()); self.spill_area_sizeB = Some(spill_area_sizeB); insts } fn gen_epilogue(&self) -> Vec<Inst> { let r_rbp = reg_RBP(); let r_rsp = reg_RSP(); let w_rbp = Writable::<Reg>::from_reg(r_rbp); let w_rsp = Writable::<Reg>::from_reg(r_rsp); let mut insts = vec![]; let spill_area_sizeB = self.spill_area_sizeB.unwrap(); if spill_area_sizeB > 0 { insts.push(i_Alu_RMI_R( true, RMI_R_Op::Add, ip_RMI_I(spill_area_sizeB as u32), w_rsp, )); } let mut tmp_insts = vec![]; let clobbered = get_callee_saves(self.clobbered.to_vec()); for w_real_reg in clobbered { match w_real_reg.to_reg().get_class() { RegClass::I64 => { tmp_insts.push(i_Pop64(Writable::<Reg>::from_reg( w_real_reg.to_reg().to_reg(), ))) } _ => unimplemented!(), } } tmp_insts.reverse(); for i in tmp_insts { insts.push(i); } if !self.call_conv.extends_baldrdash() { insts.push(i_Pop64(w_rbp)); insts.push(i_Ret()); } insts } fn frame_size(&self) -> u32 { self.spill_area_sizeB .expect("frame size not computed before prologue generation") as u32 } fn get_spillslot_size(&self, rc: RegClass, ty: Type) -> u32 { match (rc, ty) { (RegClass::I64, _) => 1, (RegClass::V128, F32) | (RegClass::V128, F64) => 1, (RegClass::V128, _) => 2, _ => panic!("Unexpected register class!"), } } fn gen_spill(&self, _to_slot: SpillSlot, _from_reg: RealReg, _ty: Type) -> Inst { unimplemented!() } fn gen_reload(&self, 
_to_reg: Writable<RealReg>, _from_slot: SpillSlot, _ty: Type) -> Inst { unimplemented!() } }
#![allow(dead_code)] #![allow(non_snake_case)] use crate::ir; use crate::ir::types; use crate::ir::types::*; use crate::ir::StackSlot; use crate::ir::Type; use crate::isa; use crate::isa::x64::inst::*; use crate::isa::x64::*; use crate::machinst::*; use alloc::vec::Vec; use regalloc::{RealReg, Reg, RegClass, Set, SpillSlot, Writable}; #[derive(Clone, Debug)] enum ABIArg { Reg(RealReg), Stack, } #[derive(Clone, Debug)] enum ABIRet { Reg(RealReg), Mem, } pub struct X64ABIBody { args: Vec<ABIArg>, rets: Vec<ABIRet>, stackslots: Vec<usize>, stackslots_size: usize, clobbered: Set<Writable<RealReg>>, spillslots: Option<usize>, spill_area_sizeB: Option<usize>, call_conv: isa::CallConv, }
fn get_intreg_for_arg_ELF(idx: usize) -> Option<Reg> { match idx { 0 => Some(reg_RDI()), 1 => Some(reg_RSI()), 2 => Some(reg_RDX()), 3 => Some(reg_RCX()), 4 => Some(reg_R8()), 5 => Some(reg_R9()), _ => None, } } fn get_intreg_for_retval_ELF(idx: usize) -> Option<Reg> { match idx { 0 => Some(reg_RAX()), 1 => Some(reg_RDX()), _ => None, } } fn is_callee_save_ELF(r: RealReg) -> bool { match r.get_class() { RegClass::I64 => match r.get_hw_encoding() as u8 { ENC_RBX | ENC_RBP | ENC_R12 | ENC_R13 | ENC_R14 | ENC_R15 => true, _ => false, }, _ => unimplemented!(), } } fn get_callee_saves(regs: Vec<Writable<RealReg>>) -> Vec<Writable<RealReg>> { regs.into_iter() .filter(|r| is_callee_save_ELF(r.to_reg())) .collect() } impl X64ABIBody { pub fn new(f: &ir::Function) -> Self { println!("X64 ABI: func signature {:?}", f.signature); let mut args = vec![]; let mut next_int_arg = 0; for param in &f.signature.params { match param.purpose { ir::ArgumentPurpose::Normal => { if in_int_reg(param.value_type) { if let Some(reg) = get_intreg_for_arg_ELF(next_int_arg) { args.push(ABIArg::Reg(reg.to_real_reg())); } else { unimplemented!("passing arg on the stack"); } next_int_arg += 1; } else { unimplemented!("non int normal register") } } ir::ArgumentPurpose::VMContext => { debug_assert!(f.signature.call_conv.extends_baldrdash()); args.push(ABIArg::Reg(reg_R14().to_real_reg())); } _ => unimplemented!("other parameter purposes"), } } let mut rets = vec![]; let mut next_int_retval = 0; for ret in &f.signature.returns { match ret.purpose { ir::ArgumentPurpose::Normal => { if in_int_reg(ret.value_type) { if let Some(reg) = get_intreg_for_retval_ELF(next_int_retval) { rets.push(ABIRet::Reg(reg.to_real_reg())); } else { unimplemented!("passing return on the stack"); } next_int_retval += 1; } else { unimplemented!("returning non integer normal value"); } } _ => { unimplemented!("non normal argument purpose"); } } } let mut stack_offset: usize = 0; let mut stackslots = vec![]; for (stackslot, 
data) in f.stack_slots.iter() { let off = stack_offset; stack_offset += data.size as usize; stack_offset = (stack_offset + 7) & !7usize; assert_eq!(stackslot.as_u32() as usize, stackslots.len()); stackslots.push(off); } Self { args, rets, stackslots, stackslots_size: stack_offset, clobbered: Set::empty(), spillslots: None, spill_area_sizeB: None, call_conv: f.signature.call_conv.clone(), } } } impl ABIBody<Inst> for X64ABIBody { fn num_args(&self) -> usize { unimplemented!() } fn num_retvals(&self) -> usize { unimplemented!() } fn num_stackslots(&self) -> usize { unimplemented!() } fn liveins(&self) -> Set<RealReg> { let mut set: Set<RealReg> = Set::empty(); for arg in &self.args { if let &ABIArg::Reg(r) = arg { set.insert(r); } } println!("X64 ABI: liveins {:?}", set); set } fn liveouts(&self) -> Set<RealReg> { let mut set: Set<RealReg> = Set::empty(); for ret in &self.rets { if let &ABIRet::Reg(r) = ret { set.insert(r); } } println!("X64 ABI: liveouts {:?}", set); set } fn gen_copy_arg_to_reg(&self, idx: usize, to_reg: Writable<Reg>) -> Inst { match &self.args[idx] { ABIArg::Reg(from_reg) => { if from_reg.get_class() == RegClass::I32 || from_reg.get_class() == RegClass::I64 { return i_Mov_R_R(/*is64=*/ true, from_reg.to_reg(), to_reg); } unimplemented!("moving from non-int arg to vreg"); } ABIArg::Stack => unimplemented!("moving from stack arg to vreg"), } } fn gen_copy_reg_to_retval(&self, idx: usize, from_reg: Reg) -> Inst { match &self.rets[idx] { ABIRet::Reg(to_reg) => { if to_reg.get_class() == RegClass::I32 || to_reg.get_class() == RegClass::I64 { return i_Mov_R_R( /*is64=*/ true, from_reg, Writable::<Reg>::from_reg(to_reg.to_reg()), ); } unimplemented!("moving from vreg to non-int return value"); } ABIRet::Mem => { panic!("moving from vreg to memory return value"); } } } fn gen_ret(&self) -> Inst { i_Ret() } fn gen_epilogue_placeholder(&self) -> Inst { i_epilogue_placeholder() } fn set_num_spillslots(&mut self, slots: usize) { self.spillslots = 
Some(slots); } fn set_clobbered(&mut self, clobbered: Set<Writable<RealReg>>) { self.clobbered = clobbered; } fn load_stackslot( &self, _slot: StackSlot, _offset: usize, _ty: Type, _into_reg: Writable<Reg>, ) -> Inst { unimplemented!() } fn store_stackslot(&self, _slot: StackSlot, _offset: usize, _ty: Type, _from_reg: Reg) -> Inst { unimplemented!() } fn load_spillslot(&self, _slot: SpillSlot, _ty: Type, _into_reg: Writable<Reg>) -> Inst { unimplemented!() } fn store_spillslot(&self, _slot: SpillSlot, _ty: Type, _from_reg: Reg) -> Inst { unimplemented!() } fn gen_prologue(&mut self) -> Vec<Inst> { let total_stacksize = self.stackslots_size + 8 * self.spillslots.unwrap(); let total_stacksize = (total_stacksize + 15) & !15; let r_rbp = reg_RBP(); let r_rsp = reg_RSP(); let w_rbp = Writable::<Reg>::from_reg(r_rbp); let w_rsp = Writable::<Reg>::from_reg(r_rsp); let mut insts = vec![]; if !self.call_conv.extends_baldrdash() { insts.push(i_Push64(ip_RMI_R(r_rbp))); insts.push(i_Mov_R_R(true, r_rsp, w_rbp)); } let mut callee_saved_used = 0; let clobbered = get_callee_saves(self.clobbered.to_vec()); for reg in clobbered { let r_reg = reg.to_reg(); match r_reg.get_class() { RegClass::I64 => { insts.push(i_Push64(ip_RMI_R(r_reg.to_reg()))); callee_saved_used += 8; } _ => unimplemented!(), } } let mut spill_area_sizeB = total_stacksize; match callee_saved_used % 16 { 0 => spill_area_sizeB += 0, 8 => spill_area_sizeB += 8, _ => panic!("gen_prologue(x86): total_stacksize is not 8-aligned"), } if spill_area_sizeB > 0x7FFF_FFFF { panic!("gen_prologue(x86): total_stacksize >= 2G"); } if spill_area_sizeB > 0 { insts.push(i_Alu_RMI_R( true, RMI_R_Op::Sub, ip_RMI_I(spill_area_sizeB as u32), w_rsp, )); } debug_assert!(self.spill_area_sizeB.is_none()); self.spill_area_sizeB = Some(spill_area_sizeB); insts } fn gen_epilogue(&self) -> Vec<Inst> { let r_rbp = reg_RBP(); let r_rsp = reg_RSP(); let w_rbp = Writable::<Reg>::from_reg(r_rbp); let w_rsp = Writable::<Reg>::from_reg(r_rsp); let 
mut insts = vec![]; let spill_area_sizeB = self.spill_area_sizeB.unwrap(); if spill_area_sizeB > 0 { insts.push(i_Alu_RMI_R( true, RMI_R_Op::Add, ip_RMI_I(spill_area_sizeB as u32), w_rsp, )); } let mut tmp_insts = vec![]; let clobbered = get_callee_saves(self.clobbered.to_vec()); for w_real_reg in clobbered { match w_real_reg.to_reg().get_class() { RegClass::I64 => { tmp_insts.push(i_Pop64(Writable::<Reg>::from_reg( w_real_reg.to_reg().to_reg(), ))) } _ => unimplemented!(), } } tmp_insts.reverse(); for i in tmp_insts { insts.push(i); } if !self.call_conv.extends_baldrdash() { insts.push(i_Pop64(w_rbp)); insts.push(i_Ret()); } insts } fn frame_size(&self) -> u32 { self.spill_area_sizeB .expect("frame size not computed before prologue generation") as u32 } fn get_spillslot_size(&self, rc: RegClass, ty: Type) -> u32 { match (rc, ty) { (RegClass::I64, _) => 1, (RegClass::V128, F32) | (RegClass::V128, F64) => 1, (RegClass::V128, _) => 2, _ => panic!("Unexpected register class!"), } } fn gen_spill(&self, _to_slot: SpillSlot, _from_reg: RealReg, _ty: Type) -> Inst { unimplemented!() } fn gen_reload(&self, _to_reg: Writable<RealReg>, _from_slot: SpillSlot, _ty: Type) -> Inst { unimplemented!() } }
fn in_int_reg(ty: types::Type) -> bool { match ty { types::I8 | types::I16 | types::I32 | types::I64 => true, types::B1 | types::B8 | types::B16 | types::B32 | types::B64 => true, _ => false, } }
function_block-full_function
[ { "content": "fn memarg_regs(memarg: &MemArg, used: &mut Set<Reg>, modified: &mut Set<Writable<Reg>>) {\n\n match memarg {\n\n &MemArg::Unscaled(reg, ..) | &MemArg::UnsignedOffset(reg, ..) => {\n\n used.insert(reg);\n\n }\n\n &MemArg::RegScaled(r1, r2, ..) => {\n\n used.insert(r1);\n\n used.insert(r2);\n\n }\n\n &MemArg::Label(..) => {}\n\n &MemArg::PreIndexed(reg, ..) | &MemArg::PostIndexed(reg, ..) => {\n\n modified.insert(reg);\n\n }\n\n &MemArg::StackOffset(..) => {\n\n used.insert(fp_reg());\n\n }\n\n }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/mod.rs", "rank": 0, "score": 376185.45189179754 }, { "content": "/// Get a writable reference to the stack-pointer register.\n\npub fn writable_stack_reg() -> Writable<Reg> {\n\n Writable::from_reg(stack_reg())\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/regs.rs", "rank": 1, "score": 315004.4000730076 }, { "content": "fn abisig_to_uses_and_defs(sig: &ABISig) -> (Set<Reg>, Set<Writable<Reg>>) {\n\n // Compute uses: all arg regs.\n\n let mut uses = Set::empty();\n\n for arg in &sig.args {\n\n match arg {\n\n &ABIArg::Reg(reg) => uses.insert(reg.to_reg()),\n\n _ => {}\n\n }\n\n }\n\n\n\n // Compute defs: all retval regs, and all caller-save (clobbered) regs.\n\n let mut defs = get_caller_saves_set();\n\n for ret in &sig.rets {\n\n match ret {\n\n &ABIRet::Reg(reg) => defs.insert(Writable::from_reg(reg.to_reg())),\n\n _ => {}\n\n }\n\n }\n\n\n\n (uses, defs)\n", "file_path": "cranelift/codegen/src/isa/arm64/abi.rs", "rank": 2, "score": 306052.7592895642 }, { "content": "/// Get a writable reference to the spilltmp reg.\n\npub fn writable_spilltmp_reg() -> Writable<Reg> {\n\n Writable::from_reg(spilltmp_reg())\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/regs.rs", "rank": 3, "score": 266016.68463476055 }, { "content": "/// Get a writable reference to the link register.\n\npub fn writable_link_reg() -> Writable<Reg> {\n\n 
Writable::from_reg(link_reg())\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/regs.rs", "rank": 4, "score": 266010.06051630364 }, { "content": "/// Get a writable reference to the frame pointer.\n\npub fn writable_fp_reg() -> Writable<Reg> {\n\n Writable::from_reg(fp_reg())\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/regs.rs", "rank": 5, "score": 266010.06051630364 }, { "content": "/// Get a writable reference to the zero-register (this discards a result).\n\npub fn writable_zero_reg() -> Writable<Reg> {\n\n Writable::from_reg(zero_reg())\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/regs.rs", "rank": 6, "score": 266009.950025736 }, { "content": "/// Get a reference to the stack-pointer register.\n\npub fn stack_reg() -> Reg {\n\n // XSP (stack) and XZR (zero) are logically different registers which have\n\n // the same hardware encoding, and whose meaning, in real arm64\n\n // instructions, is context-dependent. For convenience of\n\n // universe-construction and for correct printing, we make them be two\n\n // different real registers.\n\n Reg::new_real(\n\n RegClass::I64,\n\n /* enc = */ 31,\n\n /* index = */ SP_REG_INDEX,\n\n )\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/regs.rs", "rank": 7, "score": 261258.10989646014 }, { "content": "/// Get a writable reference to a V-register.\n\npub fn writable_vreg(num: u8) -> Writable<Reg> {\n\n Writable::from_reg(vreg(num))\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/regs.rs", "rank": 8, "score": 248315.7978309881 }, { "content": "/// Get a writable reference to an X-register.\n\npub fn writable_xreg(num: u8) -> Writable<Reg> {\n\n Writable::from_reg(xreg(num))\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/regs.rs", "rank": 9, "score": 248315.7978309881 }, { "content": "fn get_caller_saves_set() -> Set<Writable<Reg>> {\n\n let mut set = Set::empty();\n\n for i in 0..28 {\n\n let x = writable_xreg(i);\n\n if 
is_caller_save(x.to_reg().to_real_reg()) {\n\n set.insert(x);\n\n }\n\n }\n\n for i in 0..32 {\n\n let v = writable_vreg(i);\n\n if is_caller_save(v.to_reg().to_real_reg()) {\n\n set.insert(v);\n\n }\n\n }\n\n set\n\n}\n\n\n\nimpl ABIBody<Inst> for ARM64ABIBody {\n\n fn liveins(&self) -> Set<RealReg> {\n\n let mut set: Set<RealReg> = Set::empty();\n", "file_path": "cranelift/codegen/src/isa/arm64/abi.rs", "rank": 10, "score": 245544.20241316644 }, { "content": "// Given a register class and a register unit in the class, compute a word index and a bit mask of\n\n// register units representing that register.\n\n//\n\n// Note that a register is not allowed to straddle words.\n\nfn bitmask(rc: RegClass, reg: RegUnit) -> (usize, u32) {\n\n // Bit mask representing the register. It is `rc.width` consecutive units.\n\n let width_bits = (1 << rc.width) - 1;\n\n // Index into avail[] of the word containing `reg`.\n\n let word_index = (reg / 32) as usize;\n\n // The actual bits in the word that cover `reg`.\n\n let reg_bits = width_bits << (reg % 32);\n\n\n\n (word_index, reg_bits)\n\n}\n\n\n\nimpl RegisterSet {\n\n /// Create a new register set with all registers available.\n\n ///\n\n /// Note that this includes *all* registers. 
Query the `TargetIsa` object to get a set of\n\n /// allocatable registers where reserved registers have been filtered out.\n\n pub fn new() -> Self {\n\n Self { avail: [!0; 3] }\n\n }\n\n\n", "file_path": "cranelift/codegen/src/regalloc/register_set.rs", "rank": 11, "score": 236087.18872909987 }, { "content": "pub fn i_Pop64(wdst: Writable<Reg>) -> Inst {\n\n Inst::Pop64 { dst: wdst.to_reg() }\n\n}\n\n\n\n//pub fn i_CallKnown(target: FuncRef) -> Inst {\n\n// Inst::CallKnown { target }\n\n//}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 12, "score": 224212.60673160985 }, { "content": "pub fn i_Mov_R_R(is64: bool, src: Reg, wdst: Writable<Reg>) -> Inst {\n\n let dst = wdst.to_reg();\n\n debug_assert!(src.get_class() == RegClass::I64);\n\n debug_assert!(dst.get_class() == RegClass::I64);\n\n Inst::Mov_R_R { is64, src, dst }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 13, "score": 223692.82473737735 }, { "content": "/// Get register class for a type appearing in a legalized signature.\n\npub fn regclass_for_abi_type(ty: Type) -> RegClass {\n\n if ty.is_float() {\n\n FPR\n\n } else {\n\n GPR\n\n }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/riscv/abi.rs", "rank": 14, "score": 222210.39184827346 }, { "content": "#[derive(Clone, Copy)]\n\nstruct RegUse {\n\n value: Value,\n\n opidx: u16,\n\n\n\n // Register class required by the use.\n\n rci: RegClassIndex,\n\n\n\n // A use with a fixed register constraint.\n\n fixed: bool,\n\n\n\n // A register use of a spilled value.\n\n spilled: bool,\n\n\n\n // A use with a tied register constraint *and* the used value is not killed.\n\n tied: bool,\n\n}\n\n\n\nimpl RegUse {\n\n fn new(value: Value, idx: usize, rci: RegClassIndex) -> Self {\n\n Self {\n", "file_path": "cranelift/codegen/src/regalloc/spilling.rs", "rank": 15, "score": 216535.84239542988 }, { "content": "/// Get register class for a type appearing in a legalized signature.\n\npub fn 
regclass_for_abi_type(ty: ir::Type) -> RegClass {\n\n if ty.is_int() {\n\n GPR\n\n } else {\n\n match ty.bits() {\n\n 32 => S,\n\n 64 => D,\n\n 128 => Q,\n\n _ => panic!(\"Unexpected {} ABI type for arm32\", ty),\n\n }\n\n }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm32/abi.rs", "rank": 16, "score": 215654.60394455725 }, { "content": "/// Get register class for a type appearing in a legalized signature.\n\npub fn regclass_for_abi_type(ty: ir::Type) -> RegClass {\n\n if ty.is_int() || ty.is_bool() || ty.is_ref() {\n\n GPR\n\n } else {\n\n FPR\n\n }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x86/abi.rs", "rank": 17, "score": 215654.60394455725 }, { "content": "/// Get a reference to the \"spill temp\" register. This register is used to\n\n/// compute the address of a spill slot when a direct offset addressing mode from\n\n/// FP is not sufficient (+/- 2^11 words). We exclude this register from regalloc\n\n/// and reserve it for this purpose for simplicity; otherwise we need a\n\n/// multi-stage analysis where we first determine how many spill slots we have,\n\n/// then perhaps remove the reg from the pool and recompute regalloc.\n\npub fn spilltmp_reg() -> Reg {\n\n xreg(15)\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/regs.rs", "rank": 18, "score": 210181.1966198177 }, { "content": "/// Get a reference to the link register (x30).\n\npub fn link_reg() -> Reg {\n\n xreg(30)\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/regs.rs", "rank": 19, "score": 210171.04072730395 }, { "content": "/// Get a reference to the frame pointer (x29).\n\npub fn fp_reg() -> Reg {\n\n xreg(29)\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/regs.rs", "rank": 20, "score": 210171.04072730395 }, { "content": "/// Get a reference to the zero-register.\n\npub fn zero_reg() -> Reg {\n\n // This should be the same as what xreg(31) returns, except that\n\n // we use the special index into the register index space.\n\n 
Reg::new_real(\n\n RegClass::I64,\n\n /* enc = */ 31,\n\n /* index = */ ZERO_REG_INDEX,\n\n )\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/regs.rs", "rank": 21, "score": 210171.04072730395 }, { "content": "#[derive(Clone, Debug)]\n\nenum ABIRet {\n\n Reg(RealReg),\n\n Mem, // TODO\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/abi.rs", "rank": 22, "score": 208819.08832185104 }, { "content": "#[derive(Clone, Debug)]\n\nenum ABIArg {\n\n Reg(RealReg),\n\n Stack, // TODO\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/abi.rs", "rank": 25, "score": 208779.14451081387 }, { "content": "pub fn i_Mov64_M_R(addr: Addr, wdst: Writable<Reg>) -> Inst {\n\n let dst = wdst.to_reg();\n\n debug_assert!(dst.get_class() == RegClass::I64);\n\n Inst::Mov64_M_R { addr, dst }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 26, "score": 206193.05900920683 }, { "content": "pub fn reg_RBP() -> Reg {\n\n info_RBP().0.to_reg()\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 27, "score": 205296.14160800658 }, { "content": "pub fn reg_RSP() -> Reg {\n\n info_RSP().0.to_reg()\n\n}\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 28, "score": 205296.14160800658 }, { "content": "pub fn reg_RDI() -> Reg {\n\n info_RDI().0.to_reg()\n\n}\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 29, "score": 205296.14160800658 }, { "content": "pub fn reg_RCX() -> Reg {\n\n info_RCX().0.to_reg()\n\n}\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 30, "score": 205296.14160800658 }, { "content": "pub fn reg_R9() -> Reg {\n\n info_R9().0.to_reg()\n\n}\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 31, "score": 205296.14160800658 }, { "content": "// For external consumption. It's probably important that LLVM optimises\n\n// these into a 32-bit constant. 
That will require sprinkling a bunch of\n\n// inline-always pragmas around the place.\n\npub fn reg_RAX() -> Reg {\n\n info_RAX().0.to_reg()\n\n}\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 32, "score": 205296.14160800658 }, { "content": "pub fn reg_RSI() -> Reg {\n\n info_RSI().0.to_reg()\n\n}\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 33, "score": 205296.14160800658 }, { "content": "pub fn reg_R14() -> Reg {\n\n info_R14().0.to_reg()\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 34, "score": 205296.14160800658 }, { "content": "pub fn reg_RDX() -> Reg {\n\n info_RDX().0.to_reg()\n\n}\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 35, "score": 205296.14160800658 }, { "content": "pub fn reg_R8() -> Reg {\n\n info_R8().0.to_reg()\n\n}\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 36, "score": 205296.14160800658 }, { "content": "pub fn ip_RMI_R(reg: Reg) -> RMI {\n\n debug_assert!(reg.get_class() == RegClass::I64);\n\n RMI::R { reg }\n\n}\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 37, "score": 202662.40287537748 }, { "content": "pub fn ip_RM_R(reg: Reg) -> RM {\n\n debug_assert!(reg.get_class() == RegClass::I64);\n\n RM::R { reg }\n\n}\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 38, "score": 202662.40287537748 }, { "content": "/// Whether the REX prefix is needed for encoding extended registers (via REX.RXB).\n\n///\n\n/// Normal x86 instructions have only 3 bits for encoding a register.\n\n/// The REX prefix adds REX.R, REX,X, and REX.B bits, interpreted as fourth bits.\n\npub fn is_extended_reg(reg: RegUnit) -> bool {\n\n // Extended registers have the fourth bit set.\n\n reg as u8 & 0b1000 != 0\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x86/enc_tables.rs", "rank": 39, "score": 196311.12547989137 }, { "content": "pub fn i_Imm_R(dstIs64: bool, simm64: u64, wdst: Writable<Reg>) -> Inst {\n\n let dst = 
wdst.to_reg();\n\n debug_assert!(dst.get_class() == RegClass::I64);\n\n if !dstIs64 {\n\n debug_assert!(low32willSXto64(simm64));\n\n }\n\n Inst::Imm_R {\n\n dstIs64,\n\n simm64,\n\n dst,\n\n }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 40, "score": 195628.26993013453 }, { "content": "/// Show a vector register used in a scalar context.\n\npub fn show_vreg_scalar(reg: Reg, mb_rru: Option<&RealRegUniverse>) -> String {\n\n let mut s = reg.show_rru(mb_rru);\n\n if reg.get_class() != RegClass::V128 {\n\n // We can't do any better.\n\n return s;\n\n }\n\n\n\n if reg.is_real() {\n\n // Change (eg) \"v0\" into \"d0\".\n\n if reg.get_class() == RegClass::V128 && s.starts_with(\"v\") {\n\n s = \"d\".to_string() + &s[1..];\n\n }\n\n } else {\n\n // Add a \"d\" suffix to RegClass::V128 vregs.\n\n if reg.get_class() == RegClass::V128 {\n\n s = s + &\"d\";\n\n }\n\n }\n\n s\n\n}\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/regs.rs", "rank": 41, "score": 194502.5554663387 }, { "content": "/// Get a reference to a V-register (vector/FP register).\n\npub fn vreg(num: u8) -> Reg {\n\n assert!(num < 32);\n\n Reg::new_real(RegClass::V128, /* enc = */ num, /* index = */ num)\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/regs.rs", "rank": 42, "score": 192659.04088367798 }, { "content": "/// Get a reference to an X-register (integer register).\n\npub fn xreg(num: u8) -> Reg {\n\n assert!(num < 31);\n\n Reg::new_real(\n\n RegClass::I64,\n\n /* enc = */ num,\n\n /* index = */ XREG_INDICES[num as usize],\n\n )\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/regs.rs", "rank": 43, "score": 192659.04088367798 }, { "content": "pub fn print_bytes(mem: &[u8]) {\n\n print!(\".byte \");\n\n let mut first = true;\n\n for byte in mem.iter() {\n\n if first {\n\n first = false;\n\n } else {\n\n print!(\", \");\n\n }\n\n print!(\"{}\", byte);\n\n }\n\n println!();\n\n}\n\n\n", "file_path": "cranelift/src/disasm.rs", "rank": 
44, "score": 191543.4526175757 }, { "content": "pub fn build_dependencies<R: Reader<Offset = usize>>(\n\n dwarf: &read::Dwarf<R>,\n\n at: &AddressTransform,\n\n) -> read::Result<Dependencies> {\n\n let mut deps = Dependencies::new();\n\n let mut units = dwarf.units();\n\n while let Some(unit) = units.next()? {\n\n build_unit_dependencies(unit, dwarf, at, &mut deps)?;\n\n }\n\n Ok(deps)\n\n}\n\n\n", "file_path": "crates/debug/src/gc.rs", "rank": 45, "score": 189573.68922188465 }, { "content": "/// Test of two registers overlap.\n\n///\n\n/// A register is identified as a `(RegClass, RegUnit)` pair. The register class is needed to\n\n/// determine the width (in regunits) of the register.\n\npub fn regs_overlap(rc1: RegClass, reg1: RegUnit, rc2: RegClass, reg2: RegUnit) -> bool {\n\n let end1 = reg1 + RegUnit::from(rc1.width);\n\n let end2 = reg2 + RegUnit::from(rc2.width);\n\n !(end1 <= reg2 || end2 <= reg1)\n\n}\n\n\n\n/// Information about the registers in an ISA.\n\n///\n\n/// The `RegUnit` data structure collects all relevant static information about the registers in an\n\n/// ISA.\n\n#[derive(Clone)]\n\npub struct RegInfo {\n\n /// All register banks, ordered by their `first_unit`. 
The register banks are disjoint, but\n\n /// there may be holes of unused register unit numbers between banks due to alignment.\n\n pub banks: &'static [RegBank],\n\n\n\n /// All register classes ordered topologically so a sub-class always follows its parent.\n\n pub classes: &'static [RegClass],\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/registers.rs", "rank": 46, "score": 189230.82713926624 }, { "content": "pub fn print_readonly_data(mem: &[u8]) {\n\n if mem.is_empty() {\n\n return;\n\n }\n\n\n\n println!(\"\\nFollowed by {} bytes of read-only data:\", mem.len());\n\n\n\n for (i, byte) in mem.iter().enumerate() {\n\n if i % 16 == 0 {\n\n if i != 0 {\n\n println!();\n\n }\n\n print!(\"{:4}: \", i);\n\n }\n\n if i % 4 == 0 {\n\n print!(\" \");\n\n }\n\n print!(\"{:02x} \", byte);\n\n }\n\n println!();\n\n}\n", "file_path": "cranelift/src/disasm.rs", "rank": 47, "score": 189112.8506461507 }, { "content": "pub fn needs_offset(reg: RegUnit) -> bool {\n\n reg == RU::r13 as RegUnit || reg == RU::rbp as RegUnit\n\n}\n", "file_path": "cranelift/codegen/src/isa/x86/enc_tables.rs", "rank": 48, "score": 188954.49935030355 }, { "content": "pub fn i_MovZX_M_R(extMode: ExtMode, addr: Addr, wdst: Writable<Reg>) -> Inst {\n\n let dst = wdst.to_reg();\n\n debug_assert!(dst.get_class() == RegClass::I64);\n\n Inst::MovZX_M_R { extMode, addr, dst }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 49, "score": 188604.16015835086 }, { "content": "pub fn i_MovSX_M_R(extMode: ExtMode, addr: Addr, wdst: Writable<Reg>) -> Inst {\n\n let dst = wdst.to_reg();\n\n debug_assert!(dst.get_class() == RegClass::I64);\n\n Inst::MovSX_M_R { extMode, addr, dst }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 50, "score": 188604.16015835086 }, { "content": "pub fn needs_sib_byte(reg: RegUnit) -> bool {\n\n reg == RU::r12 as RegUnit || reg == RU::rsp as RegUnit\n\n}\n", "file_path": "cranelift/codegen/src/isa/x86/enc_tables.rs", "rank": 
51, "score": 187194.53100126225 }, { "content": "/// If |ireg| denotes an I64-classed reg, make a best-effort attempt to show\n\n/// its name at the 32-bit size.\n\npub fn show_ireg_sized(reg: Reg, mb_rru: Option<&RealRegUniverse>, is32: bool) -> String {\n\n let mut s = reg.show_rru(mb_rru);\n\n if reg.get_class() != RegClass::I64 || !is32 {\n\n // We can't do any better.\n\n return s;\n\n }\n\n\n\n if reg.is_real() {\n\n // Change (eg) \"x42\" into \"w42\" as appropriate\n\n if reg.get_class() == RegClass::I64 && is32 && s.starts_with(\"x\") {\n\n s = \"w\".to_string() + &s[1..];\n\n }\n\n } else {\n\n // Add a \"w\" suffix to RegClass::I64 vregs used in a 32-bit role\n\n if reg.get_class() == RegClass::I64 && is32 {\n\n s = s + &\"w\";\n\n }\n\n }\n\n s\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/regs.rs", "rank": 52, "score": 186681.04043470617 }, { "content": "/// A machine instruction.\n\npub trait MachInst: Clone + Debug {\n\n /// Return the registers referenced by this machine instruction along with\n\n /// the modes of reference (use, def, modify).\n\n fn get_regs(&self) -> InstRegUses;\n\n\n\n /// Map virtual registers to physical registers using the given virt->phys\n\n /// maps corresponding to the program points prior to, and after, this instruction.\n\n fn map_regs(\n\n &mut self,\n\n pre_map: &RegallocMap<VirtualReg, RealReg>,\n\n post_map: &RegallocMap<VirtualReg, RealReg>,\n\n );\n\n\n\n /// If this is a simple move, return the (source, destination) tuple of registers.\n\n fn is_move(&self) -> Option<(Writable<Reg>, Reg)>;\n\n\n\n /// Is this a terminator (branch or ret)? 
If so, return its type\n\n /// (ret/uncond/cond) and target if applicable.\n\n fn is_term<'a>(&'a self) -> MachTerminator<'a>;\n\n\n", "file_path": "cranelift/codegen/src/machinst/mod.rs", "rank": 53, "score": 186665.92041763017 }, { "content": "pub fn needs_sib_byte_or_offset(reg: RegUnit) -> bool {\n\n needs_sib_byte(reg) || needs_offset(reg)\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x86/enc_tables.rs", "rank": 54, "score": 185491.920246033 }, { "content": "fn load_stack(fp_offset: i64, into_reg: Writable<Reg>, ty: Type) -> Inst {\n\n assert!(into_reg.to_reg().get_class() == RegClass::I64);\n\n let mem = get_stack_addr(fp_offset);\n\n\n\n match ty {\n\n types::B1 | types::B8 | types::I8 => Inst::ULoad8 { rd: into_reg, mem },\n\n types::B16 | types::I16 => Inst::ULoad16 { rd: into_reg, mem },\n\n types::B32 | types::I32 => Inst::ULoad32 { rd: into_reg, mem },\n\n types::B64 | types::I64 => Inst::ULoad64 { rd: into_reg, mem },\n\n _ => unimplemented!(),\n\n }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/abi.rs", "rank": 55, "score": 184202.07502239826 }, { "content": "struct InstanceTranslateContext(pub wasmtime::Instance);\n\n\n\nimpl TranslateContext for InstanceTranslateContext {\n\n fn invoke_alloc(&mut self, alloc_func_name: &str, len: i32) -> Result<i32> {\n\n let alloc = self\n\n .0\n\n .get_export(alloc_func_name)\n\n .ok_or_else(|| format_err!(\"failed to find alloc function `{}`\", alloc_func_name))?\n\n .func()\n\n .ok_or_else(|| format_err!(\"`{}` is not a (alloc) function\", alloc_func_name))?\n\n .clone();\n\n let alloc_args = vec![wasmtime::Val::I32(len)];\n\n let results = alloc.call(&alloc_args)?;\n\n if results.len() != 1 {\n\n bail!(\"allocator function wrong number of results\");\n\n }\n\n Ok(match results[0] {\n\n wasmtime::Val::I32(i) => i,\n\n _ => bail!(\"allocator function bad return type\"),\n\n })\n", "file_path": "crates/interface-types/src/lib.rs", "rank": 56, "score": 183816.40751890017 }, { "content": "/// 
This is a single-use macro intended to be used in the `wasmtime-wasi` crate.\n\n///\n\n/// This macro will generate a structure, `Wasi`, which will create all the\n\n/// functions necessary to bind wasi and hook everything up via the `wasmtime`\n\n/// crate.\n\n///\n\n/// The generated shim functions here will also `trace!` their arguments for\n\n/// logging purposes. Otherwise this is hopefully somewhat straightforward!\n\n///\n\n/// I'd recommend using `cargo +nightly expand` to explore the output of this\n\n/// macro some more.\n\npub fn define_struct(args: TokenStream) -> TokenStream {\n\n let (path, _phase) = utils::witx_path_from_args(args);\n\n let doc = match witx::load(&[&path]) {\n\n Ok(doc) => doc,\n\n Err(e) => {\n\n panic!(\"error opening file {}: {}\", path, e);\n\n }\n\n };\n\n\n\n let mut fields = Vec::new();\n\n let mut get_exports = Vec::new();\n\n let mut ctor_externs = Vec::new();\n\n let mut ctor_fields = Vec::new();\n\n\n\n for module in doc.modules() {\n\n for func in module.funcs() {\n\n let name = func.name.as_str();\n\n let name_ident = Ident::new(func.name.as_str(), Span::call_site());\n\n fields.push(quote! { pub #name_ident: wasmtime::Func });\n\n get_exports.push(quote! 
{ #name => Some(&self.#name_ident) });\n", "file_path": "crates/wasi-common/wig/src/wasi.rs", "rank": 58, "score": 182219.3575367987 }, { "content": "pub fn i_Alu_RMI_R(is64: bool, op: RMI_R_Op, src: RMI, wdst: Writable<Reg>) -> Inst {\n\n let dst = wdst.to_reg();\n\n debug_assert!(dst.get_class() == RegClass::I64);\n\n Inst::Alu_RMI_R { is64, op, src, dst }\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 59, "score": 181797.43771033478 }, { "content": "#[proc_macro]\n\npub fn define_wasi_struct(args: TokenStream) -> TokenStream {\n\n wasi::define_struct(args.into()).into()\n\n}\n\n\n", "file_path": "crates/wasi-common/wig/src/lib.rs", "rank": 60, "score": 179384.61606177798 }, { "content": "pub fn ip_Addr_IRRS(simm32: u32, base: Reg, index: Reg, shift: u8) -> Addr {\n\n debug_assert!(base.get_class() == RegClass::I64);\n\n debug_assert!(index.get_class() == RegClass::I64);\n\n debug_assert!(shift <= 3);\n\n Addr::IRRS {\n\n simm32,\n\n base,\n\n index,\n\n shift,\n\n }\n\n}\n\nimpl ShowWithRRU for Addr {\n\n fn show_rru(&self, mb_rru: Option<&RealRegUniverse>) -> String {\n\n match self {\n\n Addr::IR { simm32, base } => format!(\"{}({})\", *simm32 as i32, base.show_rru(mb_rru)),\n\n Addr::IRRS {\n\n simm32,\n\n base,\n\n index,\n\n shift,\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 61, "score": 175324.99964680363 }, { "content": "#[derive(Clone)]\n\nstruct DebugReloc {\n\n offset: u32,\n\n size: u8,\n\n name: String,\n\n addend: i64,\n\n}\n\n\n\npub enum ResolvedSymbol {\n\n PhysicalAddress(u64),\n\n Reloc { name: String, addend: i64 },\n\n}\n\n\n", "file_path": "crates/debug/src/write_debuginfo.rs", "rank": 62, "score": 174264.57805367652 }, { "content": "/// This is the information provided during building for a setting.\n\nstruct ProtoSetting {\n\n name: &'static str,\n\n comment: &'static str,\n\n specific: ProtoSpecificSetting,\n\n}\n\n\n\n#[derive(Hash, PartialEq, Eq)]\n\npub(crate) enum PredicateNode {\n\n 
OwnedBool(BoolSettingIndex),\n\n SharedBool(&'static str, &'static str),\n\n Not(Box<PredicateNode>),\n\n And(Box<PredicateNode>, Box<PredicateNode>),\n\n}\n\n\n\nimpl Into<PredicateNode> for BoolSettingIndex {\n\n fn into(self) -> PredicateNode {\n\n PredicateNode::OwnedBool(self)\n\n }\n\n}\n\nimpl<'a> Into<PredicateNode> for (BoolSettingIndex, &'a SettingGroup) {\n", "file_path": "cranelift/codegen/meta/src/cdsl/settings.rs", "rank": 63, "score": 172039.21181339963 }, { "content": "#[derive(Hash, PartialEq, Eq)]\n\nenum SettingOrPreset<'a> {\n\n Setting(&'a Setting),\n\n Preset(&'a Preset),\n\n}\n\n\n\nimpl<'a> SettingOrPreset<'a> {\n\n fn name(&self) -> &str {\n\n match *self {\n\n SettingOrPreset::Setting(s) => s.name,\n\n SettingOrPreset::Preset(p) => p.name,\n\n }\n\n }\n\n}\n\n\n", "file_path": "cranelift/codegen/meta/src/gen_settings.rs", "rank": 64, "score": 169729.39852705103 }, { "content": "/// Compute the stack frame layout.\n\n///\n\n/// Determine the total size of this stack frame and assign offsets to all `Spill` and `Explicit`\n\n/// stack slots.\n\n///\n\n/// The total frame size will be a multiple of `alignment` which must be a power of two, unless the\n\n/// function doesn't perform any call.\n\n///\n\n/// Returns the total stack frame size which is also saved in `frame.frame_size`.\n\n///\n\n/// If the stack frame is too big, returns an `ImplLimitExceeded` error.\n\npub fn layout_stack(\n\n frame: &mut StackSlots,\n\n is_leaf: bool,\n\n alignment: StackSize,\n\n) -> CodegenResult<StackSize> {\n\n // Each object and the whole stack frame must fit in 2 GB such that any relative offset within\n\n // the frame fits in a `StackOffset`.\n\n let max_size = StackOffset::max_value() as StackSize;\n\n debug_assert!(alignment.is_power_of_two() && alignment <= max_size);\n\n\n\n // We assume a stack that grows toward lower addresses as implemented by modern ISAs. The\n\n // stack layout from high to low addresses will be:\n\n //\n\n // 1. 
incoming arguments.\n\n // 2. spills + explicits + struct returns.\n\n // 3. outgoing arguments.\n\n //\n\n // The incoming arguments can have both positive and negative offsets. A negative offset\n\n // incoming arguments is usually the x86 return address pushed by the call instruction, but\n\n // it can also be fixed stack slots pushed by an externally generated prologue.\n", "file_path": "cranelift/codegen/src/stack_layout.rs", "rank": 65, "score": 168128.01099476762 }, { "content": "/// Determine the right action to take when passing a `have` value type to a call signature where\n\n/// the next argument is `arg` which has a different value type.\n\n///\n\n/// The signature legalization process in `legalize_args` above can replace a single argument value\n\n/// with multiple arguments of smaller types. It can also change the type of an integer argument to\n\n/// a larger integer type, requiring the smaller value to be sign- or zero-extended.\n\n///\n\n/// The legalizer needs to repair the values at all ABI boundaries:\n\n///\n\n/// - Incoming function arguments to the entry block.\n\n/// - Function arguments passed to a call.\n\n/// - Return values from a call.\n\n/// - Return values passed to a return instruction.\n\n///\n\n/// The `legalize_abi_value` function helps the legalizer with the process. When the legalizer\n\n/// needs to pass a pre-legalized `have` argument, but the ABI argument `arg` has a different value\n\n/// type, `legalize_abi_value(have, arg)` tells the legalizer how to create the needed value type\n\n/// for the argument.\n\n///\n\n/// It may be necessary to call `legalize_abi_value` more than once for a given argument before the\n\n/// desired argument type appears. 
This will happen when a vector or integer type needs to be split\n\n/// more than once, for example.\n\npub fn legalize_abi_value(have: Type, arg: &AbiParam) -> ValueConversion {\n\n let have_bits = have.bits();\n\n let arg_bits = arg.value_type.bits();\n\n\n\n match have_bits.cmp(&arg_bits) {\n\n // We have fewer bits than the ABI argument.\n\n Ordering::Less => {\n\n debug_assert!(\n\n have.is_int() && arg.value_type.is_int(),\n\n \"Can only extend integer values\"\n\n );\n\n match arg.extension {\n\n ArgumentExtension::Uext => ValueConversion::Uext(arg.value_type),\n\n ArgumentExtension::Sext => ValueConversion::Sext(arg.value_type),\n\n _ => panic!(\"No argument extension specified\"),\n\n }\n\n }\n\n // We have the same number of bits as the argument.\n\n Ordering::Equal => {\n\n // This must be an integer vector that is split and then extended.\n", "file_path": "cranelift/codegen/src/abi.rs", "rank": 66, "score": 167127.65995804867 }, { "content": "fn map_reg(reg: RegUnit) -> Register {\n\n static mut REG_X86_MAP: Option<HashMap<RegUnit, Register>> = None;\n\n // FIXME lazy initialization?\n\n unsafe {\n\n if REG_X86_MAP.is_none() {\n\n REG_X86_MAP = Some(HashMap::new());\n\n }\n\n if let Some(val) = REG_X86_MAP.as_mut().unwrap().get(&reg) {\n\n return *val;\n\n }\n\n let result = match reg {\n\n 0 => X86_64::RAX,\n\n 1 => X86_64::RCX,\n\n 2 => X86_64::RDX,\n\n 3 => X86_64::RBX,\n\n 4 => X86_64::RSP,\n\n 5 => X86_64::RBP,\n\n 6 => X86_64::RSI,\n\n 7 => X86_64::RDI,\n\n 8 => X86_64::R8,\n", "file_path": "crates/debug/src/transform/expression.rs", "rank": 67, "score": 165187.84518062178 }, { "content": "pub fn ip_Addr_IR(simm32: u32, base: Reg) -> Addr {\n\n debug_assert!(base.get_class() == RegClass::I64);\n\n Addr::IR { simm32, base }\n\n}\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 68, "score": 164939.60168598624 }, { "content": "#[derive(Clone, Copy)]\n\nstruct SetValue();\n\n\n", "file_path": "cranelift/bforest/src/lib.rs", "rank": 
69, "score": 163352.91362131442 }, { "content": "#[derive(Debug)]\n\nenum UseVarCases {\n\n Unsealed(Value),\n\n SealedOnePredecessor(SSABlock),\n\n SealedMultiplePredecessors(Value, Block),\n\n}\n\n\n", "file_path": "cranelift/frontend/src/ssa.rs", "rank": 70, "score": 161411.46669784494 }, { "content": "/// Create the register universe for ARM64.\n\npub fn create_reg_universe() -> RealRegUniverse {\n\n let mut regs = vec![];\n\n let mut allocable_by_class = [None; NUM_REG_CLASSES];\n\n\n\n // Numbering Scheme: we put V-regs first, then X-regs. The X-regs\n\n // exclude several registers: x18 (globally reserved for platform-specific\n\n // purposes), x29 (frame pointer), x30 (link register), x31 (stack pointer\n\n // or zero register, depending on context).\n\n\n\n let v_reg_base = 0u8; // in contiguous real-register index space\n\n let v_reg_count = 32;\n\n for i in 0u8..v_reg_count {\n\n let reg = Reg::new_real(\n\n RegClass::V128,\n\n /* enc = */ i,\n\n /* index = */ v_reg_base + i,\n\n )\n\n .to_real_reg();\n\n let name = format!(\"v{}\", i);\n\n regs.push((reg, name));\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/regs.rs", "rank": 71, "score": 160883.11776266008 }, { "content": "struct MemOpInfo {\n\n opcode: Opcode,\n\n itype: Type,\n\n arg: Value,\n\n st_arg: Option<Value>,\n\n flags: MemFlags,\n\n offset: Offset32,\n\n}\n\n\n", "file_path": "cranelift/codegen/src/postopt.rs", "rank": 72, "score": 160794.1242688248 }, { "content": "struct Args {\n\n pointer_bits: u8,\n\n pointer_bytes: u8,\n\n pointer_type: Type,\n\n regs: u32,\n\n reg_limit: u32,\n\n offset: u32,\n\n}\n\n\n\nimpl Args {\n\n fn new(bits: u8, enable_e: bool) -> Self {\n\n Self {\n\n pointer_bits: bits,\n\n pointer_bytes: bits / 8,\n\n pointer_type: Type::int(u16::from(bits)).unwrap(),\n\n regs: 0,\n\n reg_limit: if enable_e { 6 } else { 8 },\n\n offset: 0,\n\n }\n\n }\n", "file_path": "cranelift/codegen/src/isa/riscv/abi.rs", "rank": 73, "score": 160756.3520845141 }, { "content": 
"struct Args {\n\n pointer_bits: u8,\n\n pointer_bytes: u8,\n\n pointer_type: Type,\n\n regs: u32,\n\n reg_limit: u32,\n\n offset: u32,\n\n}\n\n\n\nimpl Args {\n\n fn new(bits: u8) -> Self {\n\n Self {\n\n pointer_bits: bits,\n\n pointer_bytes: bits / 8,\n\n pointer_type: Type::int(u16::from(bits)).unwrap(),\n\n regs: 0,\n\n reg_limit: 8,\n\n offset: 0,\n\n }\n\n }\n", "file_path": "cranelift/codegen/src/isa/arm32/abi.rs", "rank": 74, "score": 160756.3520845141 }, { "content": "#[derive(Clone)]\n\nstruct Args {\n\n pointer_bytes: u8,\n\n pointer_bits: u8,\n\n pointer_type: ir::Type,\n\n gpr: &'static [RU],\n\n gpr_used: usize,\n\n fpr_limit: usize,\n\n fpr_used: usize,\n\n offset: u32,\n\n call_conv: CallConv,\n\n shared_flags: shared_settings::Flags,\n\n #[allow(dead_code)]\n\n isa_flags: isa_settings::Flags,\n\n}\n\n\n\nimpl Args {\n\n fn new(\n\n bits: u8,\n\n gpr: &'static [RU],\n\n fpr_limit: usize,\n", "file_path": "cranelift/codegen/src/isa/x86/abi.rs", "rank": 75, "score": 160756.3520845141 }, { "content": "fn enc_csel(rd: Writable<Reg>, rn: Reg, rm: Reg, cond: Cond) -> u32 {\n\n 0b100_11010100_00000_0000_00_00000_00000\n\n | (machreg_to_gpr(rm) << 16)\n\n | (machreg_to_gpr(rn) << 5)\n\n | machreg_to_gpr(rd.to_reg())\n\n | (cond.bits() << 12)\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/emit.rs", "rank": 76, "score": 160731.48551335098 }, { "content": "/// Keep track of the set of available registers in two interference domains: all registers\n\n/// considering diversions and global registers not considering diversions.\n\nstruct AvailableRegs {\n\n /// The exact set of registers available on the input side of the current instruction. This\n\n /// takes into account register diversions, and it includes both local and global live ranges.\n\n input: RegisterSet,\n\n\n\n /// Registers available for allocating globally live values. 
This set ignores any local values,\n\n /// and it does not account for register diversions.\n\n ///\n\n /// Global values must be allocated out of this set because conflicts with other global values\n\n /// can't be resolved with local diversions.\n\n global: RegisterSet,\n\n}\n\n\n\nimpl AvailableRegs {\n\n /// Initialize both the input and global sets from `regs`.\n\n pub fn new(regs: &RegisterSet) -> Self {\n\n Self {\n\n input: regs.clone(),\n\n global: regs.clone(),\n\n }\n", "file_path": "cranelift/codegen/src/regalloc/coloring.rs", "rank": 77, "score": 160680.57478128214 }, { "content": "#[derive(Debug)]\n\nstruct Range {\n\n wasm_start: WasmAddress,\n\n wasm_end: WasmAddress,\n\n gen_start: GeneratedAddress,\n\n gen_end: GeneratedAddress,\n\n positions: Box<[Position]>,\n\n}\n\n\n\n/// Helper function address lookup data. Contains ranges start positions\n\n/// index and ranges data. The multiple ranges can include the same\n\n/// original source position. The index (B-Tree) uses range start\n\n/// position as a key.\n", "file_path": "crates/debug/src/transform/address_transform.rs", "rank": 78, "score": 160666.16164054757 }, { "content": "#[derive(Debug)]\n\nstruct Position {\n\n wasm_pos: WasmAddress,\n\n gen_start: GeneratedAddress,\n\n gen_end: GeneratedAddress,\n\n}\n\n\n\n/// Mapping of continuous range of source location to its generated\n\n/// code. 
The positions are always in accending order for search.\n", "file_path": "crates/debug/src/transform/address_transform.rs", "rank": 79, "score": 160666.16164054757 }, { "content": "struct FunctionRelocResolver {}\n\nimpl SymbolResolver for FunctionRelocResolver {\n\n fn resolve_symbol(&self, symbol: usize, addend: i64) -> ResolvedSymbol {\n\n let name = format!(\"_wasm_function_{}\", symbol);\n\n ResolvedSymbol::Reloc { name, addend }\n\n }\n\n}\n\n\n", "file_path": "crates/debug/src/lib.rs", "rank": 80, "score": 160659.3592749107 }, { "content": "fn enc_arith_rrrr(top11: u32, rm: Reg, bit15: u32, ra: Reg, rn: Reg, rd: Writable<Reg>) -> u32 {\n\n (top11 << 21)\n\n | (machreg_to_gpr(rm) << 16)\n\n | (bit15 << 15)\n\n | (machreg_to_gpr(ra) << 10)\n\n | (machreg_to_gpr(rn) << 5)\n\n | machreg_to_gpr(rd.to_reg())\n\n}\n\n\n", "file_path": "cranelift/codegen/src/isa/arm64/inst/emit.rs", "rank": 81, "score": 160188.7268698628 }, { "content": "/// Tag type defining forest types for a set.\n\nstruct SetTypes<K>(PhantomData<K>);\n\n\n\nimpl<K> Forest for SetTypes<K>\n\nwhere\n\n K: Copy,\n\n{\n\n type Key = K;\n\n type Value = SetValue;\n\n type LeafKeys = [K; 2 * INNER_SIZE - 1];\n\n type LeafValues = [SetValue; 2 * INNER_SIZE - 1];\n\n\n\n fn splat_key(key: Self::Key) -> Self::LeafKeys {\n\n [key; 2 * INNER_SIZE - 1]\n\n }\n\n\n\n fn splat_value(value: Self::Value) -> Self::LeafValues {\n\n [value; 2 * INNER_SIZE - 1]\n\n }\n\n}\n\n\n", "file_path": "cranelift/bforest/src/set.rs", "rank": 82, "score": 160014.61524528835 }, { "content": "/// A string-based configurator for settings groups.\n\n///\n\n/// The `Configurable` protocol allows settings to be modified by name before a finished `Flags`\n\n/// struct is created.\n\npub trait Configurable {\n\n /// Set the string value of any setting by name.\n\n ///\n\n /// This can set any type of setting whether it is numeric, boolean, or enumerated.\n\n fn set(&mut self, name: &str, value: &str) -> SetResult<()>;\n\n\n\n /// 
Enable a boolean setting or apply a preset.\n\n ///\n\n /// If the identified setting isn't a boolean or a preset, a `BadType` error is returned.\n\n fn enable(&mut self, name: &str) -> SetResult<()>;\n\n}\n\n\n\n/// Collect settings values based on a template.\n\n#[derive(Clone)]\n\npub struct Builder {\n\n template: &'static detail::Template,\n\n bytes: Box<[u8]>,\n\n}\n\n\n\nimpl Builder {\n", "file_path": "cranelift/codegen/src/settings.rs", "rank": 83, "score": 159330.78466251772 }, { "content": "pub fn compile_cranelift(\n\n wasm: &[u8],\n\n target: Option<Triple>,\n\n output: impl AsRef<Path>,\n\n) -> Result<()> {\n\n let obj = compile_to_obj(\n\n wasm,\n\n target.as_ref(),\n\n Strategy::Cranelift,\n\n false,\n\n wasmtime::OptLevel::None,\n\n true,\n\n output\n\n .as_ref()\n\n .file_name()\n\n .unwrap()\n\n .to_string_lossy()\n\n .to_string(),\n\n &CacheConfig::new_cache_disabled(),\n\n )?;\n\n\n\n let file = File::create(output).context(\"failed to create object file\")?;\n\n obj.write(file).context(\"failed to write object file\")?;\n\n\n\n Ok(())\n\n}\n", "file_path": "tests/debug/obj.rs", "rank": 84, "score": 159289.62414506043 }, { "content": "#[derive(Debug)]\n\nenum CompiledExpressionPart {\n\n Code(Vec<u8>),\n\n Local(ValueLabel),\n\n Deref,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct CompiledExpression {\n\n parts: Vec<CompiledExpressionPart>,\n\n need_deref: bool,\n\n}\n\n\n\nimpl Clone for CompiledExpressionPart {\n\n fn clone(&self) -> Self {\n\n match self {\n\n CompiledExpressionPart::Code(c) => CompiledExpressionPart::Code(c.clone()),\n\n CompiledExpressionPart::Local(i) => CompiledExpressionPart::Local(*i),\n\n CompiledExpressionPart::Deref => CompiledExpressionPart::Deref,\n\n }\n\n }\n", "file_path": "crates/debug/src/transform/expression.rs", "rank": 85, "score": 158733.85806283634 }, { "content": "struct ProtoPredicate {\n\n pub name: &'static str,\n\n node: PredicateNode,\n\n}\n\n\n\npub(crate) type SettingPredicateNumber = 
u8;\n\n\n\npub(crate) struct Predicate {\n\n pub name: &'static str,\n\n node: PredicateNode,\n\n pub number: SettingPredicateNumber,\n\n}\n\n\n\nimpl Predicate {\n\n pub fn render(&self, group: &SettingGroup) -> String {\n\n self.node.render(group)\n\n }\n\n}\n\n\n\npub(crate) struct SettingGroupBuilder {\n", "file_path": "cranelift/codegen/meta/src/cdsl/settings.rs", "rank": 86, "score": 158141.633564743 }, { "content": "#[derive(Debug)]\n\nstruct FuncTransform {\n\n start: WasmAddress,\n\n end: WasmAddress,\n\n index: DefinedFuncIndex,\n\n lookup: FuncLookup,\n\n}\n\n\n\n/// Module functions mapping to generated code.\n\n#[derive(Debug)]\n\npub struct AddressTransform {\n\n map: PrimaryMap<DefinedFuncIndex, FunctionMap>,\n\n func: Vec<(WasmAddress, FuncTransform)>,\n\n}\n\n\n", "file_path": "crates/debug/src/transform/address_transform.rs", "rank": 87, "score": 158121.99077299045 }, { "content": "#[derive(Debug)]\n\nstruct FuncLookup {\n\n index: Vec<(WasmAddress, Box<[usize]>)>,\n\n ranges: Box<[Range]>,\n\n}\n\n\n\n/// Mapping of original functions to generated code locations/ranges.\n", "file_path": "crates/debug/src/transform/address_transform.rs", "rank": 88, "score": 158121.99077299045 }, { "content": "struct ImageRelocResolver<'a> {\n\n func_offsets: &'a Vec<u64>,\n\n}\n\n\n\nimpl<'a> SymbolResolver for ImageRelocResolver<'a> {\n\n fn resolve_symbol(&self, symbol: usize, addend: i64) -> ResolvedSymbol {\n\n let func_start = self.func_offsets[symbol];\n\n ResolvedSymbol::PhysicalAddress(func_start + addend as u64)\n\n }\n\n}\n\n\n", "file_path": "crates/debug/src/lib.rs", "rank": 89, "score": 157775.78561573176 }, { "content": "/// A trait implemented for types which can be returned from closures passed to\n\n/// [`Func::wrap1`] and friends.\n\n///\n\n/// This trait should not be implemented by user types. This trait may change at\n\n/// any time internally. 
The types which implement this trait, however, are\n\n/// stable over time.\n\n///\n\n/// For more information see [`Func::wrap1`]\n\npub trait WasmRet {\n\n #[doc(hidden)]\n\n type Abi;\n\n #[doc(hidden)]\n\n fn push(dst: &mut Vec<ValType>);\n\n #[doc(hidden)]\n\n fn matches(tys: impl Iterator<Item = ValType>) -> anyhow::Result<()>;\n\n #[doc(hidden)]\n\n fn into_abi(self) -> Self::Abi;\n\n}\n\n\n\nimpl<T: WasmTy> WasmRet for T {\n\n type Abi = T::Abi;\n\n fn push(dst: &mut Vec<ValType>) {\n\n T::push(dst)\n\n }\n\n\n\n fn matches(tys: impl Iterator<Item = ValType>) -> anyhow::Result<()> {\n\n T::matches(tys)\n\n }\n", "file_path": "crates/api/src/func.rs", "rank": 90, "score": 156764.91501862905 }, { "content": "/// Common trait for assigning arguments to registers or stack locations.\n\n///\n\n/// This will be implemented by individual ISAs.\n\npub trait ArgAssigner {\n\n /// Pick an assignment action for function argument (or return value) `arg`.\n\n fn assign(&mut self, arg: &AbiParam) -> ArgAction;\n\n}\n\n\n", "file_path": "cranelift/codegen/src/abi.rs", "rank": 91, "score": 156730.43453056857 }, { "content": "/// Parse \"set\" and \"triple\" commands.\n\npub fn parse_sets_and_triple(\n\n flag_set: &[String],\n\n flag_triple: &str,\n\n) -> Result<OwnedFlagsOrIsa, String> {\n\n let mut flag_builder = settings::builder();\n\n parse_options(\n\n flag_set.iter().map(|x| x.as_str()),\n\n &mut flag_builder,\n\n Location { line_number: 0 },\n\n )\n\n .map_err(|err| err.to_string())?;\n\n\n\n let mut words = flag_triple.trim().split_whitespace();\n\n // Look for `target foo`.\n\n if let Some(triple_name) = words.next() {\n\n let triple = match Triple::from_str(triple_name) {\n\n Ok(triple) => triple,\n\n Err(parse_error) => return Err(parse_error.to_string()),\n\n };\n\n let builder_or_backend = isa::lookup(triple).map_err(|err| match err {\n", "file_path": "cranelift/src/utils.rs", "rank": 92, "score": 156660.770842092 }, { "content": "pub fn 
emit_debugsections(\n\n obj: &mut Artifact,\n\n vmctx_info: &ModuleVmctxInfo,\n\n target_config: TargetFrontendConfig,\n\n debuginfo_data: &DebugInfoData,\n\n at: &ModuleAddressMap,\n\n ranges: &ValueLabelsRanges,\n\n) -> Result<(), Error> {\n\n let resolver = FunctionRelocResolver {};\n\n let dwarf = transform_dwarf(target_config, debuginfo_data, at, vmctx_info, ranges)?;\n\n emit_dwarf(obj, dwarf, &resolver)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/debug/src/lib.rs", "rank": 93, "score": 156627.04555867382 }, { "content": "struct ProcessingStackElem {\n\n /// Indicates the AvailEnv at the current point in the Block.\n\n avail_env: AvailEnv,\n\n\n\n /// Shows where we currently are inside the Block.\n\n cursor: CursorPosition,\n\n\n\n /// Indicates the currently active register diversions at the current point.\n\n diversions: RegDiversions,\n\n}\n\n\n\n// =============================================================================================\n\n// The top level data structure\n\n\n\n// `RedundantReloadRemover` contains data structures for the two passes: discovery of tree shaped\n\n// regions, and processing of them. These are allocated once and stay alive for the entire\n\n// function, even though they are cleared out for each new tree shaped region. 
It also caches\n\n// `num_regunits` and `num_preds_per_block`, which are computed at the start of each function and\n\n// then remain constant.\n\n\n", "file_path": "cranelift/codegen/src/redundant_reload_remover.rs", "rank": 94, "score": 155801.482035726 }, { "content": "#[derive(Clone)]\n\nstruct EntryRegDiversionsValue {\n\n key: Block,\n\n divert: RegDiversions,\n\n}\n\n\n\n/// Map block to their matching RegDiversions at basic blocks entry.\n\npub struct EntryRegDiversions {\n\n map: SparseMap<Block, EntryRegDiversionsValue>,\n\n}\n\n\n\nimpl RegDiversions {\n\n /// Create a new empty diversion tracker.\n\n pub fn new() -> Self {\n\n Self {\n\n current: FxHashMap::default(),\n\n }\n\n }\n\n\n\n /// Clear the content of the diversions, to reset the state of the compiler.\n\n pub fn clear(&mut self) {\n", "file_path": "cranelift/codegen/src/regalloc/diversion.rs", "rank": 95, "score": 155698.99186316077 }, { "content": "#[derive(Debug, Clone)]\n\nstruct CachedValueLabelRange {\n\n func_index: DefinedFuncIndex,\n\n start: usize,\n\n end: usize,\n\n label_location: HashMap<ValueLabel, ValueLoc>,\n\n}\n\n\n", "file_path": "crates/debug/src/transform/expression.rs", "rank": 96, "score": 155691.29130497642 }, { "content": "struct WasmTypesDieRefs {\n\n vmctx: write::UnitEntryId,\n\n i32: write::UnitEntryId,\n\n i64: write::UnitEntryId,\n\n f32: write::UnitEntryId,\n\n f64: write::UnitEntryId,\n\n}\n\n\n", "file_path": "crates/debug/src/transform/simulate.rs", "rank": 97, "score": 155684.54691446346 }, { "content": "/// Create the register universe for X64.\n\npub fn create_reg_universe() -> RealRegUniverse {\n\n let mut regs = Vec::<(RealReg, String)>::new();\n\n let mut allocable_by_class = [None; NUM_REG_CLASSES];\n\n\n\n // Integer regs\n\n let mut base = regs.len();\n\n // Callee-saved, in the ELF x86_64 ABI\n\n regs.push(info_R12());\n\n regs.push(info_R13());\n\n regs.push(info_R14());\n\n regs.push(info_R15());\n\n regs.push(info_RBX());\n\n // Caller-saved, 
in the ELF x86_64 ABI\n\n regs.push(info_RSI());\n\n regs.push(info_RDI());\n\n regs.push(info_RAX());\n\n regs.push(info_RCX());\n\n regs.push(info_RDX());\n\n regs.push(info_R8());\n\n regs.push(info_R9());\n", "file_path": "cranelift/codegen/src/isa/x64/inst.rs", "rank": 98, "score": 155346.892505201 }, { "content": "struct InheritedAttr<T> {\n\n stack: Vec<(usize, T)>,\n\n}\n\n\n\nimpl<T> InheritedAttr<T> {\n\n fn new() -> Self {\n\n InheritedAttr { stack: Vec::new() }\n\n }\n\n\n\n fn update(&mut self, depth: usize) {\n\n while !self.stack.is_empty() && self.stack.last().unwrap().0 >= depth {\n\n self.stack.pop();\n\n }\n\n }\n\n\n\n fn push(&mut self, depth: usize, value: T) {\n\n self.stack.push((depth, value));\n\n }\n\n\n\n fn top(&self) -> Option<&T> {\n\n self.stack.last().map(|entry| &entry.1)\n\n }\n\n\n\n fn is_empty(&self) -> bool {\n\n self.stack.is_empty()\n\n }\n\n}\n\n\n", "file_path": "crates/debug/src/transform/unit.rs", "rank": 99, "score": 155231.61474817464 } ]
Rust
src/test/instruction_tests/instr_vfmsub213ss.rs
ftilde/rust-x86asm
f6584b8cfe8e75d978bf7b83a67c69444fd3f161
use instruction_def::*; use test::run_test; use Operand::*; use Reg::*; use RegScale::*; use RegType::*; use {BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode}; #[test] fn vfmsub213ss_1() { run_test( &Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM0)), operand3: Some(Direct(XMM3)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[196, 226, 121, 171, 195], OperandSize::Dword, ) } #[test] fn vfmsub213ss_2() { run_test( &Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM3)), operand3: Some(Indirect(EBX, Some(OperandSize::Dword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[196, 226, 97, 171, 59], OperandSize::Dword, ) } #[test] fn vfmsub213ss_3() { run_test( &Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM1)), operand3: Some(Direct(XMM6)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[196, 226, 113, 171, 254], OperandSize::Qword, ) } #[test] fn vfmsub213ss_4() { run_test( &Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM5)), operand2: Some(Direct(XMM3)), operand3: Some(IndirectScaledIndexedDisplaced( RSI, RDI, Eight, 912513807, Some(OperandSize::Dword), None, )), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[196, 226, 97, 171, 172, 254, 15, 219, 99, 54], OperandSize::Qword, ) } #[test] fn vfmsub213ss_5() { run_test( &Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM4)), operand3: Some(Direct(XMM4)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Down), merge_mode: Some(MergeMode::Zero), sae: false, mask: 
Some(MaskReg::K5), broadcast: None, }, &[98, 242, 93, 189, 171, 204], OperandSize::Dword, ) } #[test] fn vfmsub213ss_6() { run_test( &Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM5)), operand3: Some(IndirectDisplaced( EBX, 1132410721, Some(OperandSize::Dword), None, )), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None, }, &[98, 242, 85, 143, 171, 187, 97, 55, 127, 67], OperandSize::Dword, ) } #[test] fn vfmsub213ss_7() { run_test( &Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM23)), operand3: Some(Direct(XMM3)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Nearest), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None, }, &[98, 242, 69, 150, 171, 227], OperandSize::Qword, ) } #[test] fn vfmsub213ss_8() { run_test( &Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM31)), operand2: Some(Direct(XMM21)), operand3: Some(IndirectDisplaced( RCX, 851224790, Some(OperandSize::Dword), None, )), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None, }, &[98, 98, 85, 130, 171, 185, 214, 168, 188, 50], OperandSize::Qword, ) }
use instruction_def::*; use test::run_test; use Operand::*; use Reg::*; use RegScale::*; use RegType::*; use {BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode}; #[test] fn vfmsub213ss_1() { run_test( &Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM0)), operand3: Some(Direct(XMM3)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[196, 226, 121, 171, 195], OperandSize::Dword, ) } #[test] fn vfmsub213ss_2() { run_test( &Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM3)), operand3: Some(Indirect(EBX, Some(OperandSize::Dword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[196, 226, 97, 171, 59], OperandSize::Dword, ) } #[test] fn vfmsub213ss_3() { run_test( &Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM1)), operand3: Some(Direct(XMM6)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[196, 226, 113, 171, 254], OperandSize::Qword, ) } #[test] fn vfmsub213ss_4() { run_test( &Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM5)), operand2: Some(Direct(XMM3)), operand3: Some(IndirectScaledIndexedDisplaced( RSI, RDI, Eight, 912513807, Some(OperandSize::Dword), None, )), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[196, 226, 97, 171, 172, 254, 15, 219, 99, 54], OperandSize::Qword, ) } #[test] fn vfmsub213ss_5()
EBX, 1132410721, Some(OperandSize::Dword), None, )), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None, }, &[98, 242, 85, 143, 171, 187, 97, 55, 127, 67], OperandSize::Dword, ) } #[test] fn vfmsub213ss_7() { run_test( &Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM23)), operand3: Some(Direct(XMM3)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Nearest), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None, }, &[98, 242, 69, 150, 171, 227], OperandSize::Qword, ) } #[test] fn vfmsub213ss_8() { run_test( &Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM31)), operand2: Some(Direct(XMM21)), operand3: Some(IndirectDisplaced( RCX, 851224790, Some(OperandSize::Dword), None, )), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None, }, &[98, 98, 85, 130, 171, 185, 214, 168, 188, 50], OperandSize::Qword, ) }
{ run_test( &Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM4)), operand3: Some(Direct(XMM4)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Down), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None, }, &[98, 242, 93, 189, 171, 204], OperandSize::Dword, ) } #[test] fn vfmsub213ss_6() { run_test( &Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM5)), operand3: Some(IndirectDisplaced(
random
[ { "content": "fn encode32_helper2(mnemonic: Mnemonic, operand1: Operand, operand2: Operand, expected: &Vec<u8>) {\n\n let instr = Instruction {\n\n mnemonic: mnemonic,\n\n operand1: Some(operand1),\n\n operand2: Some(operand2),\n\n operand3: None,\n\n operand4: None,\n\n ..Default::default()\n\n };\n\n encode32_helper(&instr, expected);\n\n}\n\n\n", "file_path": "src/test/mod.rs", "rank": 0, "score": 606330.1135968915 }, { "content": "fn encode16_helper2(mnemonic: Mnemonic, operand1: Operand, operand2: Operand, expected: &Vec<u8>) {\n\n let instr = Instruction {\n\n mnemonic: mnemonic,\n\n operand1: Some(operand1),\n\n operand2: Some(operand2),\n\n operand3: None,\n\n operand4: None,\n\n ..Default::default()\n\n };\n\n encode16_helper(&instr, expected);\n\n}\n\n\n", "file_path": "src/test/mod.rs", "rank": 1, "score": 606330.1135968915 }, { "content": "fn encode64_helper2(mnemonic: Mnemonic, operand1: Operand, operand2: Operand, expected: &Vec<u8>) {\n\n let instr = Instruction {\n\n mnemonic: mnemonic,\n\n operand1: Some(operand1),\n\n operand2: Some(operand2),\n\n operand3: None,\n\n operand4: None,\n\n ..Default::default()\n\n };\n\n encode64_helper(&instr, expected);\n\n}\n\n\n", "file_path": "src/test/mod.rs", "rank": 2, "score": 606330.1135968915 }, { "content": "#[test]\n\nfn operand_type_mask_reg() {\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0xED, 0x28, 0xC2, 0xDB, 0x05],\n\n Mode::Long,\n\n &Instruction::new4(\n\n Mnemonic::VCMPPD,\n\n Operand::Direct(Reg::K3),\n\n Operand::Direct(Reg::YMM2),\n\n Operand::Direct(Reg::YMM3),\n\n Operand::Literal8(5),\n\n ),\n\n ); // VCMPPD K3, YMM2, YMM3, 5\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 3, "score": 446160.93751226546 }, { "content": "fn random_reg_of_size(size: OperandSize) -> Reg {\n\n match size {\n\n OperandSize::Byte => random_reg_8(),\n\n OperandSize::Word => random_reg_16(),\n\n OperandSize::Dword => random_reg_32(),\n\n OperandSize::Qword => random_reg_64(),\n\n _ => panic!(\"Invalid 
general register size: {:?}.\", size)\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 4, "score": 405074.69505944534 }, { "content": "fn random_reg_of_size_no_stack(size: OperandSize) -> Reg {\n\n match size {\n\n OperandSize::Byte => random_reg_8(),\n\n OperandSize::Word => random_reg_16_no_stack(),\n\n OperandSize::Dword => random_reg_32_no_stack(),\n\n OperandSize::Qword => random_reg_64_no_stack(),\n\n _ => panic!(\"Invalid general register size: {:?}.\", size)\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 5, "score": 400092.43049772026 }, { "content": "#[allow(dead_code)]\n\nfn encode32_helper1(mnemonic: Mnemonic, op1: Operand, expected: &Vec<u8>) {\n\n let instr = Instruction {\n\n mnemonic: mnemonic,\n\n operand1: Some(op1),\n\n operand2: None,\n\n operand3: None,\n\n operand4: None,\n\n ..Default::default()\n\n };\n\n encode32_helper(&instr, expected);\n\n}\n", "file_path": "src/test/mod.rs", "rank": 6, "score": 383637.69327454804 }, { "content": "#[test]\n\nfn operand_type_mask_or_mem_64() {\n\n decode_helper(\n\n &vec![0xC4, 0xE1, 0xF8, 0x90, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::KMOVQ,\n\n Operand::Direct(Reg::K1),\n\n Operand::Direct(Reg::K2),\n\n ),\n\n ); // KMOVQ K1, K2\n\n decode_helper(\n\n &vec![0xC4, 0xE1, 0xF8, 0x90, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::KMOVQ,\n\n Operand::Direct(Reg::K1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // KMOVQ K1, QWORD PTR [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 7, "score": 379029.60926208395 }, { "content": "#[test]\n\nfn operand_type_mask_or_mem_32() {\n\n decode_helper(\n\n &vec![0xC4, 0xE1, 0xF9, 0x90, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::KMOVD,\n\n Operand::Direct(Reg::K1),\n\n Operand::Direct(Reg::K2),\n\n ),\n\n ); // KMOVD K1, K2\n\n decode_helper(\n\n &vec![0xC4, 0xE1, 0xF9, 0x90, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n 
Mnemonic::KMOVD,\n\n Operand::Direct(Reg::K1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Dword), None),\n\n ),\n\n ); // KMOVD K1, DWORD PTR [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 8, "score": 379029.609262084 }, { "content": "#[test]\n\nfn operand_type_mask_or_mem_16() {\n\n decode_helper(\n\n &vec![0xC5, 0xF8, 0x90, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::KMOVW,\n\n Operand::Direct(Reg::K1),\n\n Operand::Direct(Reg::K2),\n\n ),\n\n ); // KMOVW K1, K2\n\n decode_helper(\n\n &vec![0xC5, 0xF8, 0x90, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::KMOVW,\n\n Operand::Direct(Reg::K1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Word), None),\n\n ),\n\n ); // KMOVW K1, BYTE PTR [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 9, "score": 379029.609262084 }, { "content": "#[test]\n\nfn operand_type_mask_or_mem_8() {\n\n decode_helper(\n\n &vec![0xC5, 0xF9, 0x90, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::KMOVB,\n\n Operand::Direct(Reg::K1),\n\n Operand::Direct(Reg::K2),\n\n ),\n\n ); // KMOVB K1, K2\n\n decode_helper(\n\n &vec![0xC5, 0xF9, 0x90, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::KMOVB,\n\n Operand::Direct(Reg::K1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Byte), None),\n\n ),\n\n ); // KMOVB K1, BYTE PTR [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 10, "score": 379029.60926208395 }, { "content": "fn random_mask() -> MaskReg {\n\n random_of(&[MaskReg::K1, MaskReg::K2, MaskReg::K3, MaskReg::K4, MaskReg::K5, MaskReg::K6,\n\n MaskReg::K7])\n\n}\n\n\n\nnamed!(parse_as_output<Vec<u8>>, do_parse!(\n\n take_until_and_consume!(\"0:\\t\") >>\n\n bytes: flat_map!(\n\n take_until!(\"\\t\"),\n\n ws!(many1!(parse_u8_hex_str))\n\n ) >>\n\n (bytes)\n\n));\n\n\n\nnamed!(parse_u8_hex_str<u8>, map!(\n\n alphanumeric,\n\n |val| u8::from_str_radix(str::from_utf8(val).unwrap(), 16).unwrap()\n\n));\n\n\n\nimpl Display for Reg {\n", 
"file_path": "gen_defs/src/gen_tests.rs", "rank": 11, "score": 372693.5476501826 }, { "content": "fn random_mask_reg() -> Reg\n\n { random_of(&[Reg::K1, Reg::K2, Reg::K3, Reg::K4, Reg::K5, Reg::K6, Reg::K7]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 12, "score": 372689.33923427993 }, { "content": "fn random_imm(size: OperandSize) -> Operand {\n\n let mut gen = rand::thread_rng();\n\n match size {\n\n OperandSize::Byte => Operand::Literal8(gen.gen_range::<u8>(0, 128)),\n\n OperandSize::Word => Operand::Literal16(\n\n gen.gen_range::<u16>(u8::max_value() as u16 + 1, i16::max_value() as u16)),\n\n OperandSize::Dword => Operand::Literal32(\n\n gen.gen_range::<u32>(u16::max_value() as u32 + 1, i32::max_value() as u32)),\n\n OperandSize::Qword => Operand::Literal64(\n\n gen.gen_range::<u64>(u32::max_value() as u64 + 1, i64::max_value() as u64)),\n\n OperandSize::Far16 => Operand::MemoryAndSegment16(rand::random(), rand::random()),\n\n OperandSize::Far32 => Operand::MemoryAndSegment32(rand::random(), rand::random()),\n\n _ => panic!(\"Invalid immediate value size: {:?}.\", size)\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 13, "score": 364239.84979508806 }, { "content": "fn random_reg(reg_type: RegType, size: OperandSize, addr_size: OperandSize, \n\n def: &InstructionDefinition) -> Reg {\n\n match reg_type {\n\n RegType::General => {\n\n match size {\n\n OperandSize::Byte => random_reg_8(),\n\n OperandSize::Word => random_reg_16(),\n\n OperandSize::Dword => random_reg_32(),\n\n OperandSize::Qword => random_reg_64(),\n\n OperandSize::Unsized => random_reg_of_size(addr_size),\n\n _ => panic!(\"Invalid general register size: {:?}.\", size)\n\n }\n\n },\n\n RegType::Avx => {\n\n let allow_all = if let Some(CompositePrefix::Evex {..}) = def.composite_prefix {\n\n addr_size == OperandSize::Qword } else { false };\n\n match size {\n\n OperandSize::Xmmword => random_xmm_reg(allow_all),\n\n OperandSize::Ymmword => 
random_ymm_reg(allow_all),\n\n OperandSize::Zmmword => random_zmm_reg(allow_all),\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 14, "score": 362089.76032153267 }, { "content": "fn random_fixed(fixed_op: FixedOperand) -> Operand {\n\n match fixed_op {\n\n FixedOperand::Reg(reg) => Operand::Direct(reg),\n\n FixedOperand::Constant(c) => Operand::Literal8(c as u8)\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 15, "score": 360428.80400271865 }, { "content": "fn run_test(instr: &Instruction, expected: &[u8], addr_size: OperandSize) {\n\n let mut buffer = Cursor::new(Vec::new());\n\n instr\n\n .encode(&mut buffer, Mode::from_size(addr_size).unwrap())\n\n .expect(\"Encoding failed\");\n\n if &buffer.get_ref()[..] != expected {\n\n println!(\"Test failed.\");\n\n print!(\"Output: [\");\n\n output_hex_array(buffer.get_ref());\n\n println!(\"]\");\n\n print!(\"Expected: [\");\n\n output_hex_array(expected);\n\n println!(\"]\");\n\n panic!(\n\n \"Failure. Mode: {:?}.\\nInstruction: {:?}.\\n\",\n\n addr_size, instr\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/test/mod.rs", "rank": 16, "score": 358425.5942229312 }, { "content": "fn random_mem(size: OperandSize, addr_size: OperandSize) -> Operand {\n\n if addr_size != OperandSize::Word {\n\n match rand::random::<u32>() % 5 { // Select addressing mode\n\n 0 => { // Indirect - [EAX]\n\n Operand::Indirect(\n\n random_reg_of_size_no_stack(addr_size),\n\n Some(size), None)\n\n },\n\n 1 => { // Indirect Displaced - [EAX+5]\n\n Operand::IndirectDisplaced(\n\n random_reg_of_size_no_stack(addr_size),\n\n (rand::random::<u32>() as u64) & 0x7FFFFFFF,\n\n Some(size), None)\n\n },\n\n 2 => { // Indirect Scaled Indexed - [EAX+EBX*2]\n\n Operand::IndirectScaledIndexed(\n\n random_reg_of_size_no_stack(addr_size),\n\n random_reg_of_size_no_stack(addr_size),\n\n random_reg_scale(),\n\n Some(size), None)\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 17, "score": 353851.0840731592 }, { "content": "fn 
random_mib(size: OperandSize, addr_size: OperandSize) -> Operand {\n\n Operand::IndirectScaledIndexed(\n\n random_reg_of_size_no_stack(addr_size),\n\n random_reg_of_size_no_stack(addr_size),\n\n RegScale::One,\n\n Some(size), None)\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 18, "score": 353851.0840731592 }, { "content": "fn write_operand<W: Write>(op: &Operand, instr_def: &InstructionDefinition, f: &mut W)\n\n -> io::Result<()> { \n\n match *op {\n\n Operand::Direct(reg) => write!(f, \"{}\", reg),\n\n Operand::Indirect(reg, size, seg) => \n\n write_indirect(f, Some(reg), None, None, None, size, seg, instr_def),\n\n Operand::IndirectDisplaced(reg, dsp, size, seg) =>\n\n write_indirect(f, Some(reg), None, None, Some(dsp), size, seg, instr_def),\n\n Operand::IndirectScaledIndexed(base, index, scale, size, seg) => \n\n write_indirect(f, Some(base), Some(index), Some(scale), None, size, seg, instr_def),\n\n Operand::IndirectScaledIndexedDisplaced(base, index, scale, dsp, size, seg) =>\n\n write_indirect(f, Some(base), Some(index), Some(scale), Some(dsp), size, seg,\n\n instr_def),\n\n Operand::IndirectScaledDisplaced(reg, scale, dsp, size, seg) =>\n\n write_indirect(f, Some(reg), None, Some(scale), Some(dsp), size, seg, instr_def),\n\n Operand::Memory(addr, size, seg) |\n\n Operand::Offset(addr, size, seg) => size_seg_helper(f, size, seg, |fmt| write!(fmt, \"[{}]\", addr)), // TODO Is this correct?\n\n Operand::Literal8(v) => write!(f, \"0x{:X}\", v),\n\n Operand::Literal16(v) => write!(f, \"0x{:X}\", v),\n\n Operand::Literal32(v) => write!(f, \"0x{:X}\", v),\n\n Operand::Literal64(v) => write!(f, \"0x{:X}\", v),\n\n Operand::MemoryAndSegment16(seg, addr) => write!(f, \"0x{:X}:0x{:X}\", seg, addr),\n\n Operand::MemoryAndSegment32(seg, addr) => write!(f, \"0x{:X}:0x{:X}\", seg, addr),\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 19, "score": 348761.8677862119 }, { "content": "fn random_ymm_reg(use_all: bool) -> Reg { \n\n if 
use_all { random_of(&[\n\n Reg::YMM0, Reg::YMM1, Reg::YMM2, Reg::YMM3, Reg::YMM4, Reg::YMM5, Reg::YMM6, Reg::YMM7,\n\n Reg::YMM8, Reg::YMM9, Reg::YMM10, Reg::YMM11, Reg::YMM12, Reg::YMM13, Reg::YMM14, Reg::YMM15,\n\n Reg::YMM16, Reg::YMM17, Reg::YMM18, Reg::YMM19, Reg::YMM20, Reg::YMM21, Reg::YMM22, Reg::YMM23,\n\n Reg::YMM24, Reg::YMM25, Reg::YMM26, Reg::YMM27, Reg::YMM28, Reg::YMM29, Reg::YMM30, Reg::YMM31\n\n ]) } else { random_of(&[\n\n Reg::YMM0, Reg::YMM1, Reg::YMM2, Reg::YMM3, Reg::YMM4, Reg::YMM5, Reg::YMM6, Reg::YMM7,\n\n ]) }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 20, "score": 348038.44714981 }, { "content": "fn random_xmm_reg(use_all: bool) -> Reg { \n\n if use_all { random_of(&[\n\n Reg::XMM0, Reg::XMM1, Reg::XMM2, Reg::XMM3, Reg::XMM4, Reg::XMM5, Reg::XMM6, Reg::XMM7,\n\n Reg::XMM8, Reg::XMM9, Reg::XMM10, Reg::XMM11, Reg::XMM12, Reg::XMM13, Reg::XMM14, Reg::XMM15,\n\n Reg::XMM16, Reg::XMM17, Reg::XMM18, Reg::XMM19, Reg::XMM20, Reg::XMM21, Reg::XMM22, Reg::XMM23,\n\n Reg::XMM24, Reg::XMM25, Reg::XMM26, Reg::XMM27, Reg::XMM28, Reg::XMM29, Reg::XMM30, Reg::XMM31\n\n ]) } else { random_of(&[\n\n Reg::XMM0, Reg::XMM1, Reg::XMM2, Reg::XMM3, Reg::XMM4, Reg::XMM5, Reg::XMM6, Reg::XMM7,\n\n ]) }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 21, "score": 348038.44714981 }, { "content": "fn random_zmm_reg(use_all: bool) -> Reg { \n\n if use_all { random_of(&[\n\n Reg::ZMM0, Reg::ZMM1, Reg::ZMM2, Reg::ZMM3, Reg::ZMM4, Reg::ZMM5, Reg::ZMM6, Reg::ZMM7,\n\n Reg::ZMM8, Reg::ZMM9, Reg::ZMM10, Reg::ZMM11, Reg::ZMM12, Reg::ZMM13, Reg::ZMM14, Reg::ZMM15,\n\n Reg::ZMM16, Reg::ZMM17, Reg::ZMM18, Reg::ZMM19, Reg::ZMM20, Reg::ZMM21, Reg::ZMM22, Reg::ZMM23,\n\n Reg::ZMM24, Reg::ZMM25, Reg::ZMM26, Reg::ZMM27, Reg::ZMM28, Reg::ZMM29, Reg::ZMM30, Reg::ZMM31\n\n ]) } else { random_of(&[\n\n Reg::ZMM0, Reg::ZMM1, Reg::ZMM2, Reg::ZMM3, Reg::ZMM4, Reg::ZMM5, Reg::ZMM6, Reg::ZMM7,\n\n ]) }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", 
"rank": 22, "score": 348038.44714981 }, { "content": "fn make_operand_combinations(instr: &InstructionDefinition) -> Vec<Vec<OperandDefinition>> {\n\n let set_parts = instr.operands.iter().by_ref().filter_map(\n\n |maybe_op| maybe_op.as_ref().and_then(\n\n |op| if let OperandType::Set(ref items) = op.op_type {\n\n if instr.mnemonic.find(\"CVT\").is_none() {\n\n Some(items.clone())\n\n } else {\n\n Some(items.iter().filter(|i| if let OperandType::Bcst(_) = **i { false }\n\n else { true }).cloned().collect())\n\n }\n\n } else { None }\n\n )\n\n ).next();\n\n if let Some(parts) = set_parts { \n\n parts.iter().map(|part| instr.operands.iter().filter_map(\n\n |maybe_op| maybe_op.as_ref().map(|op| if let OperandType::Set(_) = op.op_type {\n\n OperandDefinition {\n\n encoding: op.encoding,\n\n access: op.access,\n\n size: op.size,\n\n op_type: part.clone()\n\n }\n\n } else { op.clone() }\n\n )).collect()).collect()\n\n } else {\n\n vec![instr.operands.iter().filter_map(|x| x.as_ref()).cloned().collect()]\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 23, "score": 328989.55420121795 }, { "content": "#[test]\n\nfn operand_type_q() {\n\n decode_helper(\n\n &vec![0xFF, 0x20],\n\n Mode::Long,\n\n &Instruction::new1(\n\n Mnemonic::JMP,\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // JMP QWORD PTR [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 24, "score": 322405.83004577464 }, { "content": "#[test]\n\nfn operand_type_a() {\n\n decode_helper(\n\n &vec![0x66, 0x62, 0x00],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::BOUND,\n\n Operand::Direct(Reg::AX),\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Unsized), None),\n\n ),\n\n ); // BOUND AX, [EAX]\n\n decode_helper(\n\n &vec![0x62, 0x00],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::BOUND,\n\n Operand::Direct(Reg::EAX),\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Unsized), None),\n\n ),\n\n ); // BOUND EAX, 
[EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 25, "score": 322405.83004577464 }, { "content": "#[test]\n\nfn operand_type_p() {\n\n decode_helper(\n\n &vec![0x9A, 0xAB, 0x89, 0x67, 0x45, 0x23, 0x01],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::CALL,\n\n Operand::MemoryAndSegment32(0x0123, 0x456789AB),\n\n ),\n\n ); // CALL 0x0123:0x456789AB\n\n decode_helper(\n\n &vec![0x66, 0x9A, 0x67, 0x45, 0x23, 0x01],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::CALL, Operand::MemoryAndSegment16(0x0123, 0x4567)),\n\n ); // CALL 0x0123:0x4567\n\n decode_helper(\n\n &vec![0x9A, 0x67, 0x45, 0x23, 0x01],\n\n Mode::Real,\n\n &Instruction::new1(Mnemonic::CALL, Operand::MemoryAndSegment16(0x0123, 0x4567)),\n\n ); // CALL 0x0123:0x4567\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 26, "score": 322405.83004577464 }, { "content": "#[test]\n\nfn operand_type_b() {\n\n decode_helper(\n\n &vec![0xC4, 0xE3, 0x79, 0x32, 0xCA, 0x05],\n\n Mode::Protected,\n\n &Instruction::new3(\n\n Mnemonic::KSHIFTLB,\n\n Operand::Direct(Reg::K1),\n\n Operand::Direct(Reg::K2),\n\n Operand::Literal8(5),\n\n ),\n\n ); // KSHIFTLB K1, K2, 5\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 27, "score": 322405.83004577464 }, { "content": "#[test]\n\nfn operand_type_v() {\n\n decode_helper(\n\n &vec![0x40],\n\n Mode::Real,\n\n &Instruction::new1(Mnemonic::INC, Operand::Direct(Reg::AX)),\n\n ); // INC AX\n\n decode_helper(\n\n &vec![0x66, 0x40],\n\n Mode::Real,\n\n &Instruction::new1(Mnemonic::INC, Operand::Direct(Reg::EAX)),\n\n ); // INC EAX\n\n decode_helper(\n\n &vec![0x66, 0x40],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::INC, Operand::Direct(Reg::AX)),\n\n ); // INC AX\n\n decode_helper(\n\n &vec![0x40],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::INC, Operand::Direct(Reg::EAX)),\n\n ); // INC EAX\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 28, "score": 322405.83004577464 }, { "content": "#[test]\n\nfn operand_type_w() 
{\n\n decode_helper(\n\n &vec![0xC8, 0x05, 0x00, 0x06],\n\n Mode::Real,\n\n &Instruction::new2(\n\n Mnemonic::ENTER,\n\n Operand::Literal16(0x5),\n\n Operand::Literal8(0x06),\n\n ),\n\n ); // ENTER 5, 6\n\n decode_helper(\n\n &vec![0xC8, 0x05, 0x00, 0x06],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::ENTER,\n\n Operand::Literal16(0x5),\n\n Operand::Literal8(0x06),\n\n ),\n\n ); // ENTER 5, 6\n\n decode_helper(\n\n &vec![0xC8, 0x05, 0x00, 0x06],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ENTER,\n\n Operand::Literal16(0x5),\n\n Operand::Literal8(0x06),\n\n ),\n\n ); // ENTER 5, 6\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 29, "score": 322405.83004577464 }, { "content": "#[test]\n\nfn operand_type_d() {\n\n decode_helper(\n\n &vec![0x0F, 0x6E, 0xD0],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::MOVD,\n\n Operand::Direct(Reg::MM2),\n\n Operand::Direct(Reg::EAX),\n\n ),\n\n ); // MOVD MM2, EAX\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 30, "score": 322405.83004577464 }, { "content": "#[test]\n\nfn addressing_mode_masked_mask_reg() {\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0x6D, 0x2B, 0x74, 0xD3],\n\n Mode::Long,\n\n &Instruction {\n\n mnemonic: Mnemonic::VPCMPEQB,\n\n operand1: Some(Operand::Direct(Reg::K2)),\n\n operand2: Some(Operand::Direct(Reg::YMM2)),\n\n operand3: Some(Operand::Direct(Reg::YMM3)),\n\n mask: Some(MaskReg::K3),\n\n merge_mode: Some(MergeMode::Merge),\n\n ..Default::default()\n\n },\n\n ); // VPCMPEQB K2 {K3}, YMM2, YMM3\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 31, "score": 322402.04090135463 }, { "content": "fn build_test_operand(instr: &mut Instruction, instr_def: &InstructionDefinition,\n\n def: &OperandDefinition, addr_size: OperandSize) -> Operand {\n\n match def.op_type {\n\n OperandType::Reg(reg_type) =>\n\n Operand::Direct(random_reg(reg_type, def.size, addr_size, instr_def)),\n\n OperandType::Mem(size) => random_mem(size.unwrap_or(def.size), addr_size),\n\n 
OperandType::Imm => random_imm(def.size),\n\n OperandType::Offset => Operand::Offset(rand_value_of_size(def.size), Some(def.size), None),\n\n OperandType::Rel(op_size) => random_imm(op_size), // TODO Is this correct?\n\n OperandType::Mib => random_mib(def.size, addr_size),\n\n OperandType::Bcst(bcst_size) => random_mem(bcst_size, addr_size),\n\n OperandType::Fixed(fixed_op) => random_fixed(fixed_op),\n\n OperandType::Constant => unimplemented!(), // TODO What is this?\n\n _ => unreachable!() // Set(_) should be split apart already\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 32, "score": 320714.5979397588 }, { "content": "fn make_rm(size: OperandSize, reg_type: RegType) -> InstructionToken {\n\n let vec = vec![InstructionToken::Reg(reg_type, size), InstructionToken::Mem(size)];\n\n InstructionToken::Set(vec)\n\n}\n\n\n", "file_path": "gen_defs/src/main.rs", "rank": 33, "score": 318056.8734751269 }, { "content": "// Test decoding of the operand size prefix.\n\nfn operand_size_prefix() {\n\n decode_helper(\n\n &vec![0xC5, 0xE9, 0x58, 0x08],\n\n Mode::Protected,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Xmmword), None),\n\n ),\n\n ); // VADDPD XMM1, XMM2, [EAX]\n\n}\n\n\n\n#[test]\n", "file_path": "src/test/decode.rs", "rank": 34, "score": 317057.37213678606 }, { "content": "#[test]\n\nfn operand_type_sd() {\n\n decode_helper(\n\n &vec![0xF2, 0x0F, 0x58, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDSD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // ADDSD XMM1, XMM2\n\n decode_helper(\n\n &vec![0xF2, 0x0F, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDSD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // ADDSD XMM1, [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 35, "score": 
317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_bss() {\n\n decode_helper(\n\n &vec![0x6A, 0x12],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::PUSH, Operand::Literal8(0x12)),\n\n ); // PUSH 0x12\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 36, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_dq() {\n\n decode_helper(\n\n &vec![0x66, 0x0F, 0x38, 0x00, 0x08],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::PSHUFB,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Xmmword), None),\n\n ),\n\n ); // PSHUFB XMM1, [EAX]\n\n decode_helper(\n\n &vec![0x66, 0x0F, 0x38, 0x00, 0xCA],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::PSHUFB,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // PSHUFB XMM1, [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 37, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_pi() {\n\n decode_helper(\n\n &vec![0x0F, 0x2A, 0xCA],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::CVTPI2PS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::MM2),\n\n ),\n\n ); // CVTPI2PS XMM1, MM2\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 38, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_ss() {\n\n decode_helper(\n\n &vec![0xF3, 0x0F, 0x58, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDSS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // ADDSS XMM1, XMM2\n\n decode_helper(\n\n &vec![0xF3, 0x0F, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDSS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Dword), None),\n\n ),\n\n ); // ADDSS XMM1, [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 39, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_avx() {\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0xED, 0x0A, 0x58, 
0xCB],\n\n Mode::Long,\n\n &Instruction {\n\n mnemonic: Mnemonic::VADDPD,\n\n operand1: Some(Operand::Direct(Reg::XMM1)),\n\n operand2: Some(Operand::Direct(Reg::XMM2)),\n\n operand3: Some(Operand::Direct(Reg::XMM3)),\n\n mask: Some(MaskReg::K2),\n\n merge_mode: Some(MergeMode::Merge),\n\n ..Default::default()\n\n },\n\n ); // VADDPD XMM1 {K2}, XMM2, XMM3\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0xED, 0x2A, 0x58, 0xCB],\n\n Mode::Long,\n\n &Instruction {\n\n mnemonic: Mnemonic::VADDPD,\n\n operand1: Some(Operand::Direct(Reg::YMM1)),\n", "file_path": "src/test/decode.rs", "rank": 40, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_bound() {\n\n decode_helper(\n\n &vec![0xF3, 0x0F, 0x1A, 0xC8],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::BNDCL,\n\n Operand::Direct(Reg::BND1),\n\n Operand::Direct(Reg::EAX),\n\n ),\n\n ); // BNDCL BND1, EAX\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 41, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_qp() {\n\n decode_helper(\n\n &vec![0x48, 0xCF],\n\n Mode::Long,\n\n &Instruction::new0(Mnemonic::IRETQ),\n\n ); // IRETQ\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 42, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_dqp() {\n\n decode_helper(\n\n &vec![0xF2, 0x48, 0x0F, 0x38, 0xF0, 0xC0],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::CRC32,\n\n Operand::Direct(Reg::RAX),\n\n Operand::Direct(Reg::AL),\n\n ),\n\n ); // CRC32 RAX, AL\n\n decode_helper(\n\n &vec![0xF2, 0x0F, 0x38, 0xF0, 0xC0],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::CRC32,\n\n Operand::Direct(Reg::EAX),\n\n Operand::Direct(Reg::AL),\n\n ),\n\n ); // CRC32 EAX, AL\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 43, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_pd() {\n\n decode_helper(\n\n &vec![0x66, 0x0F, 0x58, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDPD,\n\n 
Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // ADDPD XMM1, XMM2\n\n decode_helper(\n\n &vec![0x66, 0x0F, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDPD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Xmmword), None),\n\n ),\n\n ); // ADDPD XMM1, [EAX]1\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 44, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_xmm() {\n\n decode_helper(\n\n &vec![0xC5, 0xE9, 0x58, 0xCB],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n Operand::Direct(Reg::XMM3),\n\n ),\n\n ); // VADDPD XMM1, XMM2, XMM3\n\n decode_helper(\n\n &vec![0xC5, 0xE9, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Xmmword), None),\n\n ),\n\n ); // VADDPD XMM1, XMM2, [EAX}\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 45, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_ymm() {\n\n decode_helper(\n\n &vec![0xC5, 0xED, 0x58, 0xCB],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::YMM1),\n\n Operand::Direct(Reg::YMM2),\n\n Operand::Direct(Reg::YMM3),\n\n ),\n\n ); // VADDPD YMM1, YMM2, YMM3\n\n decode_helper(\n\n &vec![0xC5, 0xED, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::YMM1),\n\n Operand::Direct(Reg::YMM2),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Ymmword), None),\n\n ),\n\n ); // VADDPD YMM1, YMM2, [EAX}\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 46, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_vs() {\n\n decode_helper(\n\n &vec![0x66, 0x68, 0x34, 0x12],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::PUSH, Operand::Literal16(0x1234)),\n\n ); // 
PUSH 0x1234\n\n decode_helper(\n\n &vec![0x68, 0x78, 0x56, 0x34, 0x12],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::PUSH, Operand::Literal32(0x12345678)),\n\n ); // PUSH 0x12345678\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 47, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_di() {\n\n decode_helper(\n\n &vec![0xDA, 0x00],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::FIADD,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Dword), None),\n\n ),\n\n ); // FIADD DWORD PTR [EAX}\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 48, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_psq() {\n\n decode_helper(\n\n &vec![0x0F, 0x2C, 0xCA],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::CVTTPS2PI,\n\n Operand::Direct(Reg::MM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // CVTTPS2PI MM1, XMM2\n\n decode_helper(\n\n &vec![0x0F, 0x2C, 0x08],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::CVTTPS2PI,\n\n Operand::Direct(Reg::MM1),\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // CVTTPS2PI MM1, [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 49, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_bs() {\n\n decode_helper(\n\n &vec![0x6B, 0xC3, 0x12],\n\n Mode::Protected,\n\n &Instruction::new3(\n\n Mnemonic::IMUL,\n\n Operand::Direct(Reg::EAX),\n\n Operand::Direct(Reg::EBX),\n\n Operand::Literal8(0x12),\n\n ),\n\n ); // IMUL EAX, EBX, 0x12\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 50, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_bcd() {\n\n decode_helper(\n\n &vec![0xDF, 0x20],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::FBLD,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Tbyte), None),\n\n ),\n\n ); // FBLD [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 51, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn 
operand_type_er() {\n\n decode_helper(\n\n &vec![0xDB, 0x28],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::FLD,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Tbyte), None),\n\n ),\n\n ); // FLD TBYTE PTR [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 52, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_ptp() {\n\n decode_helper(\n\n &vec![0xFF, 0x10],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::CALL,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Dword), None),\n\n ),\n\n ); // CALL DWORD PTR [EAX]\n\n decode_helper(\n\n &vec![0xFF, 0x18],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::CALL,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Fword), None),\n\n ),\n\n ); // CALL FWORD PTR [EAX]\n\n\n\n // TODO I'm not 100% sure this is correct. It seems to be from the Intel docs, but GCC won't\n\n // seem to accept this form?\n\n decode_helper(\n\n &vec![0x48, 0xFF, 0x18],\n\n Mode::Long,\n\n &Instruction::new1(\n\n Mnemonic::CALL,\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Tbyte), None),\n\n ),\n\n ); // CALL TBYTE PTR [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 53, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_zmm() {\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0xED, 0x48, 0x58, 0xCB],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::ZMM1),\n\n Operand::Direct(Reg::ZMM2),\n\n Operand::Direct(Reg::ZMM3),\n\n ),\n\n ); // VADDPD ZMM1, ZMM2, ZMM3\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0xED, 0x48, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::ZMM1),\n\n Operand::Direct(Reg::ZMM2),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Zmmword), None),\n\n ),\n\n ); // VADDPD ZMM1, ZMM2, [EAX}\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 54, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_ds() {\n\n decode_helper(\n\n 
&vec![0xE8, 0x78, 0x56, 0x34, 0x12],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::CALL, Operand::Offset(0x12345678, None, None)),\n\n ); // CALL 0x12345678\n\n}\n\n\n\n// I've temporarily disabled this test as the decoding logic will need to lookahead in order to\n\n// distiguish between a standalone FWAIT instruction and an instruction prefixed with 0x9B.\n\n// #[test]\n\n// fn operand_type_e() {\n\n// decode_helper(&vec![0x9B, 0xD9, 0x30], Mode::Protected, &Instruction::new1(Mnemonic::FSTENV, Operand::Indirect(Reg::EAX, None, None))); // FSTENV [EAX]\n\n// }\n\n\n", "file_path": "src/test/decode.rs", "rank": 55, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_vqp() {\n\n decode_helper(\n\n &vec![0x66, 0x01, 0xC3],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADD,\n\n Operand::Direct(Reg::BX),\n\n Operand::Direct(Reg::AX),\n\n ),\n\n ); // ADD BX, AX\n\n decode_helper(\n\n &vec![0x01, 0xC3],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADD,\n\n Operand::Direct(Reg::EBX),\n\n Operand::Direct(Reg::EAX),\n\n ),\n\n ); // ADD EBX, EAX\n\n decode_helper(\n\n &vec![0x48, 0x01, 0xC3],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADD,\n\n Operand::Direct(Reg::RBX),\n\n Operand::Direct(Reg::RAX),\n\n ),\n\n ); // ADD RBX, RAX\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 56, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_dr() {\n\n decode_helper(\n\n &vec![0xDC, 0x00],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::FADD,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // FADD QWORD PTR [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 57, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_vq() {\n\n decode_helper(\n\n &vec![0x66, 0x50],\n\n Mode::Long,\n\n &Instruction::new1(Mnemonic::PUSH, Operand::Direct(Reg::AX)),\n\n ); // PUSH AX\n\n decode_helper(\n\n &vec![0x50],\n\n Mode::Long,\n\n 
&Instruction::new1(Mnemonic::PUSH, Operand::Direct(Reg::RAX)),\n\n ); // PUSH RAX\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 58, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_vds() {\n\n decode_helper(\n\n &vec![0x05, 0x34, 0x12],\n\n Mode::Real,\n\n &Instruction::new2(\n\n Mnemonic::ADD,\n\n Operand::Direct(Reg::AX),\n\n Operand::Literal16(0x1234),\n\n ),\n\n ); // ADD AX, 0x1234\n\n decode_helper(\n\n &vec![0x66, 0x05, 0x78, 0x56, 0x34, 0x12],\n\n Mode::Real,\n\n &Instruction::new2(\n\n Mnemonic::ADD,\n\n Operand::Direct(Reg::EAX),\n\n Operand::Literal32(0x12345678),\n\n ),\n\n ); // ADD EAX, 0x12345678\n\n decode_helper(\n", "file_path": "src/test/decode.rs", "rank": 59, "score": 317044.3639582245 }, { "content": "#[test]\n\nfn operand_type_ps() {\n\n decode_helper(\n\n &vec![0x0F, 0x58, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDPS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // ADDPS XMM1, XMM2\n\n decode_helper(\n\n &vec![0x0F, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDPS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Xmmword), None),\n\n ),\n\n ); // ADDPS XMM1, [EAX]1\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 60, "score": 317044.3639582245 }, { "content": "fn build_test_instruction(def: &InstructionDefinition, op_defs: Vec<OperandDefinition>,\n\n addr_size: OperandSize) -> Instruction {\n\n\n\n let mut instr = Instruction {\n\n mnemonic: def.mnemonic.clone(),\n\n .. 
Default::default()\n\n };\n\n\n\n let first_op_not_mem = op_defs.iter().next().map(|o| !o.op_type.is_mem()).unwrap_or(true);\n\n if def.allow_mask && first_op_not_mem { instr.mask = Some(random_mask()); }\n\n if def.allow_merge_mode && first_op_not_mem { instr.merge_mode = Some(MergeMode::Zero) }\n\n\n\n if op_defs.iter().all(|d| !d.op_type.is_mem()) {\n\n if def.allow_rounding & op_defs.iter().all(\n\n |op_def| if let OperandType::Reg(_) = op_def.op_type { true } else { false })\n\n { instr.rounding_mode = Some(random_rounding_mode()); }\n\n else if def.allow_sae { instr.sae = true; }\n\n }\n\n\n\n let broadcast_size = op_defs.iter().filter_map(\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 61, "score": 316645.5576981066 }, { "content": "fn build_test_instructions(def: &InstructionDefinition, addr_size: OperandSize) -> Vec<Instruction> {\n\n let op_combinations = make_operand_combinations(def);\n\n op_combinations.into_iter().filter(filter_op_combination)\n\n .map(|op_c| build_test_instruction(def, op_c, addr_size)).collect()\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 62, "score": 315604.41124553315 }, { "content": "#[test]\n\nfn addressing_mode_mask_reg() {\n\n decode_helper(\n\n &vec![0xC5, 0xE5, 0x4A, 0xD4],\n\n Mode::Protected,\n\n &Instruction {\n\n mnemonic: Mnemonic::KADDB,\n\n operand1: Some(Operand::Direct(Reg::K2)),\n\n operand2: Some(Operand::Direct(Reg::K3)),\n\n operand3: Some(Operand::Direct(Reg::K4)),\n\n ..Default::default()\n\n },\n\n ); // KADDB K2, K3, K4\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 63, "score": 311921.61597882846 }, { "content": "#[test]\n\nfn addressing_mode_masked_reg() {\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0xED, 0x0A, 0x58, 0xCB],\n\n Mode::Protected,\n\n &Instruction {\n\n mnemonic: Mnemonic::VADDPD,\n\n operand1: Some(Operand::Direct(Reg::XMM1)),\n\n operand2: Some(Operand::Direct(Reg::XMM2)),\n\n operand3: Some(Operand::Direct(Reg::XMM3)),\n\n mask: Some(MaskReg::K2),\n\n 
merge_mode: Some(MergeMode::Merge),\n\n ..Default::default()\n\n },\n\n ); // VADDPD XMM1 {K2}, XMM2, XMM3\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 64, "score": 311921.61597882846 }, { "content": "#[test]\n\nfn operand_type_fpu_register() {\n\n decode_helper(\n\n &vec![0xD8, 0xC2],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::FADD,\n\n Operand::Direct(Reg::ST),\n\n Operand::Direct(Reg::ST2),\n\n ),\n\n ); // FADD ST(2)\n\n}\n", "file_path": "src/test/decode.rs", "rank": 65, "score": 311894.7044301774 }, { "content": "#[test]\n\nfn operand_type_xmm_or_ymm() {\n\n decode_helper(\n\n &vec![0xC5, 0xE9, 0x58, 0xCB],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n Operand::Direct(Reg::XMM3),\n\n ),\n\n ); // VADDPD XMM1, XMM2, XMM3\n\n decode_helper(\n\n &vec![0xC5, 0xED, 0x58, 0xCB],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::YMM1),\n\n Operand::Direct(Reg::YMM2),\n\n Operand::Direct(Reg::YMM3),\n\n ),\n\n ); // VADDPD YMM1, YMM2, YMM3\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 66, "score": 311894.7044301774 }, { "content": "#[test]\n\nfn operand_type_xmm_or_mem64() {\n\n decode_helper(\n\n &vec![0x0F, 0x5A, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::CVTPS2PD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // CVTPS2PD XMM1, XMM2\n\n decode_helper(\n\n &vec![0x0F, 0x5A, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::CVTPS2PD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // CVTPS2PD XMM1, QWORD PTR [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 67, "score": 311894.7044301774 }, { "content": "#[test]\n\nfn operand_type_bound_or_mem() {\n\n decode_helper(\n\n &vec![0x66, 0x0F, 0x1A, 0xCA],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::BNDMOV,\n\n 
Operand::Direct(Reg::BND1),\n\n Operand::Direct(Reg::BND2),\n\n ),\n\n ); // BNDMOV BND1, BND2\n\n decode_helper(\n\n &vec![0x66, 0x0F, 0x1A, 0x08],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::BNDMOV,\n\n Operand::Direct(Reg::BND1),\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // BNDMOV BND1, QWORD PTR [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 68, "score": 311894.7044301774 }, { "content": "#[test]\n\nfn operand_type_unsized_memory() {\n\n decode_helper(\n\n &vec![0x8D, 0x03],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::LEA,\n\n Operand::Direct(Reg::EAX),\n\n Operand::Indirect(Reg::EBX, Some(OperandSize::Unsized), None),\n\n ),\n\n ); // LEA EAX, [EBX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 69, "score": 311894.7044301774 }, { "content": "#[test]\n\nfn operand_type_xmm_or_mem32() {\n\n decode_helper(\n\n &vec![0xC5, 0xEA, 0x5A, 0xCB],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VCVTSS2SD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n Operand::Direct(Reg::XMM3),\n\n ),\n\n ); // VCVTSS2SD XMM1, XMM2\n\n decode_helper(\n\n &vec![0xC5, 0xEA, 0x5A, 0x08],\n\n Mode::Long,\n\n &Instruction::new3(\n\n Mnemonic::VCVTSS2SD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Dword), None),\n\n ),\n", "file_path": "src/test/decode.rs", "rank": 70, "score": 311894.7044301774 }, { "content": "fn parse_operand_encoding_opt(operand: &str) -> Option<(OperandEncoding, OperandAccess)> {\n\n if operand.len() != 0 {\n\n Some(parse_operand_encoding(operand.as_bytes()).unwrap().1)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\nnamed!(instruction_sep, eat_separator!(&b\", \"[..]));\n\nnamed!(parse_token_list<Vec<Vec<InstructionToken>>>, separated_list!(instruction_sep, parse_instruction_part));\n\nnamed!(parse_instruction<&[u8], (String, Vec<InstructionToken>), u32>, do_parse!(\n\n mnemonic: 
alphanumeric >> opt!(instruction_sep) >>\n\n tokens: opt!(complete!(parse_token_list)) >>\n\n (build_result(mnemonic, tokens))\n\n )\n\n);\n\n\n", "file_path": "gen_defs/src/main.rs", "rank": 71, "score": 311486.28128727485 }, { "content": "fn random_reg_16() -> Reg\n\n { random_of(&[Reg::BX, Reg::CX, Reg::DX, Reg::SI, Reg::DI, Reg::SP, Reg::BP]) }\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 72, "score": 311326.47033738723 }, { "content": "fn random_reg_64() -> Reg\n\n { random_of(&[Reg::RBX, Reg::RCX, Reg::RDX, Reg::RSI, Reg::RDI, Reg::RSP, Reg::RBP]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 73, "score": 311326.47033738723 }, { "content": "fn random_reg_32() -> Reg\n\n { random_of(&[Reg::EBX, Reg::ECX, Reg::EDX, Reg::ESI, Reg::EDI, Reg::ESP, Reg::EBP]) }\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 74, "score": 311326.47033738723 }, { "content": "fn random_reg_32_no_stack() -> Reg\n\n { random_of(&[Reg::EAX, Reg::EBX, Reg::ECX, Reg::EDX, Reg::ESI, Reg::EDI]) }\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 75, "score": 308105.8281911791 }, { "content": "fn random_reg_16_no_stack() -> Reg\n\n { random_of(&[Reg::AX, Reg::BX, Reg::CX, Reg::DX, Reg::SI, Reg::DI]) }\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 76, "score": 308105.8281911791 }, { "content": "fn random_fpu_reg() -> Reg\n\n { random_of(&[Reg::ST1, Reg::ST2, Reg::ST3, Reg::ST4, Reg::ST5, Reg::ST6, Reg::ST7]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 77, "score": 308105.8281911791 }, { "content": "fn random_reg_64_no_stack() -> Reg\n\n { random_of(&[Reg::RAX, Reg::RBX, Reg::RCX, Reg::RDX, Reg::RSI, Reg::RDI]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 78, "score": 308105.8281911791 }, { "content": "fn random_mmx_reg() -> Reg\n\n { random_of(&[Reg::MM0, Reg::MM1, Reg::MM2, Reg::MM3, Reg::MM4, Reg::MM5, Reg::MM6, Reg::MM7]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 79, "score": 308105.8281911791 }, { 
"content": "#[test]\n\nfn infer_operand_size_16_32_instr() {\n\n encode32_helper2(\n\n Mnemonic::ADD,\n\n Operand::Direct(Reg::AX),\n\n Operand::Indirect(Reg::EBX, None, None),\n\n &vec![0x66, 0x03, 0x03],\n\n );\n\n encode32_helper2(\n\n Mnemonic::ADD,\n\n Operand::Direct(Reg::EAX),\n\n Operand::Indirect(Reg::EBX, None, None),\n\n &vec![0x03, 0x03],\n\n );\n\n}\n\n\n", "file_path": "src/test/size_inference.rs", "rank": 80, "score": 306944.54334351444 }, { "content": "#[test]\n\nfn operand_type_ymm_or_mem_or_mem64() {\n\n decode_helper(\n\n &vec![0xC5, 0xFF, 0xE6, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::VCVTPD2DQ,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::YMM2),\n\n ),\n\n ); // VCVTPD2DQ XMM1, YMM2\n\n decode_helper(\n\n &vec![0xC5, 0xFF, 0xE6, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::VCVTPD2DQ,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Ymmword), None),\n\n ),\n\n ); // VCVTPD2DQ XMM1, YMMWORD PTR [RAX]\n\n decode_helper(\n", "file_path": "src/test/decode.rs", "rank": 81, "score": 306944.54334351444 }, { "content": "#[test]\n\nfn operand_type_ymm_or_mem_or_mem32() {\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0x7C, 0x48, 0x5A, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::VCVTPS2PD,\n\n Operand::Direct(Reg::ZMM1),\n\n Operand::Direct(Reg::YMM2),\n\n ),\n\n ); // VCVTPS2PD ZMM1, YMM2\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0x7C, 0x48, 0x5A, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::VCVTPS2PD,\n\n Operand::Direct(Reg::ZMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Ymmword), None),\n\n ),\n\n ); // VCVTPS2PD ZMM1, YMMWORD PTR [RAX]\n\n decode_helper(\n", "file_path": "src/test/decode.rs", "rank": 82, "score": 306944.54334351444 }, { "content": "#[test]\n\nfn operand_type_zmm_or_mem_or_mem64() {\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0xFD, 0x48, 0x5A, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::VCVTPD2PS,\n\n 
Operand::Direct(Reg::YMM1),\n\n Operand::Direct(Reg::ZMM2),\n\n ),\n\n ); // VCVTPD2PS YMM1, ZMM2\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0xFD, 0x48, 0x5A, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::VCVTPD2PS,\n\n Operand::Direct(Reg::YMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Zmmword), None),\n\n ),\n\n ); // VCVTPD2PS YMM1, ZMMWORD PTR [RAX]\n\n decode_helper(\n", "file_path": "src/test/decode.rs", "rank": 83, "score": 306944.54334351444 }, { "content": "fn encode(instr: &Instruction, def: &InstructionDefinition, addr_size: OperandSize)\n\n -> io::Result<Vec<u8>> {\n\n // Write instruction to file\n\n let mut test_file = File::create(\"test.s\")?;\n\n write!(test_file, \".intel_syntax noprefix\\n\")?;\n\n write!(test_file, \".code{}\\n\", match addr_size {\n\n OperandSize::Word => \"16\",\n\n OperandSize::Dword => \"32\",\n\n OperandSize::Qword => \"64\",\n\n _ => panic!(\"Invalid addressing size.\")\n\n })?;\n\n write_instruction(instr, def, &mut test_file)?;\n\n write!(test_file, \"\\n\")?;\n\n\n\n // Run assembler\n\n let as_result = Command::new(\"as\")\n\n .args(&[\"test.s\", \"-o\", \"test.out\"])\n\n .spawn()?\n\n .wait()?;\n\n if !as_result.success() {\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 84, "score": 303187.427868437 }, { "content": "#[test]\n\nfn addressing_mode_avx_reg_masked_rm() {\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0xFF, 0x0D, 0x10, 0x10],\n\n Mode::Protected,\n\n &Instruction {\n\n mnemonic: Mnemonic::VMOVSD,\n\n operand1: Some(Operand::Direct(Reg::XMM2)),\n\n operand2: Some(Operand::Indirect(Reg::EAX, Some(OperandSize::Qword), None)),\n\n mask: Some(MaskReg::K5),\n\n merge_mode: Some(MergeMode::Merge),\n\n ..Default::default()\n\n },\n\n ); // VMOVSD XMM2 {K5}, [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 85, "score": 302208.39689775347 }, { "content": "#[test]\n\nfn operand_type_xmm_or_ymm_or_mem_or_mem64() {\n\n decode_helper(\n\n &vec![0xC5, 0xF9, 0x5A, 0xCA],\n\n 
Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::VCVTPD2PS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // VCVTPD2PS XMM1, XMM2\n\n decode_helper(\n\n &vec![0xC5, 0xF9, 0x5A, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::VCVTPD2PS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Xmmword), None),\n\n ),\n\n ); // VCVTPD2PS XMM1, XMMWORD PTR [RAX]\n\n decode_helper(\n", "file_path": "src/test/decode.rs", "rank": 86, "score": 302182.50809514255 }, { "content": "fn vfmsub213ss_2() {\n\n run_test(&Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM2)), operand3: Some(IndirectDisplaced(ESI, 484419037, Some(OperandSize::Dword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 105, 171, 182, 221, 165, 223, 28], OperandSize::Dword)\n\n}\n\n\n", "file_path": "src/test/instruction_tests/vfmsub213ss.rs", "rank": 87, "score": 300752.634987277 }, { "content": "fn vfmsub213ss_3() {\n\n run_test(&Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM7)), operand3: Some(Direct(XMM0)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 65, 171, 192], OperandSize::Qword)\n\n}\n\n\n", "file_path": "src/test/instruction_tests/vfmsub213ss.rs", "rank": 88, "score": 300752.63498727704 }, { "content": "fn vfmsub213ss_1() {\n\n run_test(&Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM5)), operand3: Some(Direct(XMM2)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 81, 171, 226], OperandSize::Dword)\n\n}\n\n\n", "file_path": "src/test/instruction_tests/vfmsub213ss.rs", "rank": 89, "score": 300752.63498727704 }, { "content": "fn 
vfmsub213ss_5() {\n\n run_test(&Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM5)), operand3: Some(Direct(XMM6)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Up), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 242, 85, 222, 171, 254], OperandSize::Dword)\n\n}\n\n\n", "file_path": "src/test/instruction_tests/vfmsub213ss.rs", "rank": 90, "score": 300752.63498727704 }, { "content": "fn vfmsub213ss_4() {\n\n run_test(&Instruction { mnemonic: Mnemonic::VFMSUB213SS, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM2)), operand3: Some(IndirectScaledDisplaced(RBX, Eight, 48631653, Some(OperandSize::Dword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 226, 105, 171, 52, 221, 101, 15, 230, 2], OperandSize::Qword)\n\n}\n\n\n", "file_path": "src/test/instruction_tests/vfmsub213ss.rs", "rank": 91, "score": 300752.634987277 }, { "content": "fn random_reg_8() -> Reg { random_of(&[Reg::BL, Reg::CL, Reg::DL]) }\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 92, "score": 299957.40658541943 }, { "content": "fn write_test<W: Write>(instr: &Instruction, encoded: &[u8], addr_size: OperandSize,\n\n writer: &mut W, test_count: &mut HashMap<String, u32>) -> io::Result<()> {\n\n let test_num = test_count.entry(instr.mnemonic.clone()).or_insert(0);\n\n *test_num += 1;\n\n\n\n write!(writer, \"#[test]\\nfn {}_{}() {{\\n run_test(&{:?}, &{:?}, {:?})\\n}}\\n\\n\",\n\n instr.mnemonic.to_lowercase(), test_num, instr, encoded, addr_size)\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 98, "score": 295845.2649296818 }, { "content": "fn random_bound_reg() -> Reg { random_of(&[Reg::BND0, Reg::BND1, Reg::BND2, Reg::BND3]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 99, "score": 294212.576199507 } ]
Rust
src/assets/texture.rs
jkugelman/roomdust
3b23685ab36b24dd76ba72f7ac099943e7bd4b5a
use std::collections::BTreeMap; use std::convert::TryInto; use std::ops::{Deref, Index}; use bytes::Buf; use crate::wad::{self, Lump, Wad}; #[derive(Clone, Debug)] pub struct TextureBank(BTreeMap<String, Texture>); impl TextureBank { pub fn load(wad: &Wad) -> wad::Result<Self> { let mut textures = BTreeMap::new(); for lump in Self::texture_lumps(wad)? { Self::load_from(&lump, &mut textures)?; } Ok(Self(textures)) } fn texture_lumps(wad: &Wad) -> wad::Result<Vec<Lump>> { let iter = Some(wad.lump("TEXTURE1")?).into_iter(); let iter = iter.chain(wad.try_lump("TEXTURE2")?); Ok(iter.collect()) } fn load_from(lump: &Lump, textures: &mut BTreeMap<String, Texture>) -> wad::Result<()> { let mut cursor = lump.cursor(); cursor.need(4)?; let count = cursor.get_u32_le(); let mut offsets = Vec::with_capacity(count.clamp(0, 1024) as usize); let need: usize = count .checked_mul(4) .ok_or_else(|| lump.error(format!("bad count {}", count)))? .try_into() .unwrap(); cursor.need(need)?; for _ in 0..count { offsets.push(cursor.get_u32_le()); } cursor.clear(); cursor.done()?; for offset in offsets { let texture = Texture::load(lump, offset.try_into().unwrap())?; textures.insert(texture.name.clone(), texture); } Ok(()) } pub fn get(&self, name: &str) -> Option<&Texture> { self.0.get(&name.to_ascii_uppercase()) } } impl Index<&str> for TextureBank { type Output = Texture; fn index(&self, name: &str) -> &Self::Output { self.get(name).expect("texture not found") } } impl Deref for TextureBank { type Target = BTreeMap<String, Texture>; fn deref(&self) -> &Self::Target { &self.0 } } #[derive(Clone, Debug)] pub struct Texture { pub name: String, pub width: u16, pub height: u16, patches: Vec<PatchPlacement>, } impl Texture { fn load(lump: &Lump, offset: usize) -> wad::Result<Self> { let mut cursor = lump.cursor(); cursor.skip(offset)?; cursor.need(22)?; let name = cursor.get_name(); let _flags = cursor.get_u16_le(); let _unused = cursor.get_u16_le(); let width = cursor.get_u16_le(); let height = 
cursor.get_u16_le(); let _unused = cursor.get_u32_le(); let patch_count: usize = cursor.get_u16_le().into(); let mut patches = Vec::with_capacity(patch_count.clamp(0, 64)); cursor.need(patch_count * 10)?; for _ in 0..patch_count { let x = cursor.get_u16_le(); let y = cursor.get_u16_le(); let patch = cursor.get_u16_le(); let _unused = cursor.get_u16_le(); let _unused = cursor.get_u16_le(); patches.push(PatchPlacement { x, y, patch }); } cursor.clear(); cursor.done()?; Ok(Self { name, width, height, patches, }) } } #[derive(Clone, Debug)] struct PatchPlacement { pub x: u16, pub y: u16, pub patch: u16, } #[cfg(test)] mod tests { use super::*; use crate::assets::PatchBank; use crate::wad::test::*; #[test] fn load() { let patches = PatchBank::load(&BIOTECH_WAD).unwrap(); let textures = TextureBank::load(&BIOTECH_WAD).unwrap(); let exit_door = textures.get("EXITDOOR").unwrap(); assert_eq!(exit_door.name, "EXITDOOR"); assert_eq!(exit_door.width, 128); assert_eq!(exit_door.height, 72); assert_eq!(exit_door.patches.len(), 4); assert_eq!(exit_door.patches[0].x, 0); assert_eq!(exit_door.patches[0].y, 0); assert_eq!( patches.get(exit_door.patches[0].patch).unwrap().name, "DOOR3_6" ); assert_eq!(exit_door.patches[1].x, 64); assert_eq!(exit_door.patches[1].y, 0); assert_eq!( patches.get(exit_door.patches[1].patch).unwrap().name, "DOOR3_4" ); assert_eq!(exit_door.patches[2].x, 88); assert_eq!(exit_door.patches[2].y, 0); assert_eq!( patches.get(exit_door.patches[2].patch).unwrap().name, "DOOR3_5" ); assert_eq!(exit_door.patches[3].x, 112); assert_eq!(exit_door.patches[3].y, 0); assert_eq!( patches.get(exit_door.patches[3].patch).unwrap().name, "T14_5" ); } }
use std::collections::BTreeMap; use std::convert::TryInto; use std::ops::{Deref, Index}; use bytes::Buf; use crate::wad::{self, Lump, Wad}; #[derive(Clone, Debug)] pub struct TextureBank(BTreeMap<String, Texture>); impl TextureBank { pub fn load(wad: &Wad) -> wad::Result<Self> { let mut textures = BTreeMap::new(); for lump in Self::texture_lumps(wad)? { Self::load_from(&lump, &mut textures)?; } Ok(Self(textures)) } fn texture_lumps(wad: &Wad) -> wad::Result<Vec<Lump>> { let iter = Some(wad.lump("TEXTURE1")?).into_iter(); let iter = iter.chain(wad.try_lump("TEXTURE2")?); Ok(iter.collect()) } fn load_from(lump: &Lump, textures: &mut BTreeMap<String, Texture>) -> wad::Result<()> { let mut cursor = lump.cursor(); cursor.need(4)?; let count = cursor.get_u32_le(); let mut offsets = Vec::with_capacity(count.clamp(0, 1024) as usize); let need: usize = count .checked_mul(4) .ok_or_else(|| lump.error(format!("bad count {}", count)))? .try_into() .unwrap(); cursor.need(need)?; for _ in 0..count { offsets.push(cursor.get_u32_le()); } cursor.clear(); cursor.done()?; for offset in offsets { let texture = Texture::load(lum
pub fn get(&self, name: &str) -> Option<&Texture> { self.0.get(&name.to_ascii_uppercase()) } } impl Index<&str> for TextureBank { type Output = Texture; fn index(&self, name: &str) -> &Self::Output { self.get(name).expect("texture not found") } } impl Deref for TextureBank { type Target = BTreeMap<String, Texture>; fn deref(&self) -> &Self::Target { &self.0 } } #[derive(Clone, Debug)] pub struct Texture { pub name: String, pub width: u16, pub height: u16, patches: Vec<PatchPlacement>, } impl Texture { fn load(lump: &Lump, offset: usize) -> wad::Result<Self> { let mut cursor = lump.cursor(); cursor.skip(offset)?; cursor.need(22)?; let name = cursor.get_name(); let _flags = cursor.get_u16_le(); let _unused = cursor.get_u16_le(); let width = cursor.get_u16_le(); let height = cursor.get_u16_le(); let _unused = cursor.get_u32_le(); let patch_count: usize = cursor.get_u16_le().into(); let mut patches = Vec::with_capacity(patch_count.clamp(0, 64)); cursor.need(patch_count * 10)?; for _ in 0..patch_count { let x = cursor.get_u16_le(); let y = cursor.get_u16_le(); let patch = cursor.get_u16_le(); let _unused = cursor.get_u16_le(); let _unused = cursor.get_u16_le(); patches.push(PatchPlacement { x, y, patch }); } cursor.clear(); cursor.done()?; Ok(Self { name, width, height, patches, }) } } #[derive(Clone, Debug)] struct PatchPlacement { pub x: u16, pub y: u16, pub patch: u16, } #[cfg(test)] mod tests { use super::*; use crate::assets::PatchBank; use crate::wad::test::*; #[test] fn load() { let patches = PatchBank::load(&BIOTECH_WAD).unwrap(); let textures = TextureBank::load(&BIOTECH_WAD).unwrap(); let exit_door = textures.get("EXITDOOR").unwrap(); assert_eq!(exit_door.name, "EXITDOOR"); assert_eq!(exit_door.width, 128); assert_eq!(exit_door.height, 72); assert_eq!(exit_door.patches.len(), 4); assert_eq!(exit_door.patches[0].x, 0); assert_eq!(exit_door.patches[0].y, 0); assert_eq!( patches.get(exit_door.patches[0].patch).unwrap().name, "DOOR3_6" ); 
assert_eq!(exit_door.patches[1].x, 64); assert_eq!(exit_door.patches[1].y, 0); assert_eq!( patches.get(exit_door.patches[1].patch).unwrap().name, "DOOR3_4" ); assert_eq!(exit_door.patches[2].x, 88); assert_eq!(exit_door.patches[2].y, 0); assert_eq!( patches.get(exit_door.patches[2].patch).unwrap().name, "DOOR3_5" ); assert_eq!(exit_door.patches[3].x, 112); assert_eq!(exit_door.patches[3].y, 0); assert_eq!( patches.get(exit_door.patches[3].patch).unwrap().name, "T14_5" ); } }
p, offset.try_into().unwrap())?; textures.insert(texture.name.clone(), texture); } Ok(()) }
function_block-function_prefixed
[ { "content": "#[derive(Debug)]\n\nstruct LumpLocation {\n\n pub offset: usize,\n\n pub size: usize,\n\n pub name: String,\n\n}\n\n\n\nimpl WadFile {\n\n /// Loads a WAD file from disk.\n\n pub fn load(path: impl AsRef<Path>) -> wad::Result<Arc<Self>> {\n\n let path = path.as_ref();\n\n let file = File::open(path)\n\n .map_err(|err| wad::Error::Io { path: path.to_owned(), source: err })?;\n\n Self::load_reader(path, file)\n\n }\n\n\n\n /// Loads a WAD file from a generic reader.\n\n ///\n\n /// The reader's current position doesn't matter. Reading WAD files requires seeking to\n\n /// arbitrary offsets throughout the file.\n\n ///\n", "file_path": "src/wad/file.rs", "rank": 0, "score": 78765.6606755585 }, { "content": "/// Reads a name from a raw 8-byte, NUL padded byte array.\n\n///\n\n/// This function does not check if the name contains only legal ASCII characters. Non-ASCII bytes\n\n/// are treated as Latin-1, where all bytes are valid and map 1-to-1 to the corresponding Unicode\n\n/// codepoints.\n\npub fn parse_name(raw: &[u8; 8]) -> String {\n\n let nul_index = raw.iter().position(|&ch| ch == b'\\0').unwrap_or(raw.len());\n\n let raw = &raw[..nul_index];\n\n raw.iter().copied().map(|b| b as char).collect()\n\n}\n", "file_path": "src/wad/name.rs", "rank": 1, "score": 65557.95764492045 }, { "content": "#[derive(Debug)]\n\nstruct Directory {\n\n pub lump_locations: Vec<LumpLocation>,\n\n pub lump_indices: HashMap<String, Vec<usize>>,\n\n}\n\n\n", "file_path": "src/wad/file.rs", "rank": 3, "score": 57070.95971287564 }, { "content": "#[derive(Debug)]\n\nstruct Header {\n\n pub kind: WadKind,\n\n pub lump_count: usize,\n\n pub directory_offset: usize,\n\n}\n\n\n\n/// WAD files can be either IWADs or PWADs.\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]\n\npub enum WadKind {\n\n /// An IWAD or \"internal wad\" such as `doom.wad` that contains all of the data necessary to\n\n /// play.\n\n Iwad,\n\n\n\n /// A PWAD or \"patch wad\" containing extra levels, 
textures, or other assets that are overlaid\n\n /// on top of other wads.\n\n Pwad,\n\n}\n\n\n", "file_path": "src/wad/file.rs", "rank": 4, "score": 57070.95971287564 }, { "content": " /// Checks that there are at least `size` bytes remaining. Always call this before reading\n\n /// anything as [`Bytes`]'s methods will panic if there is insufficient data.\n\n pub fn need(&self, size: usize) -> wad::Result<()> {\n\n if self.len() >= size {\n\n Ok(())\n\n } else {\n\n Err(self.lump.error(\"not enough data\"))\n\n }\n\n }\n\n\n\n /// Checks that there are at least `count` bytes remaining, then calls `self.advance(count)`.\n\n pub fn skip(&mut self, count: usize) -> wad::Result<()> {\n\n self.need(count)?;\n\n self.advance(count);\n\n Ok(())\n\n }\n\n\n\n /// Checks if there is unread data, then drops the cursor. This function should always be called\n\n /// when parsing is finished to ensure there is no extra trailing data. You can [`clear`] the\n\n /// cursor if trailing data is expected.\n", "file_path": "src/wad/cursor.rs", "rank": 5, "score": 51068.28347288646 }, { "content": "/// #\n\n/// let mut cursor = lump.cursor();\n\n///\n\n/// cursor.need(12)?;\n\n/// let value = cursor.get_u32_le();\n\n/// let name = cursor.get_name();\n\n///\n\n/// cursor.done()?;\n\n/// #\n\n/// # Ok::<(), dusty_room::wad::Error>(())\n\n/// ```\n\n///\n\n/// Ignore unread trailing data:\n\n///\n\n/// ```no_run\n\n/// # use bytes::Buf;\n\n/// # let lump = dusty_room::wad::Wad::load(\"\")?.lump(\"\")?;\n\n/// # let mut cursor = lump.cursor();\n\n/// #\n\n/// cursor.clear();\n", "file_path": "src/wad/cursor.rs", "rank": 6, "score": 51067.07351673501 }, { "content": " ///\n\n /// [`clear`]: Bytes::clear\n\n pub fn done(self) -> wad::Result<()> {\n\n if self.is_empty() {\n\n Ok(())\n\n } else {\n\n Err(self.lump.error(\"too much data\"))\n\n }\n\n }\n\n\n\n /// Reads an 8-byte, NUL padded name.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```no_run\n\n /// # let lump = 
dusty_room::wad::Wad::load(\"\")?.lump(\"\")?;\n\n /// # let mut cursor = lump.cursor();\n\n /// #\n\n /// cursor.need(8)?;\n\n /// let name = cursor.get_name();\n", "file_path": "src/wad/cursor.rs", "rank": 7, "score": 51066.05985914253 }, { "content": " /// #\n\n /// # Ok::<(), dusty_room::wad::Error>(())\n\n /// ```\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if there are fewer than 8 bytes remaining.\n\n pub fn get_name(&mut self) -> String {\n\n parse_name(self.split_to(8).as_ref().try_into().unwrap())\n\n }\n\n}\n\n\n\nimpl Deref for Cursor<'_> {\n\n type Target = Bytes;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.data\n\n }\n\n}\n\n\n\nimpl DerefMut for Cursor<'_> {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.data\n\n }\n\n}\n", "file_path": "src/wad/cursor.rs", "rank": 8, "score": 51065.30323383092 }, { "content": "/// cursor.done()?;\n\n/// #\n\n/// # Ok::<(), dusty_room::wad::Error>(())\n\n///\n\n/// ```\n\n///\n\n/// [`done`]: Self::done\n\n/// [`clear`]: Bytes::clear\n\npub struct Cursor<'lump> {\n\n lump: &'lump Lump,\n\n data: Bytes,\n\n}\n\n\n\nimpl<'lump> Cursor<'lump> {\n\n pub(super) fn new(lump: &'lump Lump, data: Bytes) -> Self {\n\n Self { lump, data }\n\n }\n\n}\n\n\n\nimpl Cursor<'_> {\n", "file_path": "src/wad/cursor.rs", "rank": 9, "score": 51064.65396758673 }, { "content": "use std::convert::TryInto;\n\nuse std::ops::{Deref, DerefMut};\n\n\n\nuse bytes::{Buf, Bytes};\n\n\n\nuse crate::wad::{self, parse_name, Lump};\n\n\n\n/// A moving cursor for reading data from a [`Lump`]. `Cursor` is a thin wrapper around [`Bytes`]\n\n/// that allows for checking if there's data available before reading it.\n\n///\n\n/// It is important to always call [`done`] when when parsing is finished to ensure there is no\n\n/// extra trailing data. 
You can [`clear`] the cursor if trailing data is expected.\n\n///\n\n/// # Examples\n\n///\n\n/// Read a 12-byte lump containing a 4-byte number and an 8-byte name:\n\n///\n\n/// ```no_run\n\n/// # use bytes::Buf;\n\n/// # let lump = dusty_room::wad::Wad::load(\"\")?.lump(\"\")?;\n", "file_path": "src/wad/cursor.rs", "rank": 10, "score": 51063.305577432446 }, { "content": "use std::borrow::Cow;\n\nuse std::ops::{Deref, DerefMut};\n\nuse std::sync::Arc;\n\nuse std::{fmt, slice, vec};\n\n\n\nuse bytes::Bytes;\n\n\n\nuse crate::wad::{self, Cursor, WadFile};\n\n\n\n/// A block of one or more [`Lump`]s from a [`Wad`] or [`WadFile`].\n\n///\n\n/// [`Wad`]: crate::wad::Wad\n\n#[derive(Clone, Debug)]\n\npub struct Lumps(Vec<Lump>);\n\n\n\nimpl Lumps {\n\n /// Creates a block of lumps.\n\n ///\n\n /// # Panics\n\n ///\n", "file_path": "src/wad/lump.rs", "rank": 11, "score": 50861.754604909 }, { "content": "impl<'a> IntoIterator for &'a Lumps {\n\n type Item = &'a Lump;\n\n type IntoIter = slice::Iter<'a, Lump>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.iter()\n\n }\n\n}\n\n\n\nimpl<'a> IntoIterator for &'a mut Lumps {\n\n type Item = &'a mut Lump;\n\n type IntoIter = slice::IterMut<'a, Lump>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.iter_mut()\n\n }\n\n}\n\n\n\n/// A lump of data from a [`Wad`] or [`WadFile`].\n\n///\n", "file_path": "src/wad/lump.rs", "rank": 12, "score": 50859.31131702864 }, { "content": "\n\n /// The first lump in the block.\n\n pub fn first(&self) -> &Lump {\n\n self.0.first().unwrap()\n\n }\n\n\n\n /// The last lump in the block.\n\n pub fn last(&self) -> &Lump {\n\n self.0.last().unwrap()\n\n }\n\n\n\n /// Creates a [`wad::Error::Malformed`] blaming this block.\n\n pub fn error(&self, desc: impl Into<Cow<'static, str>>) -> wad::Error {\n\n self.first().file.error(desc)\n\n }\n\n}\n\n\n\nimpl Deref for Lumps {\n\n type Target = Vec<Lump>;\n\n\n", "file_path": "src/wad/lump.rs", "rank": 13, "score": 50856.78723236542 }, { 
"content": " fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl DerefMut for Lumps {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.0\n\n }\n\n}\n\n\n\nimpl IntoIterator for Lumps {\n\n type Item = Lump;\n\n type IntoIter = vec::IntoIter<Self::Item>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.0.into_iter()\n\n }\n\n}\n\n\n", "file_path": "src/wad/lump.rs", "rank": 14, "score": 50856.427719953 }, { "content": " pub fn file(&self) -> &Arc<WadFile> {\n\n &self.file\n\n }\n\n\n\n /// The lump name, for example `VERTEXES` or `THINGS`.\n\n pub fn name(&self) -> &str {\n\n &self.name\n\n }\n\n\n\n /// The lump data, a binary blob.\n\n pub fn data(&self) -> &[u8] {\n\n &self.data\n\n }\n\n\n\n /// Returns a cursor that can be used to parse the lump data.\n\n pub fn cursor(&self) -> Cursor<'_> {\n\n Cursor::new(self, self.data.clone())\n\n }\n\n\n\n /// The size of the lump.\n", "file_path": "src/wad/lump.rs", "rank": 15, "score": 50856.15731690337 }, { "content": " /// Checks that the lump has the expected name.\n\n pub fn expect_name(&self, name: &str) -> wad::Result<&Self> {\n\n if self.name == name {\n\n Ok(self)\n\n } else {\n\n Err(self.error(format!(\"{} missing\", name)))\n\n }\n\n }\n\n\n\n /// Creates a [`wad::Error::Malformed`] blaming this lump.\n\n pub fn error(&self, desc: impl Into<Cow<'static, str>>) -> wad::Error {\n\n self.file.error(format!(\"{}: {}\", self.name(), desc.into()))\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Lump {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n let Self { file, block, name, data } = self;\n\n\n\n if let Some(block) = block {\n", "file_path": "src/wad/lump.rs", "rank": 16, "score": 50855.88386610728 }, { "content": " /// Panics if `lumps` is empty.\n\n pub(super) fn new(mut lumps: Vec<Lump>, is_named: bool) -> Self {\n\n assert!(!lumps.is_empty());\n\n\n\n if is_named {\n\n let name = lumps[0].name.clone();\n\n\n\n for lump in lumps.iter_mut() {\n\n *lump = 
lump.from_block(name.clone());\n\n }\n\n }\n\n\n\n Self(lumps)\n\n }\n\n\n\n /// The file containing the lumps.\n\n pub fn file(&self) -> &Arc<WadFile> {\n\n // It doesn't matter which lump we look at. They all come from the same file.\n\n self.first().file()\n\n }\n", "file_path": "src/wad/lump.rs", "rank": 17, "score": 50854.773101050145 }, { "content": "/// [`Wad`]: crate::wad::Wad\n\n#[derive(Clone)]\n\npub struct Lump {\n\n file: Arc<WadFile>,\n\n block: Option<String>,\n\n name: String,\n\n data: Bytes,\n\n}\n\n\n\nimpl Lump {\n\n /// Creates a lump pointing at a slice of data from a `WadFile`.\n\n pub(super) fn new(file: Arc<WadFile>, name: String, data: Bytes) -> Self {\n\n Self { file, block: None, name, data }\n\n }\n\n\n\n pub(super) fn from_block(&self, block: String) -> Self {\n\n Self { block: Some(block), ..self.clone() }\n\n }\n\n\n\n /// The file containing the lump.\n", "file_path": "src/wad/lump.rs", "rank": 18, "score": 50852.66142722469 }, { "content": " ///\n\n /// This is equivalent to `self.data().len()`.\n\n pub fn size(&self) -> usize {\n\n self.data.len()\n\n }\n\n\n\n /// Returns `true` if this is a marker lump with no data.\n\n ///\n\n /// This is equivalent to `self.data.len() == 0`.\n\n pub fn is_empty(&self) -> bool {\n\n self.data.is_empty()\n\n }\n\n\n\n /// Returns `true` if the lump contains data.\n\n ///\n\n /// This is equivalent to `!self.is_empty()`.\n\n pub fn has_data(&self) -> bool {\n\n !self.is_empty()\n\n }\n\n\n", "file_path": "src/wad/lump.rs", "rank": 19, "score": 50850.27192114 }, { "content": " write!(fmt, \"{} \", block)?;\n\n }\n\n write!(fmt, \"{} ({} bytes) from {}\", name, data.len(), file)\n\n }\n\n}\n\n\n\nimpl fmt::Display for Lump {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n if let Some(ref block) = self.block {\n\n write!(fmt, \"{} \", block)?;\n\n }\n\n write!(fmt, \"{}\", self.name)\n\n }\n\n}\n", "file_path": "src/wad/lump.rs", "rank": 20, "score": 50848.817495816525 }, { 
"content": "#[derive(Clone)]\n\nstruct Column {\n\n posts: Vec<Post>,\n\n}\n\n\n", "file_path": "src/assets/patch.rs", "rank": 21, "score": 34395.95600575392 }, { "content": "#[derive(Clone)]\n\nstruct Post {\n\n y_offset: u16,\n\n pixels: Bytes,\n\n}\n\n\n\nimpl Patch {\n\n /// Loads a patch from a lump.\n\n pub fn load(lump: &Lump) -> wad::Result<Self> {\n\n let mut cursor = lump.cursor();\n\n\n\n cursor.need(8)?;\n\n let name = lump.name().to_owned();\n\n let width = cursor.get_u16_le();\n\n let height = cursor.get_u16_le();\n\n let y = cursor.get_i16_le();\n\n let x = cursor.get_i16_le();\n\n\n\n // Read column offsets. The WAD is untrusted so clamp how much memory is pre-allocated.\n\n let mut column_offsets = Vec::with_capacity(width.clamp(0, 512).into());\n\n cursor.need((4 * width).into())?;\n", "file_path": "src/assets/patch.rs", "rank": 22, "score": 34395.95600575392 }, { "content": " /// [PWAD]: WadKind::Pwad\n\n /// [`patch`]: Self::patch\n\n /// [`expect_kind`]: WadFile::expect_kind\n\n pub fn add(&self, file: Arc<WadFile>) -> wad::Result<Self> {\n\n let mut clone = self.clone();\n\n clone.patches.push(file);\n\n Ok(clone)\n\n }\n\n\n\n /// Returns an iterator over all the files in this `Wad`. The files are in the order they were\n\n /// added: first the initial [`WadFile`], then each of the patches in turn. You can [reverse]\n\n /// the iterator if you want to see the files in the order lump lookups occur, from last to\n\n /// first.\n\n ///\n\n /// One should not normally need to call this function. It is mainly useful for debugging, or\n\n /// just to get a peek under the hood.\n\n ///\n\n /// [reverse]: Iterator::rev\n\n pub fn files(&self) -> impl Iterator<Item = &WadFile> + DoubleEndedIterator {\n\n let initial = once(&*self.initial);\n", "file_path": "src/wad/wad.rs", "rank": 23, "score": 30897.957458036017 }, { "content": " }\n\n\n\n /// Retrieves a block of lumps between start and end markers. 
The marker lumps are included in\n\n /// the result. Blocks in later wads override entire blocks from earlier files.\n\n ///\n\n /// Returns `Ok(None)` if the block is missing.\n\n pub fn try_lumps_between(&self, start: &str, end: &str) -> wad::Result<Option<Lumps>> {\n\n self.try_lookup(|file| file.try_lumps_between(start, end))\n\n }\n\n\n\n fn lookup<T>(\n\n &self,\n\n try_lookup: impl Fn(&Arc<WadFile>) -> wad::Result<Option<T>>,\n\n lookup: impl FnOnce(&Arc<WadFile>) -> wad::Result<T>,\n\n ) -> wad::Result<T> {\n\n for patch in self.patches.iter().rev() {\n\n if let Some(value) = try_lookup(patch)? {\n\n return Ok(value);\n\n }\n\n }\n", "file_path": "src/wad/wad.rs", "rank": 24, "score": 30895.117060791537 }, { "content": "use std::iter::once;\n\nuse std::{path::Path, sync::Arc};\n\n\n\nuse crate::wad::{self, Lump, Lumps, WadFile, WadKind};\n\n\n\n/// A stack of WAD files layered on top of each other, with later files overlaying earlier ones.\n\n/// A `Wad` usually consists of an [IWAD] overlaid with zero or more [PWADs], an ordering which is\n\n/// enforced by the [`load`] and [`patch`] constructors. 
There are a set of unchecked constructors\n\n/// if you want to bypass this constraint.\n\n///\n\n/// [IWAD]: WadKind::Iwad\n\n/// [PWADs]: WadKind::Pwad\n\n/// [`load`]: Self::load\n\n/// [`patch`]: Self::patch\n\n#[derive(Clone, Debug)]\n\n#[must_use]\n\npub struct Wad {\n\n initial: Arc<WadFile>,\n\n patches: Vec<Arc<WadFile>>,\n\n}\n", "file_path": "src/wad/wad.rs", "rank": 25, "score": 30893.74541682961 }, { "content": " ],\n\n );\n\n assert_eq!(DOOM2_WAD.lumps_between(\"S_START\", \"S_END\").unwrap().len(), 1383);\n\n\n\n let wad = DOOM2_WAD.patch(BIOTECH_WAD_PATH).unwrap();\n\n assert_eq!(wad.lump(\"DEMO3\").unwrap().size(), 9490);\n\n assert_eq!(\n\n wad.lumps_following(\"MAP01\", 11)\n\n .unwrap()\n\n .iter()\n\n .map(|lump| (lump.name(), lump.size()))\n\n .collect::<Vec<_>>(),\n\n [\n\n (\"MAP01\", 0),\n\n (\"THINGS\", 1050),\n\n (\"LINEDEFS\", 5040),\n\n (\"SIDEDEFS\", 17400),\n\n (\"VERTEXES\", 1372),\n\n (\"SEGS\", 7536),\n\n (\"SSECTORS\", 984),\n", "file_path": "src/wad/wad.rs", "rank": 26, "score": 30892.91948635291 }, { "content": " use super::*;\n\n use crate::wad::test::*;\n\n\n\n #[test]\n\n fn not_a_wad() {\n\n assert_matches!(Wad::load(\"test/killer.txt\"), Err(wad::Error::Malformed { .. 
}));\n\n }\n\n\n\n #[test]\n\n fn lump_data() {\n\n assert_eq!(DOOM_WAD.lump(\"DEMO1\").unwrap().size(), 20118);\n\n assert_eq!(DOOM_WAD.lump(\"E1M1\").unwrap().size(), 0);\n\n }\n\n\n\n #[test]\n\n fn detect_duplicates() {\n\n assert_matches!(DOOM_WAD.lump(\"E1M1\"), Ok(_));\n\n assert_matches!(DOOM_WAD.lump(\"THINGS\"), Err(_));\n\n assert_matches!(DOOM_WAD.lump(\"VERTEXES\"), Err(_));\n\n assert_matches!(DOOM_WAD.lump(\"SECTORS\"), Err(_));\n", "file_path": "src/wad/wad.rs", "rank": 27, "score": 30892.239100637675 }, { "content": " assert_eq!(DOOM2_WAD.lump(\"DEMO3\").unwrap().size(), 17898);\n\n assert_eq!(\n\n DOOM2_WAD\n\n .lumps_following(\"MAP01\", 11)\n\n .unwrap()\n\n .iter()\n\n .map(|lump| (lump.name(), lump.size()))\n\n .collect::<Vec<_>>(),\n\n [\n\n (\"MAP01\", 0),\n\n (\"THINGS\", 690),\n\n (\"LINEDEFS\", 5180),\n\n (\"SIDEDEFS\", 15870),\n\n (\"VERTEXES\", 1532),\n\n (\"SEGS\", 7212),\n\n (\"SSECTORS\", 776),\n\n (\"NODES\", 5404),\n\n (\"SECTORS\", 1534),\n\n (\"REJECT\", 436),\n\n (\"BLOCKMAP\", 6418),\n", "file_path": "src/wad/wad.rs", "rank": 28, "score": 30892.10293259138 }, { "content": " /// Returns `Ok(None)` if the block is missing.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if `size == 0`.\n\n pub fn try_lumps_following(&self, start: &str, size: usize) -> wad::Result<Option<Lumps>> {\n\n self.try_lookup(|file| file.try_lumps_following(start, size))\n\n }\n\n\n\n /// Retrieves a block of lumps between start and end markers. The marker lumps are included in\n\n /// the result. 
Blocks in later wads override entire blocks from earlier files.\n\n ///\n\n /// # Errors\n\n ///\n\n /// It is an error if the block is missing.\n\n pub fn lumps_between(&self, start: &str, end: &str) -> wad::Result<Lumps> {\n\n self.lookup(\n\n |patch| patch.try_lumps_between(start, end),\n\n |initial| initial.lumps_between(start, end),\n\n )\n", "file_path": "src/wad/wad.rs", "rank": 29, "score": 30890.747447778616 }, { "content": "\n\n lookup(&self.initial)\n\n }\n\n\n\n fn try_lookup<T>(\n\n &self,\n\n try_lookup: impl Fn(&Arc<WadFile>) -> wad::Result<Option<T>>,\n\n ) -> wad::Result<Option<T>> {\n\n for patch in self.patches.iter().rev() {\n\n if let Some(value) = try_lookup(patch)? {\n\n return Ok(Some(value));\n\n }\n\n }\n\n\n\n try_lookup(&self.initial)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/wad/wad.rs", "rank": 30, "score": 30890.736681034025 }, { "content": "\n\nimpl Wad {\n\n /// Loads an initial [IWAD].\n\n ///\n\n /// [IWAD]: WadKind::Iwad\n\n pub fn load(path: impl AsRef<Path>) -> wad::Result<Self> {\n\n let file = WadFile::load(path.as_ref())?;\n\n file.expect_kind(WadKind::Iwad)?;\n\n Self::new(file)\n\n }\n\n\n\n /// Loads an initial WAD without checking if it's an [IWAD].\n\n ///\n\n /// [IWAD]: WadKind::Iwad\n\n pub fn load_unchecked(path: impl AsRef<Path>) -> wad::Result<Self> {\n\n let file = WadFile::load(path.as_ref())?;\n\n Self::new(file)\n\n }\n\n\n\n /// Creates a stack with an initial, already loaded WAD file, which does not need to be an\n", "file_path": "src/wad/wad.rs", "rank": 31, "score": 30890.67788326912 }, { "content": " }\n\n\n\n #[test]\n\n fn lumps_between() {\n\n let sprites = DOOM_WAD.lumps_between(\"S_START\", \"S_END\").unwrap();\n\n assert_eq!(sprites.first().name(), \"S_START\");\n\n assert_eq!(sprites.last().name(), \"S_END\");\n\n assert_eq!(sprites.len(), 485);\n\n assert_eq!(sprites[100].name(), \"SARGB4B6\");\n\n\n\n // Backwards.\n\n assert_matches!(DOOM_WAD.lumps_between(\"S_END\", 
\"S_START\"), Err(_));\n\n }\n\n\n\n #[test]\n\n fn lumps_following() {\n\n let map = DOOM_WAD.lumps_following(\"E1M8\", 11).unwrap();\n\n assert_eq!(map.len(), 11);\n\n assert_eq!(\n\n map.iter().map(Lump::name).collect::<Vec<_>>(),\n", "file_path": "src/wad/wad.rs", "rank": 32, "score": 30890.605342859133 }, { "content": " let patches = self.patches.iter().map(|p| &**p);\n\n initial.chain(patches)\n\n }\n\n\n\n /// Retrieves a unique lump by name. Lumps in later files override lumps from earlier ones.\n\n ///\n\n /// # Errors\n\n ///\n\n /// It is an error if the lump is missing.\n\n pub fn lump(&self, name: &str) -> wad::Result<Lump> {\n\n self.lookup(|patch| patch.try_lump(name), |initial| initial.lump(name))\n\n }\n\n\n\n /// Retrieves a unique lump by name. Lumps in later files override lumps from earlier ones.\n\n ///\n\n /// Returns `Ok(None)` if the lump is missing.\n\n pub fn try_lump(&self, name: &str) -> wad::Result<Option<Lump>> {\n\n self.try_lookup(|file| file.try_lump(name))\n\n }\n\n\n", "file_path": "src/wad/wad.rs", "rank": 33, "score": 30890.51717668755 }, { "content": " (\"NODES\", 6860),\n\n (\"SECTORS\", 2184),\n\n (\"REJECT\", 882),\n\n (\"BLOCKMAP\", 4362),\n\n ],\n\n );\n\n assert_eq!(wad.lumps_between(\"S_START\", \"S_END\").unwrap().len(), 1383);\n\n assert_eq!(wad.lumps_between(\"SS_START\", \"SS_END\").unwrap().len(), 265);\n\n }\n\n\n\n // Make sure `Wad` is `Send` and `Sync`.\n", "file_path": "src/wad/wad.rs", "rank": 34, "score": 30889.816632165945 }, { "content": " [\n\n \"E1M8\", \"THINGS\", \"LINEDEFS\", \"SIDEDEFS\", \"VERTEXES\", \"SEGS\", \"SSECTORS\", \"NODES\",\n\n \"SECTORS\", \"REJECT\", \"BLOCKMAP\"\n\n ],\n\n );\n\n\n\n // Check in and out of bounds sizes.\n\n assert_matches!(DOOM_WAD.try_lumps_following(\"E1M1\", 1), Ok(Some(_)));\n\n assert_matches!(DOOM_WAD.try_lumps_following(\"E1M1\", 9999), Err(_));\n\n }\n\n\n\n #[test]\n\n fn iwad_then_pwads() {\n\n // IWAD + PWAD = success.\n\n let _ = 
Wad::load(DOOM_WAD_PATH).unwrap().patch(KILLER_WAD_PATH).unwrap();\n\n\n\n // IWAD + IWAD = error.\n\n let wad = Wad::load(DOOM_WAD_PATH).unwrap();\n\n assert_matches!(wad.patch(DOOM2_WAD_PATH), Err(_));\n\n\n", "file_path": "src/wad/wad.rs", "rank": 35, "score": 30889.253911537766 }, { "content": " file.expect_kind(WadKind::Pwad)?;\n\n self.add(file)\n\n }\n\n\n\n /// Overlays a WAD without checking if it's a [PWAD].\n\n ///\n\n /// [PWAD]: WadKind::Pwad\n\n pub fn patch_unchecked(&self, path: impl AsRef<Path>) -> wad::Result<Self> {\n\n let file = WadFile::load(path.as_ref())?;\n\n self.add(file)\n\n }\n\n\n\n /// Overlays an already loaded WAD file, which does not need to be a [PWAD].\n\n ///\n\n /// This is a low-level method. It's usually easier to call [`patch`] instead and avoid dealing\n\n /// directly with [`WadFile`].\n\n ///\n\n /// `add` does not require the file to be a PWAD. If you want to check you can call\n\n /// [`expect_kind`] first.\n\n ///\n", "file_path": "src/wad/wad.rs", "rank": 36, "score": 30888.68783515924 }, { "content": " /// Retrieves a block of `size > 0` lumps following a unique named marker. The marker lump is\n\n /// included in the result. Blocks in later files override entire blocks from earlier files.\n\n ///\n\n /// # Errors\n\n ///\n\n /// It is an error if the block is missing.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if `size == 0`.\n\n pub fn lumps_following(&self, start: &str, size: usize) -> wad::Result<Lumps> {\n\n self.lookup(\n\n |patch| patch.try_lumps_following(start, size),\n\n |initial| initial.lumps_following(start, size),\n\n )\n\n }\n\n\n\n /// Retrieves a block of `size > 0` lumps following a unique named marker. The marker lump is\n\n /// included in the result. Blocks in later files override entire blocks from earlier files.\n\n ///\n", "file_path": "src/wad/wad.rs", "rank": 37, "score": 30888.62744968597 }, { "content": " /// [IWAD].\n\n ///\n\n /// This is a low-level method. 
It's usually easier to call [`load`] instead and avoid dealing\n\n /// directly with [`WadFile`].\n\n ///\n\n /// `new` does not require the file to be an IWAD. If you want to check you can call\n\n /// [`expect_kind`] first.\n\n ///\n\n /// [IWAD]: WadKind::Iwad\n\n /// [`load`]: Self::load\n\n /// [`expect_kind`]: WadFile::expect_kind\n\n pub fn new(file: Arc<WadFile>) -> wad::Result<Self> {\n\n Ok(Self { initial: file, patches: Vec::new() })\n\n }\n\n\n\n /// Overlays a [PWAD].\n\n ///\n\n /// [PWAD]: WadKind::Pwad\n\n pub fn patch(&self, path: impl AsRef<Path>) -> wad::Result<Self> {\n\n let file = WadFile::load(path.as_ref())?;\n", "file_path": "src/wad/wad.rs", "rank": 38, "score": 30887.507451376332 }, { "content": " // Can't start with a PWAD.\n\n assert_matches!(Wad::load(KILLER_WAD_PATH), Err(_));\n\n }\n\n\n\n #[test]\n\n fn no_type_checking() -> wad::Result<()> {\n\n // Nonsensical ordering.\n\n let silly_wad = Wad::load_unchecked(KILLER_WAD_PATH)?\n\n .patch_unchecked(DOOM2_WAD_PATH)?\n\n .patch_unchecked(DOOM_WAD_PATH)?\n\n .patch_unchecked(BIOTECH_WAD_PATH)?;\n\n\n\n assert_matches!(silly_wad.lump(\"E1M1\"), Ok(_));\n\n assert_matches!(silly_wad.lump(\"MAP01\"), Ok(_));\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn layering() {\n", "file_path": "src/wad/wad.rs", "rank": 39, "score": 30886.52413072857 }, { "content": " // Make sure `Wad` is `Send` and `Sync`.\n\n trait IsSendAndSync: Send + Sync {}\n\n impl IsSendAndSync for Wad {}\n\n}\n", "file_path": "src/wad/wad.rs", "rank": 48, "score": 26949.910821944137 }, { "content": " let directory_offset = u32::from_le_bytes(raw[8..12].try_into().unwrap());\n\n\n\n Ok(Header {\n\n kind,\n\n lump_count: lump_count.try_into().unwrap(),\n\n directory_offset: directory_offset.try_into().unwrap(),\n\n })\n\n }\n\n\n\n fn read_directory(\n\n raw: &[u8],\n\n lump_count: usize,\n\n directory_offset: usize,\n\n ) -> Result<Directory, String> {\n\n let mut cursor = raw\n\n .get(directory_offset..)\n\n .ok_or_else(|| 
format!(\"lump directory at bad offset {}\", directory_offset))?;\n\n\n\n // The WAD is untrusted so clamp how much memory is pre-allocated. For comparison,\n\n // `doom.wad` has 1,264 lumps and `doom2.wad` has 2,919.\n", "file_path": "src/wad/file.rs", "rank": 49, "score": 23977.126777075635 }, { "content": " Lump::new(file, name, data)\n\n }\n\n\n\n /// Reads one or more lumps from the raw data, pulling out slices.\n\n fn read_lumps(self: &Arc<Self>, indices: Range<usize>, is_named: bool) -> Lumps {\n\n assert!(!indices.is_empty());\n\n let lumps = indices.map(|index| self.read_lump(index)).collect();\n\n Lumps::new(lumps, is_named)\n\n }\n\n\n\n /// Retrieves all of the lumps in the file.\n\n ///\n\n /// An unordered dump of all lumps is rarely useful. This can be useful for debugging, or just\n\n /// to inspect the contents of a WAD. It's not used by any of the asset loading code.\n\n pub fn lumps(self: &Arc<Self>) -> impl Iterator<Item = Lump> + DoubleEndedIterator {\n\n self.read_lumps(0..self.lump_indices.len(), false).into_iter()\n\n }\n\n\n\n /// Creates a [`wad::Error::Malformed`] blaming this file.\n\n pub fn error(&self, desc: impl Into<Cow<'static, str>>) -> wad::Error {\n", "file_path": "src/wad/file.rs", "rank": 50, "score": 23975.66857559023 }, { "content": "\n\n // Check lump bounds now so we don't have to later.\n\n let offset: usize = offset.try_into().unwrap();\n\n let size: usize = size.try_into().unwrap();\n\n\n\n if offset >= raw.len() {\n\n return Err(format!(\"{} at bad offset {}\", name, offset));\n\n }\n\n if offset + size >= raw.len() {\n\n return Err(format!(\"{} has bad size {}\", name, size));\n\n }\n\n\n\n lump_locations.push(LumpLocation { offset, size, name });\n\n }\n\n\n\n // Build a map of lump names -> indices for fast lookup.\n\n let mut lump_indices = HashMap::new();\n\n\n\n for (index, location) in lump_locations.iter().enumerate() {\n\n lump_indices\n", "file_path": "src/wad/file.rs", "rank": 51, "score": 23972.992413333588 
}, { "content": " let mut lump_locations = Vec::with_capacity(lump_count.clamp(0, 4096));\n\n\n\n for _ in 0..lump_count {\n\n // Read the entry and advance the read cursor.\n\n let entry = &cursor\n\n .get(..16)\n\n .ok_or_else(|| format!(\"lump directory has bad count {}\", lump_count))?;\n\n cursor = &cursor[16..];\n\n\n\n let offset = u32::from_le_bytes(entry[0..4].try_into().unwrap());\n\n let size = u32::from_le_bytes(entry[4..8].try_into().unwrap());\n\n let name: [u8; 8] = entry[8..16].try_into().unwrap();\n\n let name = parse_name(&name);\n\n\n\n // Verify that this is a legal name.\n\n let has_illegal_char = name\n\n .contains(|ch| !matches!(ch, 'A'..='Z' | '0'..='9' | '[' | ']' | '-' | '_' | '\\\\'));\n\n if name.is_empty() || has_illegal_char {\n\n return Err(format!(\"bad lump name {:?}\", name));\n\n }\n", "file_path": "src/wad/file.rs", "rank": 52, "score": 23971.171193569982 }, { "content": " let index = self.try_lump_index(name)?;\n\n if index.is_none() {\n\n return Ok(None);\n\n }\n\n let index = index.unwrap();\n\n\n\n Ok(Some(self.read_lump(index)))\n\n }\n\n\n\n /// Retrieves a block of `size > 0` lumps following a unique named marker. 
The marker lump is\n\n /// included in the result.\n\n ///\n\n /// # Errors\n\n ///\n\n /// It is an error if the block is missing.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if `size == 0`.\n\n pub fn lumps_following(self: &Arc<Self>, start: &str, size: usize) -> wad::Result<Lumps> {\n", "file_path": "src/wad/file.rs", "rank": 53, "score": 23970.077076336078 }, { "content": "pub use error::*;\n\npub use file::*;\n\npub use lump::*;\n\npub use name::*;\n\npub use wad::*;\n\n\n\n#[cfg(test)]\n\npub(crate) mod test;\n\n\n\nmod cursor;\n\nmod error;\n\nmod file;\n\nmod lump;\n\nmod name;\n\n#[allow(clippy::module_inception)]\n\nmod wad;\n", "file_path": "src/wad/mod.rs", "rank": 54, "score": 23970.03578576439 }, { "content": " wad::Error::malformed(&self.path, desc)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for WadFile {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n let Self { path, raw, kind, lump_locations, lump_indices } = self;\n\n\n\n fmt.debug_struct(\"WadFile\")\n\n .field(\"path\", &path)\n\n .field(\"raw\", &format!(\"<{} bytes>\", raw.len()))\n\n .field(\"kind\", &kind)\n\n .field(\"lump_locations\", &lump_locations)\n\n .field(\"lump_indices\", &lump_indices)\n\n .finish()\n\n }\n\n}\n\n\n\nimpl fmt::Display for WadFile {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n write!(fmt, \"{}\", self.path.display())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n //! 
This file is covered by tests in [`crate::wad::wad`].\n\n}\n", "file_path": "src/wad/file.rs", "rank": 55, "score": 23969.774738286087 }, { "content": " .entry(location.name.clone())\n\n .and_modify(|indices: &mut Vec<usize>| indices.push(index))\n\n .or_insert_with(|| vec![index]);\n\n }\n\n\n\n Ok(Directory { lump_locations, lump_indices })\n\n }\n\n\n\n /// The file's path on disk.\n\n pub fn path(&self) -> &Path {\n\n &self.path\n\n }\n\n\n\n /// Returns whether this is an IWAD or PWAD.\n\n pub fn kind(&self) -> WadKind {\n\n self.kind\n\n }\n\n\n\n /// Checks that the file is the correct kind.\n\n pub fn expect_kind(self: &Arc<Self>, expected: WadKind) -> wad::Result<()> {\n", "file_path": "src/wad/file.rs", "rank": 56, "score": 23969.736817351848 }, { "content": " indices.iter().map(|&index| self.read_lump(index)).collect();\n\n lumps.dedup_by(|l1, l2| l1.data() == l2.data());\n\n\n\n if lumps.len() == 1 && lumps[0].has_data() {\n\n Ok(Some(*indices.last().unwrap()))\n\n } else {\n\n Err(self.error(format!(\"{} found {} times\", name, indices.len())))\n\n }\n\n }\n\n }\n\n }\n\n\n\n /// Reads a lump from the raw data, pulling out a slice.\n\n fn read_lump(self: &Arc<Self>, index: usize) -> Lump {\n\n let location = &self.lump_locations[index];\n\n\n\n let file = Arc::clone(self);\n\n let name = location.name.clone();\n\n let data = self.raw.slice(location.offset..location.offset + location.size);\n\n\n", "file_path": "src/wad/file.rs", "rank": 57, "score": 23969.71301617271 }, { "content": "///\n\n/// This is a lower level type. 
Code outside the [`wad`] module should mainly use the [`Wad`]\n\n/// struct, which has a similar interface with the added capability of being able to add patch WADs\n\n/// on top of the base game WAD.\n\n///\n\n/// [`Wad`]: crate::wad::Wad\n\npub struct WadFile {\n\n path: PathBuf,\n\n raw: Bytes,\n\n kind: WadKind,\n\n lump_locations: Vec<LumpLocation>,\n\n lump_indices: HashMap<String, Vec<usize>>,\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/wad/file.rs", "rank": 58, "score": 23969.12112150309 }, { "content": " let size: usize = size.try_into().unwrap();\n\n\n\n // Reserve an extra byte to avoid an undesirable doubling of capacity. See:\n\n // https://users.rust-lang.org/t/vec-with-capacity-read-to-end-overallocation/65023\n\n let mut raw = Vec::with_capacity(size + 1);\n\n file.rewind()?;\n\n file.read_to_end(&mut raw)?;\n\n let raw = Bytes::from(raw);\n\n\n\n Ok(raw)\n\n }\n\n\n\n /// Loads a WAD file from a raw byte buffer.\n\n ///\n\n /// The `path` only used for display purposes, such as in error messages. 
It doesn't need to\n\n /// point to an actual file on disk.\n\n pub fn load_raw(path: impl AsRef<Path>, raw: Bytes) -> wad::Result<Arc<Self>> {\n\n Self::load_raw_impl(path.as_ref(), raw)\n\n .map_err(|desc: String| wad::Error::malformed(path, desc))\n\n }\n", "file_path": "src/wad/file.rs", "rank": 59, "score": 23969.11351494647 }, { "content": "\n\n // Non-generic helper to minimize the amount of code subject to monomorphization.\n\n fn load_raw_impl(path: &Path, raw: Bytes) -> Result<Arc<Self>, String> {\n\n let Header { kind, lump_count, directory_offset } = Self::read_header(&raw)?;\n\n\n\n let Directory { lump_locations, lump_indices } =\n\n Self::read_directory(&raw, lump_count, directory_offset)?;\n\n\n\n Ok(Arc::new(Self { path: path.to_owned(), raw, kind, lump_locations, lump_indices }))\n\n }\n\n\n\n fn read_header(raw: &[u8]) -> Result<Header, String> {\n\n let raw = raw.get(0..12).ok_or_else(|| \"not a WAD file\".to_owned())?;\n\n\n\n let kind = match &raw[0..4] {\n\n b\"IWAD\" => WadKind::Iwad,\n\n b\"PWAD\" => WadKind::Pwad,\n\n _ => return Err(\"not a WAD file\".to_owned()),\n\n };\n\n let lump_count = u32::from_le_bytes(raw[4..8].try_into().unwrap());\n", "file_path": "src/wad/file.rs", "rank": 60, "score": 23968.63086519874 }, { "content": "//! Load WAD files into memory and read their data lumps.\n\n//!\n\n//! # Examples\n\n//!\n\n//! Load a custom level:\n\n//!\n\n//! ```no_run\n\n//! use dusty_room::wad::Wad;\n\n//!\n\n//! let game_wad = Wad::load(\"doom.wad\")?;\n\n//! let my_wad = game_wad.patch(\"killer.wad\")?;\n\n//!\n\n//! let my_level = my_wad.lumps_following(\"E1M1\", 11)?;\n\n//! let things_lump = my_level[1].expect_name(\"THINGS\")?;\n\n//! let sectors_lump = my_level[8].expect_name(\"SECTORS\")?;\n\n//! #\n\n//! # Ok::<(), dusty_room::wad::Error>(())\n\n//! 
```\n\n\n\npub use cursor::*;\n", "file_path": "src/wad/mod.rs", "rank": 61, "score": 23967.915705299612 }, { "content": " /// [Unofficial Doom Specs]: http://edge.sourceforge.net/edit_guide/doom_specs.htm\n\n fn try_lump_index(self: &Arc<Self>, name: &str) -> wad::Result<Option<usize>> {\n\n let mut name = Cow::from(name);\n\n\n\n // Convert the name to uppercase like DOOM does. We have to emulate this because\n\n // `doom.wad` and `doom2.wad` include a lowercase `w94_1` in their `PNAMES`.\n\n if name.contains(|ch: char| ch.is_ascii_lowercase()) {\n\n name.to_mut().make_ascii_uppercase();\n\n }\n\n\n\n match self.lump_indices.get(name.as_ref()).map(Vec::as_slice) {\n\n // Not found.\n\n None => Ok(None),\n\n\n\n // Unique index.\n\n Some(&[index]) => Ok(Some(index)),\n\n\n\n // Multiple indices.\n\n Some(indices) => {\n\n let mut lumps: Vec<_> =\n", "file_path": "src/wad/file.rs", "rank": 62, "score": 23967.616450929512 }, { "content": " if start_index.is_none() {\n\n return Ok(None);\n\n }\n\n let start_index = start_index.unwrap();\n\n\n\n if start_index + size >= self.lump_indices.len() {\n\n return Err(self.error(format!(\"{} missing lumps\", start)));\n\n }\n\n\n\n Ok(Some(self.read_lumps(start_index..start_index + size, true)))\n\n }\n\n\n\n /// Retrieves a block of lumps between unique start and end markers. The marker lumps are\n\n /// included in the result.\n\n ///\n\n /// # Errors\n\n ///\n\n /// It is an error if the block is missing.\n\n pub fn lumps_between(self: &Arc<Self>, start: &str, end: &str) -> wad::Result<Lumps> {\n\n self.try_lumps_between(start, end)?\n", "file_path": "src/wad/file.rs", "rank": 63, "score": 23967.464809567795 }, { "content": " self.try_lumps_following(start, size)?\n\n .ok_or_else(|| self.error(format!(\"{} missing\", start)))\n\n }\n\n\n\n /// Retrieves a block of `size > 0` lumps following a unique named marker. 
The marker lump is\n\n /// included in the result.\n\n ///\n\n /// Returns `Ok(None)` if the block is missing.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if `size == 0`.\n\n pub fn try_lumps_following(\n\n self: &Arc<Self>,\n\n start: &str,\n\n size: usize,\n\n ) -> wad::Result<Option<Lumps>> {\n\n assert!(size > 0);\n\n\n\n let start_index = self.try_lump_index(start)?;\n", "file_path": "src/wad/file.rs", "rank": 64, "score": 23966.697579929496 }, { "content": "//! Place test WADs in a public crate-wide location so tests don't waste time loading the same\n\n//! files over and over.\n\n\n\nuse crate::wad::Wad;\n\n\n\npub static DOOM_WAD_PATH: &str = \"test/doom.wad\";\n\npub static DOOM2_WAD_PATH: &str = \"test/doom2.wad\";\n\npub static KILLER_WAD_PATH: &str = \"test/killer.wad\";\n\npub static BIOTECH_WAD_PATH: &str = \"test/biotech.wad\";\n\n\n\nlazy_static! {\n\n pub static ref DOOM_WAD: Wad = Wad::load(DOOM_WAD_PATH).unwrap();\n\n pub static ref DOOM2_WAD: Wad = Wad::load(DOOM2_WAD_PATH).unwrap();\n\n pub static ref KILLER_WAD: Wad = DOOM_WAD.patch(KILLER_WAD_PATH).unwrap();\n\n pub static ref BIOTECH_WAD: Wad = DOOM2_WAD.patch(BIOTECH_WAD_PATH).unwrap();\n\n}\n", "file_path": "src/wad/test.rs", "rank": 65, "score": 23966.098181633202 }, { "content": " .ok_or_else(|| self.error(format!(\"{} and {} missing\", start, end)))\n\n }\n\n\n\n /// Retrieves a block of lumps between unique start and end markers. 
The marker lumps are\n\n /// included in the result.\n\n ///\n\n /// Returns `Ok(None)` if the block is missing.\n\n pub fn try_lumps_between(\n\n self: &Arc<Self>,\n\n start: &str,\n\n end: &str,\n\n ) -> wad::Result<Option<Lumps>> {\n\n let start_index = self.try_lump_index(start)?;\n\n let end_index = self.try_lump_index(end)?;\n\n\n\n match (start_index, end_index) {\n\n (Some(_), Some(_)) => {}\n\n\n\n (None, None) => {\n\n return Ok(None);\n", "file_path": "src/wad/file.rs", "rank": 66, "score": 23965.892412449834 }, { "content": " }\n\n\n\n (Some(_), None) => {\n\n return Err(self.error(format!(\"{} without {}\", start, end)));\n\n }\n\n\n\n (None, Some(_)) => {\n\n return Err(self.error(format!(\"{} without {}\", end, start)));\n\n }\n\n }\n\n\n\n let start_index = start_index.unwrap();\n\n let end_index = end_index.unwrap();\n\n\n\n if start_index > end_index {\n\n return Err(self.error(format!(\"{} after {}\", start, end)));\n\n }\n\n\n\n Ok(Some(self.read_lumps(start_index..end_index + 1, false)))\n\n }\n", "file_path": "src/wad/file.rs", "rank": 67, "score": 23964.46005650724 }, { "content": " /// The `path` only used for display purposes, such as in error messages. It doesn't need to\n\n /// point to an actual file on disk.\n\n pub fn load_reader(path: impl AsRef<Path>, file: impl Read + Seek) -> wad::Result<Arc<Self>> {\n\n let path = path.as_ref();\n\n let raw = Self::read_bytes(file)\n\n .map_err(|err| wad::Error::Io { path: path.to_owned(), source: err })?;\n\n Self::load_raw(path, raw)\n\n }\n\n\n\n fn read_bytes(mut file: impl Read + Seek) -> io::Result<Bytes> {\n\n let size = file.seek(SeekFrom::End(0))?;\n\n // If the file is really large it may not fit into memory. Individual allocations can never\n\n // exceed `isize::MAX` bytes, which is just 2GB on a 32-bit system.\n\n //\n\n // This won't catch all panics. Ideally we could check if `Vec::with_capacity` fails, but in\n\n // stable Rust there's no way to do that. 
Nightly offers `Vec::try_reserve`, so hope is on\n\n // the horizon.\n\n if isize::try_from(size).is_err() {\n\n return Err(io::Error::new(io::ErrorKind::OutOfMemory, \"file too large\"));\n\n }\n", "file_path": "src/wad/file.rs", "rank": 68, "score": 23964.024626356822 }, { "content": " if self.kind() == expected {\n\n Ok(())\n\n } else {\n\n Err(wad::Error::WrongType { path: self.path().to_owned(), expected })\n\n }\n\n }\n\n\n\n /// Retrieves a unique lump by name.\n\n ///\n\n /// # Errors\n\n ///\n\n /// It is an error if the lump is missing.\n\n pub fn lump(self: &Arc<Self>, name: &str) -> wad::Result<Lump> {\n\n self.try_lump(name)?.ok_or_else(|| self.error(format!(\"{} missing\", name)))\n\n }\n\n\n\n /// Retrieves a unique lump by name.\n\n ///\n\n /// Returns `Ok(None)` if the lump is missing.\n\n pub fn try_lump(self: &Arc<Self>, name: &str) -> wad::Result<Option<Lump>> {\n", "file_path": "src/wad/file.rs", "rank": 69, "score": 23963.764228798056 }, { "content": "use std::borrow::Cow;\n\nuse std::collections::HashMap;\n\nuse std::convert::TryInto;\n\n\n\nuse std::convert::TryFrom;\n\nuse std::fs::File;\n\nuse std::io::Read;\n\nuse std::io::Seek;\n\nuse std::io::SeekFrom;\n\nuse std::ops::Range;\n\nuse std::path::{Path, PathBuf};\n\nuse std::sync::Arc;\n\nuse std::{fmt, io};\n\n\n\nuse bytes::Bytes;\n\n\n\nuse crate::wad::parse_name;\n\nuse crate::wad::{self, Lump, Lumps};\n\n\n\n/// A single IWAD or PWAD.\n", "file_path": "src/wad/file.rs", "rank": 70, "score": 23963.66198092269 }, { "content": "\n\n /// Looks up a lump's index.\n\n ///\n\n /// Returns `Ok(None)` if there is no such lump.\n\n ///\n\n /// # Uniqueness\n\n ///\n\n /// If the lump name isn't unique then that's an error--unless the duplicated lumps have\n\n /// identical content. As the [Unofficial Doom Specs] explain, some of the official DOOM wads\n\n /// shipped with accidental duplications:\n\n ///\n\n /// > There are some imperfections in the `DOOM.WAD` file. 
All versions up to 1.666 have the\n\n /// > `SW18_7` lump included twice. Versions before 1.666 have the `COMP03_8` lump twice. And\n\n /// > with version 1.666 somebody really messed up, because every single `DP*` and `DS*` and\n\n /// > `D_*` lump that's in the shareware `DOOM1.WAD` is in the registered `DOOM.WAD` twice. The\n\n /// > error doesn't adversely affect play in any way, but it does take up an unnecessary 800k on\n\n /// > the hard drive.\n\n ///\n\n /// When this happens the last index returned.\n\n ///\n", "file_path": "src/wad/file.rs", "rank": 71, "score": 23963.5983872799 }, { "content": "use std::borrow::Cow;\n\nuse std::io;\n\nuse std::path::{Path, PathBuf};\n\n\n\nuse thiserror::Error;\n\n\n\nuse crate::wad::WadKind;\n\n\n\n/// A specialized [`Result`] type for [`Wad`] and [`WadFile`] operations. This typedef is used to\n\n/// avoid writing out [`wad::Error`] directly and is otherwise a direct mapping to [`Result`].\n\n///\n\n/// [`Result`]: std::result::Result\n\n/// [`Wad`]: crate::wad::Wad\n\n/// [`WadFile`]: crate::wad::WadFile\n\n/// [`wad::Error`]: crate::wad::Error\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\n/// The error type when loading and searching [`Wad`]s and [`WadFile`]s. Errors are always tied to a\n\n/// particular file.\n\n///\n", "file_path": "src/wad/error.rs", "rank": 72, "score": 23963.58751430257 }, { "content": " path: PathBuf,\n\n /// A description of the error.\n\n desc: Cow<'static, str>,\n\n },\n\n\n\n /// An [IWAD] was received when expecting a [PWAD], or vice versa.\n\n ///\n\n /// [IWAD]: WadKind::Iwad\n\n /// [PWAD]: WadKind::Pwad\n\n #[error(\"{}: not {}\", path.display(), match expected {\n\n WadKind::Iwad => \"an IWAD\",\n\n WadKind::Pwad => \"a PWAD\",\n\n })]\n\n WrongType {\n\n /// The file path.\n\n path: PathBuf,\n\n /// The WAD kind that was expected.\n\n expected: WadKind,\n\n },\n\n}\n\n\n\nimpl Error {\n\n /// Creates an [`Error::Malformed`]. 
Accepts both `&'static str` literals and owned `String`s.\n\n pub fn malformed(path: impl AsRef<Path>, desc: impl Into<Cow<'static, str>>) -> Self {\n\n Self::Malformed { path: path.as_ref().to_owned(), desc: desc.into() }\n\n }\n\n}\n", "file_path": "src/wad/error.rs", "rank": 73, "score": 23962.186709357153 }, { "content": "/// [`Wad`]: crate::wad::Wad\n\n/// [`WadFile`]: crate::wad::WadFile\n\n#[derive(Error, Debug)]\n\npub enum Error {\n\n /// An I/O error from a [`std::io`] operation.\n\n #[error(\"{}: {source}\", path.display())]\n\n Io {\n\n /// The path of the file where the I/O error occurred.\n\n path: PathBuf,\n\n /// The source I/O error.\n\n source: io::Error,\n\n },\n\n\n\n /// A [`Wad`] or [`WadFile`] is malformed or missing data.\n\n ///\n\n /// [`Wad`]: crate::wad::Wad\n\n /// [`WadFile`]: crate::wad::WadFile\n\n #[error(\"{}: {desc}\", path.display())]\n\n Malformed {\n\n /// The path of the malformed file.\n", "file_path": "src/wad/error.rs", "rank": 74, "score": 23962.033112877463 }, { "content": "/// Reads a name from a raw 8-byte, NUL padded byte array.\n\n///\n\n/// This function does not check if the name contains only legal ASCII characters. 
Non-ASCII bytes\n\n/// are treated as Latin-1, where all bytes are valid and map 1-to-1 to the corresponding Unicode\n\n/// codepoints.\n", "file_path": "src/wad/name.rs", "rank": 75, "score": 23955.142235721654 }, { "content": " /// Patch names are listed in the `PNAMES` lump, and each patch is loaded from the lump of that\n\n /// name.\n\n pub fn load(wad: &Wad) -> wad::Result<Self> {\n\n let lump = wad.lump(\"PNAMES\")?;\n\n let mut cursor = lump.cursor();\n\n\n\n cursor.need(4)?;\n\n let count = cursor.get_u32_le();\n\n\n\n let mut patches = Vec::with_capacity(count.clamp(0, 1024) as usize);\n\n cursor.need((count * 8).try_into().unwrap())?;\n\n\n\n for _ in 0..count {\n\n let name = cursor.get_name();\n\n let lump = wad.try_lump(&name)?;\n\n let patch = lump.as_ref().map(Patch::load).transpose()?;\n\n patches.push((name, patch));\n\n }\n\n\n\n cursor.done()?;\n", "file_path": "src/assets/patch.rs", "rank": 76, "score": 23.73990919693247 }, { "content": "#[derive(Debug, Clone)]\n\npub struct Palette {\n\n raw: Bytes,\n\n}\n\n\n\nconst PALETTE_COLORS: usize = 256;\n\nconst PALETTE_BYTES: usize = 3 * PALETTE_COLORS;\n\n\n\nimpl Palette {\n\n fn load(cursor: &mut Cursor) -> wad::Result<Self> {\n\n cursor.need(PALETTE_BYTES)?;\n\n let raw = cursor.split_to(PALETTE_BYTES);\n\n Ok(Self { raw })\n\n }\n\n}\n\n\n\nimpl Index<u8> for Palette {\n\n type Output = (u8, u8, u8);\n\n\n\n fn index(&self, index: u8) -> &Self::Output {\n", "file_path": "src/assets/palette.rs", "rank": 77, "score": 21.54121048004035 }, { "content": " /// Loads a bank of color palettes from the `PLAYPAL` lump.\n\n pub fn load(wad: &Wad) -> wad::Result<Self> {\n\n let lump = wad.lump(\"PLAYPAL\")?;\n\n let mut cursor = lump.cursor();\n\n\n\n let mut palettes = Vec::with_capacity(lump.size() / PALETTE_BYTES);\n\n cursor.need(1)?;\n\n\n\n while cursor.has_remaining() {\n\n palettes.push(Palette::load(&mut cursor)?);\n\n }\n\n\n\n cursor.done()?;\n\n\n\n Ok(PaletteBank { palettes, active: 0 })\n\n 
}\n\n\n\n /// The number of palettes in the bank.\n\n pub fn count(&self) -> usize {\n\n self.palettes.len()\n", "file_path": "src/assets/palette.rs", "rank": 78, "score": 21.16535129950162 }, { "content": "impl Flat {\n\n /// Loads a flat from a lump.\n\n pub fn load(lump: &Lump) -> wad::Result<Self> {\n\n let width: usize = Self::width().into();\n\n let height: usize = Self::height().into();\n\n\n\n let mut cursor = lump.cursor();\n\n let name = lump.name().to_owned();\n\n cursor.need(width * height)?;\n\n let pixels = cursor.split_to(width * height);\n\n cursor.done()?;\n\n\n\n Ok(Self { name, pixels })\n\n }\n\n\n\n /// Width in pixels. Flats are always 64x64.\n\n pub const fn width() -> u16 {\n\n 64\n\n }\n\n\n", "file_path": "src/assets/flat.rs", "rank": 79, "score": 19.51787864157796 }, { "content": "use std::convert::TryInto;\n\nuse std::mem;\n\nuse std::ops::Index;\n\n\n\nuse bytes::{Buf, Bytes};\n\n\n\nuse crate::wad::{self, Cursor, Wad};\n\n\n\n/// A bank of color palettes from the `PLAYPAL` lump. 
The bank always has an [active] palette, which\n\n/// can be [switched] at any time.\n\n///\n\n/// [active]: Self::active\n\n/// [switched]: Self::switch\n\n#[derive(Debug)]\n\npub struct PaletteBank {\n\n palettes: Vec<Palette>,\n\n active: usize,\n\n}\n\n\n\nimpl PaletteBank {\n", "file_path": "src/assets/palette.rs", "rank": 80, "score": 19.061693325359542 }, { "content": "use std::ops::{Deref, Index};\n\n\n\nuse bytes::Buf;\n\n\n\nuse crate::assets::{Assets, Texture};\n\nuse crate::map::{Map, Sector, Sectors};\n\nuse crate::wad::{self, Lumps};\n\n\n\n/// A list of [sidedefs] for a particular [map], indexed by number.\n\n///\n\n/// [sidedefs]: Sidedef\n\n/// [map]: crate::map::Map\n\n#[derive(Debug)]\n\npub struct Sidedefs(Vec<Sidedef>);\n\n\n\nimpl Sidedefs {\n\n /// Loads a map's sidedefs from its `SIDEDEFS` lump.\n\n pub fn load(lumps: &Lumps, assets: &Assets, sectors: &Sectors) -> wad::Result<Self> {\n\n let lump = lumps[3].expect_name(\"SIDEDEFS\")?;\n\n\n", "file_path": "src/map/sidedef.rs", "rank": 81, "score": 18.86715657575143 }, { "content": "\n\n for _ in 0..width {\n\n column_offsets.push(cursor.get_u32_le());\n\n }\n\n\n\n cursor.clear();\n\n cursor.done()?;\n\n\n\n // Read columns. 
The WAD is untrusted so clamp how much memory is pre-allocated.\n\n let mut columns = Vec::with_capacity(width.clamp(0, 512).into());\n\n for offset in column_offsets {\n\n columns.push(Self::read_column(lump, offset.try_into().unwrap())?);\n\n }\n\n\n\n Ok(Self { name, width, height, x, y, columns })\n\n }\n\n\n\n fn read_column(lump: &Lump, offset: usize) -> wad::Result<Column> {\n\n let mut cursor = lump.cursor();\n\n cursor.skip(offset)?;\n", "file_path": "src/assets/patch.rs", "rank": 82, "score": 18.851397096977383 }, { "content": "use std::collections::BTreeMap;\n\nuse std::fmt;\n\nuse std::ops::{Deref, Index};\n\n\n\nuse bytes::Bytes;\n\n\n\nuse crate::wad::{self, Lump, Wad};\n\n\n\n/// A bank of [sector] floor and ceiling textures, indexed by name.\n\n///\n\n/// [sector]: crate::map::Sector\n\n#[derive(Clone)]\n\npub struct FlatBank(BTreeMap<String, Flat>);\n\n\n\nimpl FlatBank {\n\n /// Loads all the flats from a [`Wad`] found between the `F_START` and `F_END` marker lumps.\n\n pub fn load(wad: &Wad) -> wad::Result<Self> {\n\n let lumps = wad.lumps_between(\"F_START\", \"F_END\")?;\n\n let mut flats = BTreeMap::new();\n\n\n", "file_path": "src/assets/flat.rs", "rank": 83, "score": 18.42906448335126 }, { "content": "use std::ops::{Deref, Index};\n\n\n\nuse bytes::Buf;\n\n\n\nuse crate::map::{Map, Sidedef, Sidedefs, Vertex, Vertexes};\n\nuse crate::wad::{self, Lumps};\n\n\n\n/// A list of [linedefs] for a particular [map], indexed by number.\n\n///\n\n/// [linedefs]: Linedef\n\n/// [map]: crate::map::Map\n\n#[derive(Debug)]\n\npub struct Linedefs(Vec<Linedef>);\n\n\n\nimpl Linedefs {\n\n /// Loads a map's linedefs from its `LINEDEFS` lump.\n\n pub fn load(lumps: &Lumps, vertexes: &Vertexes, sidedefs: &Sidedefs) -> wad::Result<Self> {\n\n let lump = lumps[2].expect_name(\"LINEDEFS\")?;\n\n\n\n let mut linedefs = Vec::with_capacity(lump.size() / 14);\n", "file_path": "src/map/linedef.rs", "rank": 84, "score": 18.393310010271417 }, { "content": "use 
std::convert::TryInto;\n\nuse std::ops::Index;\n\n\n\nuse bytes::Buf;\n\n\n\nuse crate::assets::{Assets, Flat};\n\nuse crate::wad::{self, Lumps};\n\n\n\n/// A list of [sectors] for a particular [map], indexed by number.\n\n///\n\n/// [sectors]: Sector\n\n/// [map]: crate::map::Map\n\n#[derive(Debug)]\n\npub struct Sectors(Vec<Sector>);\n\n\n\nimpl Sectors {\n\n /// Loads a map's sectors from its `SECTORS` lump.\n\n pub fn load(lumps: &Lumps, assets: &Assets) -> wad::Result<Self> {\n\n let lump = lumps[8].expect_name(\"SECTORS\")?;\n\n\n", "file_path": "src/map/sector.rs", "rank": 85, "score": 17.567441123810152 }, { "content": "use std::convert::TryInto;\n\nuse std::fmt;\n\nuse std::ops::{Deref, Index};\n\n\n\nuse bytes::{Buf, Bytes};\n\n\n\nuse crate::wad::{self, Lump, Wad};\n\n\n\n/// A bank of patches from the `PNAMES` lump.\n\n///\n\n/// The patches are all optional because sometimes `PNAMES` lists missing patches. The shareware\n\n/// version of `doom.wad` is missing the `TEXTURE2` textures from the registered game, yet `PNAMES`\n\n/// still lists all of the patches. 
It still loads because none of the textures in `TEXTURE1` use\n\n/// the missing patches.\n\n#[derive(Clone, Debug)]\n\npub struct PatchBank(Vec<(String, Option<Patch>)>);\n\n\n\nimpl PatchBank {\n\n /// Loads all the patches from a [`Wad`].\n\n ///\n", "file_path": "src/assets/patch.rs", "rank": 86, "score": 17.366030751047436 }, { "content": " fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n write!(fmt, \"{} ({}x{})\", self.name, self.width, self.height)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Post {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n let Self { y_offset, pixels } = self;\n\n\n\n fmt.debug_struct(\"Post\")\n\n .field(\"y_offset\", &y_offset)\n\n .field(\"height\", &pixels.len())\n\n .finish()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::wad::test::*;\n", "file_path": "src/assets/patch.rs", "rank": 87, "score": 17.353443582927486 }, { "content": "use std::ops::{Deref, Index};\n\n\n\nuse bytes::Buf;\n\n\n\nuse crate::wad::{self, Lumps};\n\n\n\n/// A list of [vertexes] for a particular map, indexed by number.\n\n///\n\n/// Wannabe pedants should note that according to [Merriam-Webster] the plural of \"vertex\" can be\n\n/// either \"vertices\" or \"vertexes\". 
In this codebase we use id Software's spelling.\n\n///\n\n/// [vertexes]: Vertex\n\n/// [map]: crate::map::Map\n\n/// [Merriam-Webster]: https://www.merriam-webster.com/dictionary/vertex\n\n#[derive(Debug)]\n\npub struct Vertexes(Vec<Vertex>);\n\n\n\nimpl Vertexes {\n\n /// Loads a map's vertexes from its `VERTEXES` lump.\n\n pub fn load(lumps: &Lumps) -> wad::Result<Self> {\n", "file_path": "src/map/vertex.rs", "rank": 88, "score": 16.035199697619298 }, { "content": " let index: usize = index.into();\n\n let rgb: &[u8; 3] = self.raw[index * 3..index * 3 + 3].try_into().unwrap();\n\n // SAFETY: `[u8; 3]` and `(u8, u8, u8)` have the same size and layout.\n\n unsafe { mem::transmute(rgb) }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::wad::test::*;\n\n\n\n #[test]\n\n fn load() {\n\n let mut palettes = PaletteBank::load(&DOOM_WAD).unwrap();\n\n\n\n assert_eq!(palettes.count(), 14);\n\n\n\n let p0 = palettes.switch(0);\n\n assert_eq!(p0[0], (0, 0, 0));\n\n assert_eq!(p0[255], (167, 107, 107));\n\n\n\n let p13 = palettes.switch(13);\n\n assert_eq!(p13[0], (0, 32, 0));\n\n assert_eq!(p13[255], (147, 125, 94));\n\n }\n\n}\n", "file_path": "src/assets/palette.rs", "rank": 89, "score": 15.948140353008922 }, { "content": " let lump = lumps[4].expect_name(\"VERTEXES\")?;\n\n\n\n let mut vertexes = Vec::with_capacity(lump.size() / 4);\n\n let mut cursor = lump.cursor();\n\n\n\n while cursor.has_remaining() {\n\n cursor.need(4)?;\n\n let x = cursor.get_i16_le();\n\n let y = cursor.get_i16_le();\n\n vertexes.push(Vertex { x, y });\n\n }\n\n\n\n cursor.done()?;\n\n\n\n Ok(Self(vertexes))\n\n }\n\n\n\n /// Looks up a vertex number.\n\n pub fn get(&self, number: u16) -> Option<&Vertex> {\n\n self.0.get(usize::from(number))\n", "file_path": "src/map/vertex.rs", "rank": 90, "score": 15.69811469248252 }, { "content": "use std::fmt;\n\n\n\nuse crate::assets::Assets;\n\nuse crate::map::{Linedefs, Sectors, Sidedefs, Vertexes};\n\nuse crate::wad::{self, 
Lump, Wad};\n\n\n\n/// Contains all the level geometry, monsters, items, and other things that make up a map.\n\n#[derive(Debug)]\n\npub struct Map {\n\n /// Map name such as `E1M1` or `MAP01`.\n\n pub name: String,\n\n\n\n /// A list of things indexed by number.\n\n pub things: (),\n\n\n\n /// A list of vertexes indexed by number.\n\n pub vertexes: Vertexes,\n\n\n\n /// A list of sidedefs indexed by number.\n\n pub sidedefs: Sidedefs,\n", "file_path": "src/map/map.rs", "rank": 91, "score": 15.284024717129176 }, { "content": "use crate::assets::FlatBank;\n\nuse crate::assets::PaletteBank;\n\nuse crate::assets::TextureBank;\n\nuse crate::wad::{self, Wad};\n\n\n\n/// Holds all of the fixed assets loaded from a [`Wad`]: graphics, sounds, music, text strings, etc.\n\n/// Map data is stored [elsewhere] since typically only one map is loaded at a time.\n\n///\n\n/// [elsewhere]: crate::map::Map\n\n#[derive(Debug)]\n\npub struct Assets {\n\n pub palette_bank: PaletteBank,\n\n pub flat_bank: FlatBank,\n\n pub texture_bank: TextureBank,\n\n}\n\n\n\nimpl Assets {\n\n /// Loads assets from a [`Wad`].\n\n pub fn load(wad: &Wad) -> wad::Result<Self> {\n\n let palette_bank = PaletteBank::load(wad)?;\n", "file_path": "src/assets/assets.rs", "rank": 92, "score": 15.110601890349898 }, { "content": " };\n\n\n\n // Helper function to verify a sector number.\n\n let sector_number = |sector: u16| -> wad::Result<u16> {\n\n sectors.get(sector).ok_or_else(|| {\n\n lump.error(format!(\n\n \"sidedef #{} has invalid sector #{}\",\n\n sidedefs.len(),\n\n sector\n\n ))\n\n })?;\n\n\n\n Ok(sector)\n\n };\n\n\n\n cursor.need(30)?;\n\n let x_offset = cursor.get_i16_le();\n\n let y_offset = cursor.get_i16_le();\n\n let upper_texture = texture_name(cursor.get_name(), \"upper\")?;\n\n let lower_texture = texture_name(cursor.get_name(), \"lower\")?;\n", "file_path": "src/map/sidedef.rs", "rank": 93, "score": 14.924883528445495 }, { "content": " let mut sidedefs = Vec::with_capacity(lump.size() / 
30);\n\n let mut cursor = lump.cursor();\n\n\n\n while cursor.has_remaining() {\n\n // Helper function to verify a texture name.\n\n let texture_name = |name: String, which: &str| -> wad::Result<Option<String>> {\n\n if name == \"-\" {\n\n return Ok(None);\n\n }\n\n\n\n assets.texture_bank.get(&name).ok_or_else(|| {\n\n lump.error(format!(\n\n \"sidedef #{} has invalid {} texture {:?}\",\n\n sidedefs.len(),\n\n which,\n\n name\n\n ))\n\n })?;\n\n\n\n Ok(Some(name))\n", "file_path": "src/map/sidedef.rs", "rank": 94, "score": 14.218968945376382 }, { "content": " }\n\n\n\n /// Returns the active palette.\n\n pub fn active(&self) -> &Palette {\n\n &self.palettes[self.active]\n\n }\n\n\n\n /// Switches the active palette.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if `index` is out of range.\n\n pub fn switch(&mut self, index: usize) -> &Palette {\n\n assert!(index < self.count());\n\n self.active = index;\n\n self.active()\n\n }\n\n}\n\n\n\n/// A 256-color palette. Part of a [`PaletteBank`].\n", "file_path": "src/assets/palette.rs", "rank": 95, "score": 13.58277566739088 }, { "content": " /// If the index is valid but the patch is missing, returns `Err(Some(name))` with the name of\n\n /// the missing patch. 
This happens with the shareware version of `doom.wad`.\n\n pub fn get(&self, index: u16) -> Result<&Patch, Option<&str>> {\n\n let (name, patch): &(String, Option<Patch>) = self.0.get(usize::from(index)).ok_or(None)?;\n\n patch.as_ref().ok_or(Some(name))\n\n }\n\n}\n\n\n\nimpl Index<u16> for PatchBank {\n\n type Output = Patch;\n\n\n\n /// Looks up a patch number.\n\n fn index(&self, index: u16) -> &Self::Output {\n\n self.0[usize::from(index)].1.as_ref().unwrap()\n\n }\n\n}\n\n\n\nimpl Deref for PatchBank {\n\n type Target = Vec<(String, Option<Patch>)>;\n\n\n", "file_path": "src/assets/patch.rs", "rank": 96, "score": 13.52653296606602 }, { "content": " }\n\n\n\n // The common case.\n\n (y_offset, _) => y_offset,\n\n };\n\n\n\n cursor.need(1)?;\n\n let length = cursor.get_u8() as usize;\n\n\n\n cursor.need(length + 2)?;\n\n let _unused = cursor.get_u8();\n\n let pixels = cursor.split_to(length);\n\n let _unused = cursor.get_u8();\n\n\n\n posts.push(Post { y_offset, pixels });\n\n last_y_offset = Some(y_offset);\n\n }\n\n\n\n cursor.clear();\n\n cursor.done()?;\n", "file_path": "src/assets/patch.rs", "rank": 97, "score": 13.293004509816935 }, { "content": "\n\n /// A list of linedefs indexed by number.\n\n pub linedefs: Linedefs,\n\n\n\n /// A list of sectors indexed by number.\n\n pub sectors: Sectors,\n\n}\n\n\n\nimpl Map {\n\n /// Loads a map. Maps are typically named `ExMy` for DOOM or `MAPnn` for DOOM II.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns `Ok(None)` if the map is missing.\n\n pub fn load(wad: &Wad, name: &str, assets: &Assets) -> wad::Result<Option<Self>> {\n\n let lumps = match wad.try_lumps_following(name, 11)? 
{\n\n Some(lumps) => lumps,\n\n None => return Ok(None),\n\n };\n\n\n", "file_path": "src/map/map.rs", "rank": 98, "score": 13.166877452688858 }, { "content": " let mut sectors = Vec::with_capacity(lump.size() / 26);\n\n let mut cursor = lump.cursor();\n\n\n\n while cursor.has_remaining() {\n\n // Helper function to verify a flat name.\n\n let flat_name = |name: String, which: &str| -> wad::Result<String> {\n\n assets.flat_bank.get(&name).ok_or_else(|| {\n\n lump.error(format!(\n\n \"sector #{} has invalid {} flat {:?}\",\n\n sectors.len(),\n\n which,\n\n name\n\n ))\n\n })?;\n\n\n\n Ok(name)\n\n };\n\n\n\n cursor.need(26)?;\n\n let floor_height = cursor.get_i16_le();\n", "file_path": "src/map/sector.rs", "rank": 99, "score": 12.838661816897622 } ]
Rust
src/dataflow-types/src/client/controller/storage.rs
pjmore/materialize
5a46f9b30910679d504c94438b1d1c5ace4e80c7
use std::collections::BTreeMap; use differential_dataflow::lattice::Lattice; use timely::progress::{Antichain, Timestamp}; use tracing::error; use super::SinceUpperMap; use crate::client::SourceConnector; use crate::client::{Client, Command, StorageCommand}; use crate::Update; use mz_expr::GlobalId; use mz_expr::PartitionId; pub struct StorageControllerState<T> { source_descriptions: BTreeMap<GlobalId, Option<(crate::sources::SourceDesc, Antichain<T>)>>, pub(super) since_uppers: SinceUpperMap<T>, } pub struct StorageController<'a, C, T> { pub(super) storage: &'a mut StorageControllerState<T>, pub(super) client: &'a mut C, } #[derive(Debug)] pub enum StorageError { SourceIdReused(GlobalId), } impl<T> StorageControllerState<T> { pub(super) fn new() -> Self { Self { source_descriptions: BTreeMap::default(), since_uppers: SinceUpperMap::default(), } } } impl<'a, C: Client<T>, T: Timestamp + Lattice> StorageController<'a, C, T> { pub async fn create_sources( &mut self, mut bindings: Vec<(GlobalId, (crate::sources::SourceDesc, Antichain<T>))>, ) -> Result<(), StorageError> { bindings.sort_by_key(|b| b.0); bindings.dedup(); for pos in 1..bindings.len() { if bindings[pos - 1].0 == bindings[pos].0 { Err(StorageError::SourceIdReused(bindings[pos].0))?; } } for (id, description_since) in bindings.iter() { match self.storage.source_descriptions.get(&id) { Some(None) => Err(StorageError::SourceIdReused(*id))?, Some(Some(prior_description)) => { if prior_description != description_since { Err(StorageError::SourceIdReused(*id))? 
} } None => { } } } for (id, (description, since)) in bindings.iter() { self.storage .source_descriptions .insert(*id, Some((description.clone(), since.clone()))); self.storage.since_uppers.insert( *id, (since.clone(), Antichain::from_elem(Timestamp::minimum())), ); } self.client .send(Command::Storage(StorageCommand::CreateSources(bindings))) .await; Ok(()) } pub async fn drop_sources(&mut self, identifiers: Vec<GlobalId>) { for id in identifiers.iter() { if !self.storage.source_descriptions.contains_key(id) { error!("Source id {} dropped without first being created", id); } else { self.storage.source_descriptions.insert(*id, None); } } self.client .send(Command::Storage(StorageCommand::DropSources(identifiers))) .await } pub async fn table_insert(&mut self, id: GlobalId, updates: Vec<Update<T>>) { self.client .send(Command::Storage(StorageCommand::Insert { id, updates })) .await } pub async fn update_durability_frontiers(&mut self, updates: Vec<(GlobalId, Antichain<T>)>) { self.client .send(Command::Storage(StorageCommand::DurabilityFrontierUpdates( updates, ))) .await } pub async fn add_source_timestamping( &mut self, id: GlobalId, connector: SourceConnector, bindings: Vec<(PartitionId, T, crate::sources::MzOffset)>, ) { self.client .send(Command::Storage(StorageCommand::AddSourceTimestamping { id, connector, bindings, })) .await } pub async fn allow_source_compaction(&mut self, frontiers: Vec<(GlobalId, Antichain<T>)>) { for (id, frontier) in frontiers.iter() { self.storage.since_uppers.advance_since_for(*id, frontier); } self.client .send(Command::Storage(StorageCommand::AllowSourceCompaction( frontiers, ))) .await } pub async fn drop_source_timestamping(&mut self, id: GlobalId) { self.client .send(Command::Storage(StorageCommand::DropSourceTimestamping { id, })) .await } pub async fn advance_all_table_timestamps(&mut self, advance_to: T) { self.client .send(Command::Storage(StorageCommand::AdvanceAllLocalInputs { advance_to, })) .await } }
use std::collections::BTreeMap; use differential_dataflow::lattice::Lattice; use timely::progress::{Antichain, Timestamp}; use tracing::error; use super::SinceUpperMap; use crate::client::SourceConnector; use crate::client::{Client, Command, StorageCommand}; use crate::Update; use mz_expr::GlobalId; use mz_expr::PartitionId; pub struct StorageControllerState<T> { source_descriptions: BTreeMap<GlobalId, Option<(crate::sources::SourceDesc, Antichain<T>)>>, pub(super) since_uppers: SinceUpperMap<T>, } pub struct StorageController<'a, C, T> { pub(super) storage: &'a mut StorageControllerState<T>, pub(super) client: &'a mut C, } #[derive(Debug)] pub enum StorageError { SourceIdReused(GlobalId), } impl<T> StorageControllerState<T> { pub(super) fn new() -> Self { Self { source_descriptions: BTreeMap::default(), since_uppers: SinceUpperMap::default(), } } } impl<'a, C: Client<T>, T: Timestamp + Lattice> StorageController<'a, C, T> { pub async fn create_sources( &mut self, mut bindings: Vec<(GlobalId, (crate::sources::SourceDesc, Antichain<T>))>, ) -> Result<(), StorageError> { bindings.sort_by_key(|b| b.0); bindings.dedup(); for pos in 1..bindings.len() { if bindings[pos - 1].0 == bindings[pos].0 { Err(StorageError::SourceIdReused(bindings[pos].0))?; } } for (id, description_since) in bindings.iter() { match self.storage.source_descriptions.get(&id) { Some(None) => Err(StorageError::SourceIdReused(*id))?, Some(Some(prior_description)) => {
} None => { } } } for (id, (description, since)) in bindings.iter() { self.storage .source_descriptions .insert(*id, Some((description.clone(), since.clone()))); self.storage.since_uppers.insert( *id, (since.clone(), Antichain::from_elem(Timestamp::minimum())), ); } self.client .send(Command::Storage(StorageCommand::CreateSources(bindings))) .await; Ok(()) } pub async fn drop_sources(&mut self, identifiers: Vec<GlobalId>) { for id in identifiers.iter() { if !self.storage.source_descriptions.contains_key(id) { error!("Source id {} dropped without first being created", id); } else { self.storage.source_descriptions.insert(*id, None); } } self.client .send(Command::Storage(StorageCommand::DropSources(identifiers))) .await } pub async fn table_insert(&mut self, id: GlobalId, updates: Vec<Update<T>>) { self.client .send(Command::Storage(StorageCommand::Insert { id, updates })) .await } pub async fn update_durability_frontiers(&mut self, updates: Vec<(GlobalId, Antichain<T>)>) { self.client .send(Command::Storage(StorageCommand::DurabilityFrontierUpdates( updates, ))) .await } pub async fn add_source_timestamping( &mut self, id: GlobalId, connector: SourceConnector, bindings: Vec<(PartitionId, T, crate::sources::MzOffset)>, ) { self.client .send(Command::Storage(StorageCommand::AddSourceTimestamping { id, connector, bindings, })) .await } pub async fn allow_source_compaction(&mut self, frontiers: Vec<(GlobalId, Antichain<T>)>) { for (id, frontier) in frontiers.iter() { self.storage.since_uppers.advance_since_for(*id, frontier); } self.client .send(Command::Storage(StorageCommand::AllowSourceCompaction( frontiers, ))) .await } pub async fn drop_source_timestamping(&mut self, id: GlobalId) { self.client .send(Command::Storage(StorageCommand::DropSourceTimestamping { id, })) .await } pub async fn advance_all_table_timestamps(&mut self, advance_to: T) { self.client .send(Command::Storage(StorageCommand::AdvanceAllLocalInputs { advance_to, })) .await } }
if prior_description != description_since { Err(StorageError::SourceIdReused(*id))? }
if_condition
[ { "content": "pub fn build_compression(cmd: &mut BuiltinCommand) -> Result<Compression, anyhow::Error> {\n\n match cmd.args.opt_string(\"compression\") {\n\n Some(s) => s.parse(),\n\n None => Ok(Compression::None),\n\n }\n\n}\n\n\n", "file_path": "src/testdrive/src/action/file.rs", "rank": 0, "score": 336063.9367070321 }, { "content": "pub fn build_regex(mut cmd: BuiltinCommand) -> Result<RegexAction, anyhow::Error> {\n\n let regex = cmd.args.parse(\"match\")?;\n\n let replacement = cmd\n\n .args\n\n .opt_string(\"replacement\")\n\n .unwrap_or_else(|| DEFAULT_REGEX_REPLACEMENT.into());\n\n cmd.args.done()?;\n\n Ok(RegexAction { regex, replacement })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for RegexAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n state.regex = Some(self.regex.clone());\n\n state.regex_replacement = self.replacement.clone();\n\n Ok(ControlFlow::Continue)\n\n }\n\n}\n\n\n\npub struct SqlTimeoutAction {\n\n duration: Option<Duration>,\n\n}\n\n\n", "file_path": "src/testdrive/src/action/set.rs", "rank": 1, "score": 331335.76112902263 }, { "content": "pub fn build_append(mut cmd: BuiltinCommand) -> Result<AppendAction, anyhow::Error> {\n\n let path = build_path(&mut cmd)?;\n\n let compression = build_compression(&mut cmd)?;\n\n let trailing_newline = cmd.args.opt_bool(\"trailing-newline\")?.unwrap_or(true);\n\n cmd.args.done()?;\n\n let mut contents = vec![];\n\n for line in cmd.input {\n\n contents.extend(bytes::unescape(line.as_bytes())?);\n\n contents.push(b'\\n');\n\n }\n\n if !trailing_newline {\n\n contents.pop();\n\n }\n\n Ok(AppendAction {\n\n path,\n\n contents,\n\n compression,\n\n })\n\n}\n\n\n", "file_path": "src/testdrive/src/action/file.rs", "rank": 2, "score": 331335.76112902263 }, { "content": "pub fn build_sleep(mut cmd: BuiltinCommand) -> Result<SleepAction, anyhow::Error> {\n\n let arg = 
cmd.args.string(\"duration\")?;\n\n let duration = mz_repr::util::parse_duration(&arg).context(\"parsing duration\")?;\n\n Ok(SleepAction {\n\n duration,\n\n random: false,\n\n })\n\n}\n\n\n\nimpl SyncAction for SleepAction {\n\n fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n fn redo(&self, _: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n let sleep = if self.random {\n\n let mut rng = rand::thread_rng();\n\n rng.gen_range(Duration::from_secs(0)..self.duration)\n\n } else {\n\n self.duration\n\n };\n\n println!(\"Sleeping for {:?}\", sleep);\n\n thread::sleep(sleep);\n\n Ok(ControlFlow::Continue)\n\n }\n\n}\n", "file_path": "src/testdrive/src/action/sleep.rs", "rank": 3, "score": 331335.7611290227 }, { "content": "pub fn build_delete(mut cmd: BuiltinCommand) -> Result<DeleteAction, anyhow::Error> {\n\n let path = build_path(&mut cmd)?;\n\n cmd.args.done()?;\n\n Ok(DeleteAction { path })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for DeleteAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n let path = state.temp_path.join(&self.path);\n\n println!(\"Deleting file {}\", path.display());\n\n fs::remove_file(&path).await?;\n\n Ok(ControlFlow::Continue)\n\n }\n\n}\n", "file_path": "src/testdrive/src/action/file.rs", "rank": 4, "score": 331335.76112902263 }, { "content": "pub fn build_sql(mut cmd: SqlCommand) -> Result<SqlAction, anyhow::Error> {\n\n let stmts = mz_sql_parser::parser::parse_statements(&cmd.query)\n\n .with_context(|| format!(\"unable to parse SQL: {}\", cmd.query))?;\n\n if stmts.len() != 1 {\n\n bail!(\"expected one statement, but got {}\", stmts.len());\n\n }\n\n if let SqlOutput::Full { expected_rows, .. 
} = &mut cmd.expected_output {\n\n // TODO(benesch): one day we'll support SQL queries where order matters.\n\n expected_rows.sort();\n\n }\n\n Ok(SqlAction {\n\n cmd,\n\n stmt: stmts.into_element(),\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for SqlAction {\n\n async fn undo(&self, state: &mut State) -> Result<(), anyhow::Error> {\n\n match &self.stmt {\n", "file_path": "src/testdrive/src/action/sql.rs", "rank": 5, "score": 331335.76112902263 }, { "content": "pub fn build_execute(mut cmd: BuiltinCommand) -> Result<ExecuteAction, anyhow::Error> {\n\n let command = cmd.args.string(\"command\")?;\n\n Ok(ExecuteAction {\n\n command,\n\n expected_output: cmd.input.join(\"\\n\"),\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for ExecuteAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n let output = Command::new(\"psql\")\n\n .args(&[\n\n \"--pset\",\n\n \"footer=off\",\n\n \"--command\",\n", "file_path": "src/testdrive/src/action/psql.rs", "rank": 6, "score": 331335.7611290227 }, { "content": "pub fn build_request(mut cmd: BuiltinCommand) -> Result<RequestAction, anyhow::Error> {\n\n Ok(RequestAction {\n\n url: cmd.args.string(\"url\")?,\n\n method: cmd.args.parse(\"method\")?,\n\n content_type: cmd.args.opt_string(\"content-type\"),\n\n body: cmd.input.join(\"\\n\"),\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for RequestAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, _: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n println!(\"$ http-request {} {}\\n{}\", self.method, self.url, self.body);\n\n\n\n let client = reqwest::Client::new();\n\n\n", "file_path": "src/testdrive/src/action/http.rs", "rank": 7, "score": 331335.76112902263 }, { "content": "/// Sanity check whether we can decode the Blob's persisted meta object, and delete\n\n/// 
all data if the encoded version is less than what the current implementation supports.\n\n///\n\n/// TODO: this is a hack and we will need to get rid of this once we have a\n\n/// proper backwards compatibility policy.\n\npub fn check_meta_version_maybe_delete_data<B: Blob>(b: &mut B) -> Result<(), Error> {\n\n let meta = match block_on(b.get(\"META\"))? {\n\n None => return Ok(()),\n\n Some(bytes) => bytes,\n\n };\n\n\n\n let current_version = ProtoMeta::ENCODING_VERSION;\n\n let persisted_version = ProtoMeta::encoded_version(&meta)?;\n\n\n\n if current_version == persisted_version {\n\n // Nothing to do here, everything is working as expected.\n\n Ok(())\n\n } else if current_version > persisted_version {\n\n // Delete all the keys, as we are upgrading to a new version.\n\n info!(\n\n \"Persistence beta detected version mismatch. Deleting all previously persisted data as part of upgrade from version {} to {}.\",\n\n persisted_version,\n\n current_version\n\n );\n\n let keys = block_on(b.list_keys())?;\n", "file_path": "src/persist/src/storage.rs", "rank": 8, "score": 331317.6436133818 }, { "content": "/// Retrieve number of partitions for a given `topic` using the given `client`\n\npub fn get_partitions<C: ClientContext>(\n\n client: &Client<C>,\n\n topic: &str,\n\n timeout: Duration,\n\n) -> Result<Vec<i32>, anyhow::Error> {\n\n let meta = client.fetch_metadata(Some(&topic), timeout)?;\n\n if meta.topics().len() != 1 {\n\n bail!(\n\n \"topic {} has {} metadata entries; expected 1\",\n\n topic,\n\n meta.topics().len()\n\n );\n\n }\n\n let meta_topic = meta.topics().into_element();\n\n if meta_topic.name() != topic {\n\n bail!(\n\n \"got results for wrong topic {} (expected {})\",\n\n meta_topic.name(),\n\n topic\n\n );\n\n }\n\n\n\n if meta_topic.partitions().len() == 0 {\n\n bail!(\"topic {} does not exist\", topic);\n\n }\n\n\n\n Ok(meta_topic.partitions().iter().map(|x| x.id()).collect())\n\n}\n", "file_path": "src/kafka-util/src/client.rs", "rank": 9, 
"score": 329453.96568990266 }, { "content": "pub fn build_ingest(mut cmd: BuiltinCommand) -> Result<IngestAction, anyhow::Error> {\n\n let topic_prefix = format!(\"testdrive-{}\", cmd.args.string(\"topic\")?);\n\n let partition = cmd.args.opt_parse::<i32>(\"partition\")?;\n\n let start_iteration = cmd.args.opt_parse::<isize>(\"start-iteration\")?.unwrap_or(0);\n\n let repeat = cmd.args.opt_parse::<isize>(\"repeat\")?.unwrap_or(1);\n\n let publish = cmd.args.opt_bool(\"publish\")?.unwrap_or(false);\n\n let format = match cmd.args.string(\"format\")?.as_str() {\n\n \"avro\" => Format::Avro {\n\n schema: cmd.args.string(\"schema\")?,\n\n confluent_wire_format: cmd.args.opt_bool(\"confluent-wire-format\")?.unwrap_or(true),\n\n },\n\n \"protobuf\" => {\n\n let descriptor_file = cmd.args.string(\"descriptor-file\")?;\n\n let message = cmd.args.string(\"message\")?;\n\n Format::Protobuf {\n\n descriptor_file,\n\n message,\n\n // This was introduced after the avro format's confluent-wire-format, so it defaults to\n\n // false\n\n confluent_wire_format: cmd.args.opt_bool(\"confluent-wire-format\")?.unwrap_or(false),\n", "file_path": "src/testdrive/src/action/kafka/ingest.rs", "rank": 10, "score": 326809.5075148716 }, { "content": "pub fn build_ingest(mut cmd: BuiltinCommand) -> Result<IngestAction, anyhow::Error> {\n\n let stream_prefix = format!(\"testdrive-{}\", cmd.args.string(\"stream\")?);\n\n match cmd.args.string(\"format\")?.as_str() {\n\n \"bytes\" => (),\n\n f => bail!(\"unsupported message format for Kinesis: {}\", f),\n\n }\n\n cmd.args.done()?;\n\n\n\n Ok(IngestAction {\n\n stream_prefix,\n\n rows: cmd.input,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for IngestAction {\n\n async fn undo(&self, _state: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n", "file_path": "src/testdrive/src/action/kinesis/ingest.rs", "rank": 11, "score": 326809.5075148716 }, { "content": "pub fn build_execute(mut cmd: BuiltinCommand) -> Result<ExecuteAction, 
anyhow::Error> {\n\n let connection = cmd.args.string(\"connection\")?;\n\n cmd.args.done()?;\n\n Ok(ExecuteAction {\n\n connection,\n\n queries: cmd.input,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for ExecuteAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n let client;\n\n let client = if self.connection.starts_with(\"postgres://\") {\n\n client = postgres_client(&self.connection).await?;\n\n &client\n", "file_path": "src/testdrive/src/action/postgres/execute.rs", "rank": 12, "score": 326809.5075148716 }, { "content": "pub fn build_random_sleep(mut cmd: BuiltinCommand) -> Result<SleepAction, anyhow::Error> {\n\n let arg = cmd.args.string(\"duration\")?;\n\n let duration = mz_repr::util::parse_duration(&arg).context(\"parsing duration\")?;\n\n Ok(SleepAction {\n\n duration,\n\n random: true,\n\n })\n\n}\n\n\n", "file_path": "src/testdrive/src/action/sleep.rs", "rank": 13, "score": 326809.5075148716 }, { "content": "pub fn build_verify(mut cmd: BuiltinCommand) -> Result<VerifyAction, anyhow::Error> {\n\n let stream_prefix = cmd.args.string(\"stream\")?;\n\n cmd.args.done()?;\n\n Ok(VerifyAction {\n\n stream_prefix,\n\n expected_records: cmd.input.into_iter().collect(),\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for VerifyAction {\n\n async fn undo(&self, _state: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n let stream_name = format!(\"testdrive-{}-{}\", self.stream_prefix, state.seed);\n\n\n\n let mut shard_iterators = get_shard_iterators(&state.kinesis_client, &stream_name).await?;\n\n let timer = Instant::now();\n", "file_path": "src/testdrive/src/action/kinesis/verify.rs", "rank": 14, "score": 326809.50751487166 }, { "content": "pub fn build_verify(mut cmd: BuiltinCommand) -> Result<VerifyAction, 
anyhow::Error> {\n\n let sink = cmd.args.string(\"sink\")?;\n\n let expected = cmd.input;\n\n cmd.args.done()?;\n\n if sink.contains(path::MAIN_SEPARATOR) {\n\n // The goal isn't security, but preventing mistakes.\n\n bail!(\"separators in file sink names are forbidden\");\n\n }\n\n Ok(VerifyAction { sink, expected })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for VerifyAction {\n\n async fn undo(&self, _state: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n let path = Retry::default()\n\n .max_duration(state.default_timeout)\n", "file_path": "src/testdrive/src/action/avro_ocf.rs", "rank": 15, "score": 326809.5075148716 }, { "content": "pub fn build_connect(mut cmd: BuiltinCommand) -> Result<ConnectAction, anyhow::Error> {\n\n let name = cmd.args.string(\"name\")?;\n\n if name.starts_with(\"postgres://\") {\n\n bail!(\"connection name can not be url\");\n\n }\n\n\n\n let url = cmd.args.string(\"url\")?;\n\n cmd.args.done()?;\n\n Ok(ConnectAction { name, url })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for ConnectAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n let client = postgres_client(&self.url).await?;\n\n state.postgres_clients.insert(self.name.clone(), client);\n\n Ok(ControlFlow::Continue)\n\n }\n\n}\n", "file_path": "src/testdrive/src/action/postgres/connect.rs", "rank": 16, "score": 326809.5075148716 }, { "content": "pub fn build_publish(mut cmd: BuiltinCommand) -> Result<PublishAction, anyhow::Error> {\n\n let subject = cmd.args.string(\"subject\")?;\n\n let schema_type = match cmd.args.string(\"schema-type\")?.as_str() {\n\n \"avro\" => SchemaType::Avro,\n\n \"json\" => SchemaType::Json,\n\n \"protobuf\" => SchemaType::Protobuf,\n\n s => bail!(\"unknown schema type: {}\", s),\n\n };\n\n let references = match 
cmd.args.opt_string(\"references\") {\n\n None => vec![],\n\n Some(s) => s.split(',').map(|s| s.into()).collect(),\n\n };\n\n Ok(PublishAction {\n\n subject,\n\n schema: cmd.input.join(\"\\n\"),\n\n schema_type,\n\n references,\n\n })\n\n}\n\n\n", "file_path": "src/testdrive/src/action/schema_registry.rs", "rank": 17, "score": 326809.5075148716 }, { "content": "pub fn build_append(mut cmd: BuiltinCommand) -> Result<AppendAction, anyhow::Error> {\n\n let path = cmd.args.string(\"path\")?;\n\n let records = cmd.input;\n\n cmd.args.done()?;\n\n if path.contains(path::MAIN_SEPARATOR) {\n\n // The goal isn't security, but preventing mistakes.\n\n bail!(\"separators in paths are forbidden\");\n\n }\n\n Ok(AppendAction { path, records })\n\n}\n\n\n\nimpl SyncAction for AppendAction {\n\n fn undo(&self, _state: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n let path = state.temp_path.join(&self.path);\n\n println!(\"Appending to {}\", path.display());\n\n let file = OpenOptions::new().read(true).write(true).open(path)?;\n\n let mut writer = Writer::append_to(file)?;\n\n write_records(&mut writer, &self.records)?;\n\n Ok(ControlFlow::Continue)\n\n }\n\n}\n\n\n", "file_path": "src/testdrive/src/action/avro_ocf.rs", "rank": 18, "score": 326809.5075148716 }, { "content": "pub fn build_execute(mut cmd: BuiltinCommand) -> Result<ExecuteAction, anyhow::Error> {\n\n let name = cmd.args.string(\"name\")?;\n\n cmd.args.done()?;\n\n Ok(ExecuteAction {\n\n name,\n\n queries: cmd.input,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for ExecuteAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n let conn = state\n\n .mysql_clients\n\n .get_mut(&self.name)\n\n .ok_or_else(|| anyhow!(\"MySQL connection '{}' not found\", &self.name))?;\n", "file_path": 
"src/testdrive/src/action/mysql/execute.rs", "rank": 19, "score": 326809.5075148716 }, { "content": "pub fn build_write(mut cmd: BuiltinCommand) -> Result<WriteAction, anyhow::Error> {\n\n let path = cmd.args.string(\"path\")?;\n\n let schema = cmd.args.string(\"schema\")?;\n\n let codec = cmd.args.opt_parse(\"codec\")?;\n\n\n\n let records = cmd.input;\n\n cmd.args.done()?;\n\n if path.contains(path::MAIN_SEPARATOR) {\n\n // The goal isn't security, but preventing mistakes.\n\n bail!(\"separators in paths are forbidden\");\n\n }\n\n Ok(WriteAction {\n\n path,\n\n schema,\n\n records,\n\n codec,\n\n })\n\n}\n\n\n\nimpl SyncAction for WriteAction {\n", "file_path": "src/testdrive/src/action/avro_ocf.rs", "rank": 20, "score": 326809.5075148716 }, { "content": "pub fn build_connect(mut cmd: BuiltinCommand) -> Result<ConnectAction, anyhow::Error> {\n\n let name = cmd.args.string(\"name\")?;\n\n let url = cmd.args.string(\"url\")?;\n\n // We allow the password to be specified outside of the URL\n\n // in case it contains special characters\n\n let password = cmd.args.opt_string(\"password\");\n\n cmd.args.done()?;\n\n\n\n Ok(ConnectAction {\n\n name,\n\n url,\n\n password,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for ConnectAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n", "file_path": "src/testdrive/src/action/mysql/connect.rs", "rank": 21, "score": 326809.5075148716 }, { "content": "pub fn build_verify(mut cmd: BuiltinCommand) -> Result<VerifyAction, anyhow::Error> {\n\n let format = match cmd.args.string(\"format\")?.as_str() {\n\n \"avro\" => SinkFormat::Avro,\n\n \"json\" => SinkFormat::Json {\n\n key: cmd.args.parse(\"key\")?,\n\n },\n\n f => bail!(\"unknown format: {}\", f),\n\n };\n\n let sink = cmd.args.string(\"sink\")?;\n\n let consistency = match cmd.args.opt_string(\"consistency\").as_deref() {\n\n Some(\"debezium\") => Some(SinkConsistencyFormat::Debezium),\n\n Some(s) => bail!(\"unknown sink 
consistency format {}\", s),\n\n None => None,\n\n };\n\n\n\n let sort_messages = cmd.args.opt_bool(\"sort-messages\")?.unwrap_or(false);\n\n let expected_messages = cmd.input;\n\n if expected_messages.len() == 0 {\n\n // verify with 0 messages doesn't check that no messages have been written -\n\n // it 'verifies' 0 messages and trivially returns true\n", "file_path": "src/testdrive/src/action/kafka/verify.rs", "rank": 22, "score": 326809.5075148716 }, { "content": "pub fn build_create_bucket(mut cmd: BuiltinCommand) -> Result<CreateBucketAction, anyhow::Error> {\n\n let bucket_prefix = format!(\"testdrive-{}\", cmd.args.string(\"bucket\")?);\n\n cmd.args.done()?;\n\n Ok(CreateBucketAction { bucket_prefix })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for CreateBucketAction {\n\n async fn undo(&self, _state: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n let bucket = format!(\"{}-{}\", self.bucket_prefix, state.seed);\n\n println!(\"Creating S3 bucket {}\", bucket);\n\n\n\n match state\n\n .s3_client\n\n .create_bucket()\n\n .bucket(&bucket)\n", "file_path": "src/testdrive/src/action/s3.rs", "rank": 23, "score": 322472.51133988117 }, { "content": "pub fn build_delete_object(mut cmd: BuiltinCommand) -> Result<DeleteObjectAction, anyhow::Error> {\n\n let bucket_prefix = format!(\"testdrive-{}\", cmd.args.string(\"bucket\")?);\n\n cmd.args.done()?;\n\n Ok(DeleteObjectAction {\n\n bucket_prefix,\n\n keys: cmd.input,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for DeleteObjectAction {\n\n async fn undo(&self, _state: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n let bucket = format!(\"{}-{}\", self.bucket_prefix, state.seed);\n\n println!(\"Deleting S3 objects {}: {}\", bucket, self.keys.join(\", \"));\n\n state\n\n .s3_client\n", "file_path": 
"src/testdrive/src/action/s3.rs", "rank": 24, "score": 322472.5113398811 }, { "content": "pub fn build_execute(mut cmd: BuiltinCommand) -> Result<ExecuteAction, anyhow::Error> {\n\n let name = cmd.args.string(\"name\")?;\n\n cmd.args.done()?;\n\n Ok(ExecuteAction {\n\n name,\n\n queries: cmd.input,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for ExecuteAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n let client = state\n\n .sql_server_clients\n\n .get_mut(&self.name)\n\n .ok_or_else(|| anyhow!(\"connection {} not found\", self.name.quoted()))?;\n", "file_path": "src/testdrive/src/action/sql_server/execute.rs", "rank": 25, "score": 322472.5113398811 }, { "content": "pub fn build_put_object(mut cmd: BuiltinCommand) -> Result<PutObjectAction, anyhow::Error> {\n\n let bucket_prefix = format!(\"testdrive-{}\", cmd.args.string(\"bucket\")?);\n\n let key = cmd.args.string(\"key\")?;\n\n let compression = build_compression(&mut cmd)?;\n\n let contents = cmd.input.join(\"\\n\");\n\n cmd.args.done()?;\n\n Ok(PutObjectAction {\n\n bucket_prefix,\n\n key,\n\n compression,\n\n contents,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for PutObjectAction {\n\n async fn undo(&self, _state: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n", "file_path": "src/testdrive/src/action/s3.rs", "rank": 26, "score": 322472.5113398811 }, { "content": "pub fn build_connect(mut cmd: BuiltinCommand) -> Result<ConnectAction, anyhow::Error> {\n\n let name = cmd.args.string(\"name\")?;\n\n cmd.args.done()?;\n\n\n\n let ado_string = cmd.input.join(\"\\n\");\n\n\n\n let config = Config::from_ado_string(&ado_string).context(\"parsing ADO string: {}\")?;\n\n\n\n Ok(ConnectAction { name, config })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for ConnectAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> {\n\n 
Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n let tcp = TcpStream::connect(self.config.get_addr())\n\n .await\n", "file_path": "src/testdrive/src/action/sql_server/connect.rs", "rank": 27, "score": 322472.5113398811 }, { "content": "pub fn build_wait(mut cmd: BuiltinCommand) -> Result<WaitSchemaAction, anyhow::Error> {\n\n let schema = cmd.args.string(\"schema\")?;\n\n cmd.args.done()?;\n\n Ok(WaitSchemaAction { schema })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for WaitSchemaAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n Retry::default()\n\n .initial_backoff(Duration::from_millis(50))\n\n .factor(1.5)\n\n .max_duration(state.timeout)\n\n .retry_async(|_| async {\n\n state\n\n .ccsr_client\n\n .get_schema_by_subject(&self.schema)\n\n .await\n\n .context(\"fetching schema\")\n\n .and(Ok(()))\n\n })\n\n .await?;\n\n Ok(ControlFlow::Continue)\n\n }\n\n}\n", "file_path": "src/testdrive/src/action/schema_registry.rs", "rank": 28, "score": 322472.5113398811 }, { "content": "pub fn build_sql_timeout(mut cmd: BuiltinCommand) -> Result<SqlTimeoutAction, anyhow::Error> {\n\n let duration = cmd.args.string(\"duration\")?;\n\n let duration = if duration.to_lowercase() == \"default\" {\n\n None\n\n } else {\n\n Some(mz_repr::util::parse_duration(&duration).context(\"parsing duration\")?)\n\n };\n\n cmd.args.done()?;\n\n Ok(SqlTimeoutAction { duration })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for SqlTimeoutAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n state.timeout = cmp::max(\n\n self.duration.unwrap_or(state.default_timeout),\n\n state.default_timeout,\n\n );\n\n Ok(ControlFlow::Continue)\n\n }\n\n}\n", "file_path": 
"src/testdrive/src/action/set.rs", "rank": 29, "score": 322472.5113398811 }, { "content": "pub fn build_verify_schema(mut cmd: BuiltinCommand) -> Result<VerifySchemaAction, anyhow::Error> {\n\n let format = match cmd.args.string(\"format\")?.as_str() {\n\n \"avro\" => SinkFormat::Avro,\n\n \"json\" => SinkFormat::Json {\n\n key: cmd.args.parse(\"key\")?,\n\n },\n\n f => bail!(\"unknown format: {}\", f),\n\n };\n\n let sink = cmd.args.string(\"sink\")?;\n\n\n\n let (key, value) = match &cmd.input[..] {\n\n [value] => (None, value.clone()),\n\n [key, value] => (Some(key.clone()), value.clone()),\n\n _ => bail!(\"unable to read key/value schema inputs\"),\n\n };\n\n\n\n cmd.args.done()?;\n\n Ok(VerifySchemaAction {\n\n sink,\n\n format,\n", "file_path": "src/testdrive/src/action/kafka/verify.rs", "rank": 30, "score": 318313.145481018 }, { "content": "pub fn build_create_stream(mut cmd: BuiltinCommand) -> Result<CreateStreamAction, anyhow::Error> {\n\n let stream_name = format!(\"testdrive-{}\", cmd.args.string(\"stream\")?);\n\n let shard_count = cmd.args.parse(\"shards\")?;\n\n cmd.args.done()?;\n\n\n\n Ok(CreateStreamAction {\n\n stream_name,\n\n shard_count,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for CreateStreamAction {\n\n async fn undo(&self, _state: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n let stream_name = format!(\"{}-{}\", self.stream_name, state.seed);\n\n println!(\"Creating Kinesis stream {}\", stream_name);\n", "file_path": "src/testdrive/src/action/kinesis/create_stream.rs", "rank": 31, "score": 314320.7161264024 }, { "content": "pub fn build_verify_slot(mut cmd: BuiltinCommand) -> Result<VerifySlotAction, anyhow::Error> {\n\n let connection = cmd.args.string(\"connection\")?;\n\n let slot = cmd.args.string(\"slot\")?;\n\n let active: bool = cmd.args.parse(\"active\")?;\n\n cmd.args.done()?;\n\n Ok(VerifySlotAction {\n\n 
connection,\n\n slot,\n\n active,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for VerifySlotAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n let (client, conn) = tokio_postgres::connect(&self.connection, NoTls)\n", "file_path": "src/testdrive/src/action/postgres/verify_slot.rs", "rank": 32, "score": 314320.7161264024 }, { "content": "pub fn build_create_topic(mut cmd: BuiltinCommand) -> Result<CreateTopicAction, anyhow::Error> {\n\n let topic_prefix = format!(\"testdrive-{}\", cmd.args.string(\"topic\")?);\n\n let partitions = cmd.args.opt_parse(\"partitions\")?;\n\n\n\n let replication_factor = cmd.args.opt_parse(\"replication-factor\")?.unwrap_or(1);\n\n let compression = cmd\n\n .args\n\n .opt_string(\"compression\")\n\n .unwrap_or_else(|| \"producer\".into());\n\n let compaction = cmd.args.opt_parse(\"compaction\")?.unwrap_or(false);\n\n cmd.args.done()?;\n\n\n\n Ok(CreateTopicAction {\n\n topic_prefix,\n\n partitions,\n\n replication_factor,\n\n compression,\n\n compaction,\n\n })\n\n}\n", "file_path": "src/testdrive/src/action/kafka/create_topic.rs", "rank": 33, "score": 314320.7161264024 }, { "content": "pub fn build_add_partitions(mut cmd: BuiltinCommand) -> Result<AddPartitionsAction, anyhow::Error> {\n\n let topic_prefix = format!(\"testdrive-{}\", cmd.args.string(\"topic\")?);\n\n let partitions = cmd.args.opt_parse(\"total-partitions\")?.unwrap_or(1);\n\n cmd.args.done()?;\n\n\n\n Ok(AddPartitionsAction {\n\n topic_prefix,\n\n partitions,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for AddPartitionsAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n let topic_name = format!(\"{}-{}\", self.topic_prefix, state.seed);\n\n println!(\n", "file_path": 
"src/testdrive/src/action/kafka/add_partitions.rs", "rank": 34, "score": 314320.7161264024 }, { "content": "/// Ensures [`Numeric`] values are:\n\n/// - Within `Numeric`'s max precision ([`NUMERIC_DATUM_MAX_PRECISION`]), or errors if not.\n\n/// - Never possible but invalid representations (i.e. never -Nan or -0).\n\n///\n\n/// Should be called after any operation that can change an [`Numeric`]'s scale or\n\n/// generate negative values (except addition and subtraction).\n\npub fn munge_numeric(n: &mut Numeric) -> Result<(), anyhow::Error> {\n\n rescale_within_max_precision(n)?;\n\n if (n.is_zero() || n.is_nan()) && n.is_negative() {\n\n cx_datum().neg(n);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/repr/src/adt/numeric.rs", "rank": 35, "score": 311224.2959034738 }, { "content": "/// Propagates information about monotonic inputs through views.\n\npub fn optimize_dataflow_monotonic(dataflow: &mut DataflowDesc) -> Result<(), TransformError> {\n\n let mut monotonic = std::collections::HashSet::new();\n\n for (source_id, source) in dataflow.source_imports.iter_mut() {\n\n if let mz_dataflow_types::sources::SourceConnector::External {\n\n envelope: mz_dataflow_types::sources::SourceEnvelope::None(_),\n\n ..\n\n } = source.description.connector\n\n {\n\n monotonic.insert(source_id.clone());\n\n }\n\n }\n\n\n\n let monotonic_flag = MonotonicFlag::default();\n\n\n\n // Propagate predicate information from outputs to inputs.\n\n for build_desc in dataflow.objects_to_build.iter_mut() {\n\n monotonic_flag.apply(\n\n build_desc.view.as_inner_mut(),\n\n &monotonic,\n\n &mut HashSet::new(),\n\n )?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/transform/src/dataflow.rs", "rank": 36, "score": 309687.8559265449 }, { "content": "pub fn bench_avro(c: &mut Criterion) {\n\n let schema_str = r#\"\n\n{\n\n \"type\": \"record\",\n\n \"name\": \"Envelope\",\n\n \"namespace\": \"tpch.tpch.lineitem\",\n\n \"fields\": [\n\n {\n\n \"name\": \"before\",\n\n \"type\": [\n\n \"null\",\n\n 
{\n\n \"type\": \"record\",\n\n \"name\": \"Value\",\n\n \"fields\": [\n\n {\n\n \"name\": \"l_orderkey\",\n\n \"type\": \"int\"\n\n },\n\n {\n", "file_path": "src/interchange/benches/avro.rs", "rank": 37, "score": 303349.5024679663 }, { "content": "pub fn bench_pack(c: &mut Criterion) {\n\n let num_rows = 10_000;\n\n\n\n let mut rng = seeded_rng();\n\n let int_rows = (0..num_rows)\n\n .map(|_| {\n\n vec![\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n ]\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n let mut rng = seeded_rng();\n\n let byte_data = (0..num_rows)\n\n .map(|_| {\n", "file_path": "src/repr/benches/row.rs", "rank": 38, "score": 303349.5024679663 }, { "content": "pub fn bench_sort(c: &mut Criterion) {\n\n let num_rows = 10_000;\n\n\n\n let mut rng = seeded_rng();\n\n let int_rows = (0..num_rows)\n\n .map(|_| {\n\n vec![\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n Datum::Int32(rng.gen()),\n\n ]\n\n })\n\n .collect::<Vec<_>>();\n\n let numeric_rows = (0..num_rows)\n\n .map(|_| {\n\n vec![\n\n Datum::Numeric(rng.gen::<i32>().into()),\n", "file_path": "src/repr/benches/row.rs", "rank": 39, "score": 303349.5024679663 }, { "content": "pub fn bench_persist(c: &mut Criterion) {\n\n // Override the default of \"info\" here because the s3 library is chatty on\n\n // info while initializing. 
It's good info to have in mz logs, but ends\n\n // being as spammy in these benchmarks.\n\n mz_ore::test::init_logging_default(\"warn\");\n\n let data = DataGenerator::default();\n\n\n\n end_to_end::bench_load(&data, &mut c.benchmark_group(\"end_to_end/load\"));\n\n end_to_end::bench_replay(&data, &mut c.benchmark_group(\"end_to_end/replay\"));\n\n\n\n snapshot::bench_blob_get(&data, &mut c.benchmark_group(\"snapshot/blob_get\"));\n\n snapshot::bench_mem(&data, &mut c.benchmark_group(\"snapshot/mem\"));\n\n snapshot::bench_file(&data, &mut c.benchmark_group(\"snapshot/file\"));\n\n\n\n writer::bench_log(&mut c.benchmark_group(\"writer/log\"));\n\n writer::bench_blob_set(&data, &mut c.benchmark_group(\"writer/blob_set\"));\n\n writer::bench_encode_batch(&data, &mut c.benchmark_group(\"writer/encode_batch\"));\n\n writer::bench_blob_cache_set_unsealed_batch(\n\n &data,\n\n &mut c.benchmark_group(\"writer/blob_cache_set_unsealed_batch\"),\n", "file_path": "src/persist/benches/benches.rs", "rank": 40, "score": 303349.5024679663 }, { "content": "pub fn bench_protobuf(c: &mut Criterion) {\n\n let value = Value {\n\n l_orderkey: 155_190,\n\n l_suppkey: 7706,\n\n l_linenumber: 1,\n\n l_quantity: 17.0,\n\n l_extendedprice: 21168.23,\n\n l_discount: 0.04,\n\n l_tax: 0.02,\n\n l_returnflag: \"N\".into(),\n\n l_linestatus: \"O\".into(),\n\n l_shipdate: 9567,\n\n l_commitdate: 9537,\n\n l_receiptdate: 9537,\n\n l_shipinstruct: \"DELIVER IN PERSON\".into(),\n\n l_shipmode: \"TRUCK\".into(),\n\n l_comment: \"egular courts above the\".into(),\n\n ..Default::default()\n\n };\n\n\n", "file_path": "src/interchange/benches/protobuf.rs", "rank": 41, "score": 303349.5024679663 }, { "content": "/// Creates a `reqwest` client that obeys the system proxy configuration.\n\n///\n\n/// For details about the system proxy configuration, see the\n\n/// [crate documentation](crate).\n\npub fn client() -> reqwest::Client {\n\n client_builder()\n\n .build()\n\n .expect(\"reqwest::Client known to be 
valid\")\n\n}\n", "file_path": "src/http-proxy/src/reqwest.rs", "rank": 42, "score": 300982.4866858497 }, { "content": "pub fn bench_ilike(c: &mut Criterion) {\n\n bench_op(c, \"ilike\", |pattern| {\n\n like_pattern::compile(pattern, true).unwrap()\n\n });\n\n}\n\n\n", "file_path": "src/expr/benches/like_pattern.rs", "rank": 43, "score": 299388.5396632252 }, { "content": "pub fn bench_like(c: &mut Criterion) {\n\n bench_op(c, \"like\", |pattern| {\n\n like_pattern::compile(pattern, false).unwrap()\n\n });\n\n}\n\n\n\ncriterion_group!(benches, bench_like, bench_ilike);\n\ncriterion_main!(benches);\n", "file_path": "src/expr/benches/like_pattern.rs", "rank": 44, "score": 299388.5396632252 }, { "content": "/// A general implementation for name resolution on AST elements.\n\n///\n\n/// This implementation is appropriate Whenever:\n\n/// - You don't need to export the name resolution outside the `sql` crate and\n\n/// the extra typing isn't too onerous.\n\n/// - Discovered dependencies should extend `qcx.ids`.\n\npub fn resolve_names_extend_qcx_ids<F, T>(qcx: &mut QueryContext, f: F) -> Result<T, PlanError>\n\nwhere\n\n F: FnOnce(&mut NameResolver) -> T,\n\n{\n\n let mut n = NameResolver::new(qcx.scx.catalog);\n\n let result = f(&mut n);\n\n n.status?;\n\n qcx.ids.extend(n.ids.iter());\n\n Ok(result)\n\n}\n\n\n\n// Used when displaying a view's source for human creation. 
If the name\n\n// specified is the same as the name in the catalog, we don't use the ID format.\n\n#[derive(Debug)]\n\npub struct NameSimplifier<'a> {\n\n pub catalog: &'a dyn SessionCatalog,\n\n}\n\n\n\nimpl<'ast, 'a> VisitMut<'ast, Aug> for NameSimplifier<'a> {\n\n fn visit_object_name_mut(&mut self, name: &mut ResolvedObjectName) {\n", "file_path": "src/sql/src/names.rs", "rank": 45, "score": 298848.32210042863 }, { "content": "/// Converts stream into JSON if `type_name` refers to an enum or struct\n\n///\n\n/// Returns `Ok(Some(string))` if `type_name` refers to an enum or struct, and\n\n/// there are no stream conversion errors.\n\n/// Returns `Ok(None)` if `type_name` does not refer to an enum or struct.\n\nfn parse_as_enum_or_struct<I, C>(\n\n first_arg: TokenTree,\n\n rest_of_stream: &mut I,\n\n type_name: &str,\n\n rti: &ReflectedTypeInfo,\n\n ctx: &mut C,\n\n) -> Result<Option<String>, String>\n\nwhere\n\n C: TestDeserializeContext,\n\n I: Iterator<Item = TokenTree>,\n\n{\n\n if rti.enum_dict.contains_key(type_name) || rti.struct_dict.contains_key(type_name) {\n\n // An enum or a struct can be specified as `(arg1 .. argn)` or\n\n // `only_arg`. 
The goal here is to feed the enum/struct specification\n\n // into a common inner method that takes\n\n // `(first_token_of_spec, rest_of_tokens_comprising_spec)`\n\n match first_arg {\n\n TokenTree::Group(group) if group.delimiter() == Delimiter::Parenthesis => {\n\n let mut inner_iter = group.stream().into_iter();\n\n match inner_iter.next() {\n", "file_path": "src/lowertest/src/lib.rs", "rank": 46, "score": 295714.78581057134 }, { "content": "/// Rescale `n` as an `OrderedDecimal` with the described scale, or error if:\n\n/// - Rescaling exceeds max precision\n\n/// - `n` requires > [`NUMERIC_DATUM_MAX_PRECISION`] - `scale` digits of precision\n\n/// left of the decimal point\n\npub fn rescale(n: &mut Numeric, scale: u8) -> Result<(), anyhow::Error> {\n\n let mut cx = cx_datum();\n\n cx.rescale(n, &Numeric::from(-i32::from(scale)));\n\n if cx.status().invalid_operation() || get_precision(n) > u32::from(NUMERIC_DATUM_MAX_PRECISION)\n\n {\n\n bail!(\n\n \"numeric value {} exceed maximum precision {}\",\n\n n,\n\n NUMERIC_DATUM_MAX_PRECISION\n\n )\n\n }\n\n munge_numeric(n)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/repr/src/adt/numeric.rs", "rank": 47, "score": 294994.38748255273 }, { "content": "struct StartFn(Box<dyn FnMut(UnreliableHandle) -> Result<RuntimeClient, Error>>);\n\n\n\nimpl fmt::Debug for StartFn {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"StartFn\").finish_non_exhaustive()\n\n }\n\n}\n\n\n", "file_path": "src/persist/src/nemesis/direct.rs", "rank": 48, "score": 292164.3618519469 }, { "content": "/// Normalizes an unresolved object name.\n\npub fn unresolved_object_name(mut name: UnresolvedObjectName) -> Result<PartialName, PlanError> {\n\n if name.0.len() < 1 || name.0.len() > 3 {\n\n return Err(PlanError::MisqualifiedName(name.to_string()));\n\n }\n\n let out = PartialName {\n\n item: ident(\n\n name.0\n\n .pop()\n\n .expect(\"name checked to have at least one component\"),\n\n ),\n\n schema: 
name.0.pop().map(ident),\n\n database: name.0.pop().map(ident),\n\n };\n\n assert!(name.0.is_empty());\n\n Ok(out)\n\n}\n\n\n", "file_path": "src/sql/src/normalize.rs", "rank": 49, "score": 291406.8560951038 }, { "content": "fn parse_as_enum_or_struct_inner<I, C>(\n\n first_arg: TokenTree,\n\n rest_of_stream: &mut I,\n\n type_name: &str,\n\n rti: &ReflectedTypeInfo,\n\n ctx: &mut C,\n\n) -> Result<Option<String>, String>\n\nwhere\n\n C: TestDeserializeContext,\n\n I: Iterator<Item = TokenTree>,\n\n{\n\n if let Some(result) = ctx.override_syntax(first_arg.clone(), rest_of_stream, type_name, rti)? {\n\n Ok(Some(result))\n\n } else if let Some((f_names, f_types)) = rti.struct_dict.get(type_name).map(|r| r.clone()) {\n\n Ok(Some(to_json_fields(\n\n type_name,\n\n &mut (&mut std::iter::once(first_arg)).chain(rest_of_stream),\n\n f_names,\n\n f_types,\n\n rti,\n", "file_path": "src/lowertest/src/lib.rs", "rank": 50, "score": 290115.8290487102 }, { "content": "/// Converts serialized JSON to the syntax that [to_json] handles.\n\n///\n\n/// `json` is assumed to have been produced by serializing an object of type\n\n/// `type_name`.\n\n/// `ctx` is responsible for converting serialized JSON to any syntax\n\n/// extensions or overrides.\n\npub fn from_json<C>(json: &Value, type_name: &str, rti: &ReflectedTypeInfo, ctx: &mut C) -> String\n\nwhere\n\n C: TestDeserializeContext,\n\n{\n\n let type_name = normalize_type_name(type_name);\n\n if let Some(result) = ctx.reverse_syntax_override(json, &type_name, rti) {\n\n return result;\n\n }\n\n if let Some((names, types)) = rti.struct_dict.get(&type_name[..]) {\n\n if types.is_empty() {\n\n \"\".to_string()\n\n } else {\n\n format!(\"({})\", from_json_fields(json, names, types, rti, ctx))\n\n }\n\n } else if let Some(enum_dict) = rti.enum_dict.get(&type_name[..]) {\n\n match json {\n\n // A unit enum in JSON is `\"variant\"`. 
In the spec it is `variant`.\n\n Value::String(s) => unquote(s),\n\n // An enum with fields is `{\"variant\": <fields>}` in JSON. In the\n\n // spec it is `(variant field1 .. fieldn).\n", "file_path": "src/lowertest/src/lib.rs", "rank": 51, "score": 288428.5280148403 }, { "content": "pub fn read_long<R: Read>(reader: &mut R) -> Result<i64, AvroError> {\n\n zag_i64(reader)\n\n}\n\n\n", "file_path": "src/avro/src/util.rs", "rank": 52, "score": 286907.1079412905 }, { "content": "pub fn zag_i32<R: Read>(reader: &mut R) -> Result<i32, AvroError> {\n\n let i = zag_i64(reader)?;\n\n if i < i64::from(i32::min_value()) || i > i64::from(i32::max_value()) {\n\n Err(AvroError::Decode(DecodeError::I32OutOfRange(i)))\n\n } else {\n\n Ok(i as i32)\n\n }\n\n}\n\n\n", "file_path": "src/avro/src/util.rs", "rank": 53, "score": 286907.1079412904 }, { "content": "pub fn zag_i64<R: Read>(reader: &mut R) -> Result<i64, AvroError> {\n\n let z = decode_variable(reader)?;\n\n Ok(if z & 0x1 == 0 {\n\n (z >> 1) as i64\n\n } else {\n\n !(z >> 1) as i64\n\n })\n\n}\n\n\n", "file_path": "src/avro/src/util.rs", "rank": 54, "score": 286907.1079412905 }, { "content": "/// Constructs a new AWS SQS client that respects the\n\n/// [system proxy configuration](mz_http_proxy#system-proxy-configuration).\n\npub fn client(config: &AwsConfig) -> Client {\n\n let mut builder = aws_sdk_sqs::config::Builder::from(config.inner());\n\n if let Some(endpoint) = config.endpoint() {\n\n builder = builder.endpoint_resolver(endpoint.clone());\n\n }\n\n Client::from_conf_conn(builder.build(), util::connector())\n\n}\n", "file_path": "src/aws-util/src/sqs.rs", "rank": 55, "score": 286550.1962608885 }, { "content": "/// Constructs a new AWS STS client that respects the\n\n/// [system proxy configuration](mz_http_proxy#system-proxy-configuration).\n\npub fn client(config: &AwsConfig) -> Client {\n\n let mut builder = aws_sdk_sts::config::Builder::from(config.inner());\n\n if let Some(endpoint) = config.endpoint() {\n\n 
builder = builder.endpoint_resolver(endpoint.clone());\n\n }\n\n Client::from_conf_conn(builder.build(), util::connector())\n\n}\n", "file_path": "src/aws-util/src/sts.rs", "rank": 56, "score": 286550.1962608885 }, { "content": "/// Constructs a new AWS Kinesis client that respects the\n\n/// [system proxy configuration](mz_http_proxy#system-proxy-configuration).\n\npub fn client(config: &AwsConfig) -> Client {\n\n let mut builder = aws_sdk_kinesis::config::Builder::from(config.inner());\n\n if let Some(endpoint) = config.endpoint() {\n\n builder = builder.endpoint_resolver(endpoint.clone());\n\n }\n\n Client::from_conf_conn(builder.build(), util::connector())\n\n}\n\n\n\n/// Lists the shards of the named Kinesis stream.\n\n///\n\n/// This function wraps the `ListShards` API call. It returns all shards in a\n\n/// given Kinesis stream, automatically handling pagination if required.\n\n///\n\n/// # Errors\n\n///\n\n/// Any errors from the underlying `GetShardIterator` API call are surfaced\n\n/// directly.\n\npub async fn list_shards(\n\n client: &Client,\n\n stream_name: &str,\n", "file_path": "src/aws-util/src/kinesis.rs", "rank": 57, "score": 286550.1962608885 }, { "content": "/// Constructs a new AWS S3 client that respects the\n\n/// [system proxy configuration](mz_http_proxy#system-proxy-configuration).\n\npub fn client(config: &AwsConfig) -> Client {\n\n let mut builder = aws_sdk_s3::config::Builder::from(config.inner());\n\n if let Some(endpoint) = config.endpoint() {\n\n builder = builder.endpoint_resolver(endpoint.clone());\n\n }\n\n Client::from_conf_conn(builder.build(), util::connector())\n\n}\n", "file_path": "src/aws-util/src/s3.rs", "rank": 58, "score": 286550.1962608885 }, { "content": "/// Converts the next part of the stream into JSON deserializable into an object\n\n/// of type `type_name`.\n\n///\n\n/// If the object is a zero-arg struct, this method will return\n\n/// `Ok(Some(\"null\"))` without looking at the stream.\n\n///\n\n/// Otherwise, 
it will try to convert the next `TokenTree` in the stream.\n\n/// If end of stream has been reached, this method returns `Ok(None)`\n\n///\n\n/// The JSON string should be deserializable into an object of type\n\n/// `type_name`.\n\n///\n\n/// Default object syntax:\n\n/// * An enum is represented as `(enum_variant_snake_case <arg1> <arg2> ..)`,\n\n/// unless it is a unit enum, in which case it can also be represented as\n\n/// `enum_variant_snake_case`. Enums can have optional arguments, which should\n\n/// all come at the end.\n\n/// * A struct is represented as `(<arg1> <arg2> ..)`, unless it has no\n\n/// arguments, in which case it is represented by the empty string.\n\n/// * A vec or tuple is represented as `[<elem1> <elem2> ..]`\n\n/// * None/null is represented as `null`\n\n/// * true (resp. false) is represented as `true` (resp. `false`)\n\n/// * Strings are represented as `\"something with quotations\"`.\n\n/// * A numeric value like -1 or 1.1 is represented as is.\n\n/// * You can delimit arguments and elements using whitespace and/or commas.\n\n///\n\n/// `ctx` will extend and/or override the default syntax.\n\npub fn to_json<I, C>(\n\n stream_iter: &mut I,\n\n type_name: &str,\n\n rti: &ReflectedTypeInfo,\n\n ctx: &mut C,\n\n) -> Result<Option<String>, String>\n\nwhere\n\n C: TestDeserializeContext,\n\n I: Iterator<Item = TokenTree>,\n\n{\n\n let type_name = &normalize_type_name(type_name);\n\n if let Some((_, f_types)) = rti.struct_dict.get(&type_name[..]) {\n\n if f_types.is_empty() {\n\n return Ok(Some(\"null\".to_string()));\n\n }\n\n }\n\n if let Some(first_arg) = stream_iter.next() {\n\n // If the type refers to an enum or struct defined by us, go to a\n\n // special branch that allows reuse of code paths for the\n\n // `(<arg1>..<argn>)` syntax as well as the `<only_arg>` syntax.\n", "file_path": "src/lowertest/src/lib.rs", "rank": 59, "score": 281351.935417128 }, { "content": "/// Parses a `NaiveDateTime` from `s`.\n\npub fn parse_timestamp(s: 
&str) -> Result<NaiveDateTime, ParseError> {\n\n match parse_timestamp_string(s) {\n\n Ok((date, time, _)) => Ok(date.and_time(time)),\n\n Err(e) => Err(ParseError::invalid_input_syntax(\"timestamp\", s).with_details(e)),\n\n }\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 60, "score": 280004.12682665203 }, { "content": "pub fn decode_cdcv2<G: Scope<Timestamp = Timestamp>>(\n\n stream: &Stream<G, SourceOutput<Option<Vec<u8>>, Option<Vec<u8>>>>,\n\n schema: &str,\n\n registry: Option<mz_ccsr::ClientConfig>,\n\n confluent_wire_format: bool,\n\n) -> (Collection<G, Row, Diff>, Box<dyn Any>) {\n\n // We will have already checked validity of the schema by now, so this can't fail.\n\n let mut resolver = ConfluentAvroResolver::new(schema, registry, confluent_wire_format).unwrap();\n\n let channel = Rc::new(RefCell::new(VecDeque::new()));\n\n let activator: Rc<RefCell<Option<SyncActivator>>> = Rc::new(RefCell::new(None));\n\n let mut vector = Vec::new();\n\n stream.sink(\n\n SourceOutput::<Option<Vec<u8>>, Option<Vec<u8>>>::position_value_contract(),\n\n \"CDCv2-Decode\",\n\n {\n\n let channel = Rc::clone(&channel);\n\n let activator = Rc::clone(&activator);\n\n move |input| {\n\n input.for_each(|_time, data| {\n\n data.swap(&mut vector);\n", "file_path": "src/dataflow/src/decode/mod.rs", "rank": 61, "score": 276817.3326697275 }, { "content": "#[async_trait(?Send)]\n\npub trait Client<T = mz_repr::Timestamp> {\n\n /// Sends a command to the dataflow server.\n\n async fn send(&mut self, cmd: Command<T>);\n\n\n\n /// Receives the next response from the dataflow server.\n\n ///\n\n /// This method blocks until the next response is available, or, if the\n\n /// dataflow server has been shut down, returns `None`.\n\n async fn recv(&mut self) -> Option<Response<T>>;\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl Client for Box<dyn Client> {\n\n async fn send(&mut self, cmd: Command) {\n\n (**self).send(cmd).await\n\n }\n\n async fn recv(&mut self) -> Option<Response> 
{\n\n (**self).recv().await\n\n }\n\n}\n", "file_path": "src/dataflow-types/src/client.rs", "rank": 62, "score": 276573.00642383535 }, { "content": "pub fn transform_expr(scx: &StatementContext, expr: &mut Expr<Raw>) -> Result<(), PlanError> {\n\n run_transforms(scx, |t, expr| t.visit_expr_mut(expr), expr)\n\n}\n\n\n\npub(crate) fn run_transforms<F, A>(\n\n scx: &StatementContext,\n\n mut f: F,\n\n ast: &mut A,\n\n) -> Result<(), PlanError>\n\nwhere\n\n F: for<'ast> FnMut(&mut dyn VisitMut<'ast, Raw>, &'ast mut A),\n\n{\n\n let mut func_rewriter = FuncRewriter::new(scx);\n\n f(&mut func_rewriter, ast);\n\n func_rewriter.status?;\n\n\n\n let mut desugarer = Desugarer::new();\n\n f(&mut desugarer, ast);\n\n desugarer.status\n\n}\n\n\n", "file_path": "src/sql/src/plan/transform_ast.rs", "rank": 63, "score": 276044.27197124425 }, { "content": "fn decode_bind(mut buf: Cursor) -> Result<FrontendMessage, io::Error> {\n\n let portal_name = buf.read_cstr()?.to_string();\n\n let statement_name = buf.read_cstr()?.to_string();\n\n\n\n let mut param_formats = Vec::new();\n\n for _ in 0..buf.read_i16()? {\n\n param_formats.push(buf.read_format()?);\n\n }\n\n\n\n let mut raw_params = Vec::new();\n\n for _ in 0..buf.read_i16()? {\n\n let len = buf.read_i32()?;\n\n if len == -1 {\n\n raw_params.push(None); // NULL\n\n } else {\n\n // TODO(benesch): this should use bytes::Bytes to avoid the copy.\n\n let mut value = Vec::new();\n\n for _ in 0..len {\n\n value.push(buf.read_byte()?);\n\n }\n", "file_path": "src/pgwire/src/codec.rs", "rank": 64, "score": 275514.17122123635 }, { "content": "fn parse_sql(line_reader: &mut LineReader) -> Result<SqlCommand, PosError> {\n\n let (_, line1) = line_reader.next().unwrap();\n\n let query = line1[1..].trim().to_owned();\n\n let line2 = slurp_one(line_reader);\n\n let line3 = slurp_one(line_reader);\n\n let mut column_names = None;\n\n let mut expected_rows = Vec::new();\n\n lazy_static! 
{\n\n static ref HASH_REGEX: Regex = Regex::new(r\"^(\\S+) values hashing to (\\S+)$\").unwrap();\n\n }\n\n match (line2, line3) {\n\n (Some((pos2, line2)), Some((pos3, line3))) => {\n\n if line3.len() >= 3 && line3.chars().all(|c| c == '-') {\n\n column_names = Some(split_line(pos2, &line2)?);\n\n } else {\n\n expected_rows.push(split_line(pos2, &line2)?);\n\n expected_rows.push(split_line(pos3, &line3)?);\n\n }\n\n }\n\n (Some((pos2, line2)), None) => match HASH_REGEX.captures(&line2) {\n", "file_path": "src/testdrive/src/parser.rs", "rank": 65, "score": 272581.603616236 }, { "content": "fn parse_builtin(line_reader: &mut LineReader) -> Result<BuiltinCommand, PosError> {\n\n let (pos, line) = line_reader.next().unwrap();\n\n let mut builtin_reader = BuiltinReader::new(&line, pos);\n\n let name = match builtin_reader.next() {\n\n Some(Ok((_, s))) => s,\n\n Some(Err(e)) => return Err(e),\n\n None => {\n\n return Err(PosError {\n\n source: anyhow!(\"command line is missing command name\"),\n\n pos: Some(pos),\n\n });\n\n }\n\n };\n\n let mut args = HashMap::new();\n\n for el in builtin_reader {\n\n let (pos, token) = el?;\n\n let pieces: Vec<_> = token.splitn(2, '=').collect();\n\n let pieces = match pieces.as_slice() {\n\n [key, value] => vec![*key, *value],\n\n [key] => vec![*key, \"\"],\n", "file_path": "src/testdrive/src/parser.rs", "rank": 66, "score": 272581.603616236 }, { "content": "fn build_path(cmd: &mut BuiltinCommand) -> Result<String, anyhow::Error> {\n\n let path = cmd.args.string(\"path\")?;\n\n if path.contains(path::MAIN_SEPARATOR) {\n\n // The goal isn't security, but preventing mistakes.\n\n bail!(\"separators in paths are forbidden\")\n\n } else {\n\n Ok(path)\n\n }\n\n}\n\n\n", "file_path": "src/testdrive/src/action/file.rs", "rank": 67, "score": 271347.52996908134 }, { "content": "/// Initiates a timely dataflow computation, processing materialized commands.\n\npub fn serve(config: Config) -> Result<(Server, LocalClient), anyhow::Error> {\n\n 
assert!(config.workers > 0);\n\n\n\n let server_metrics = ServerMetrics::register_with(&config.metrics_registry);\n\n let source_metrics = SourceBaseMetrics::register_with(&config.metrics_registry);\n\n let sink_metrics = SinkBaseMetrics::register_with(&config.metrics_registry);\n\n\n\n // Construct endpoints for each thread that will receive the coordinator's\n\n // sequenced command stream and send the responses to the coordinator.\n\n //\n\n // TODO(benesch): package up this idiom of handing out ownership of N items\n\n // to the N timely threads that will be spawned. The Mutex<Vec<Option<T>>>\n\n // is hard to read through.\n\n let (response_txs, response_rxs): (Vec<_>, Vec<_>) = (0..config.workers)\n\n .map(|_| mpsc::unbounded_channel())\n\n .unzip();\n\n let (command_txs, command_rxs): (Vec<_>, Vec<_>) = (0..config.workers)\n\n .map(|_| crossbeam_channel::unbounded())\n\n .unzip();\n\n // A mutex around a vector of optional (take-able) pairs of (tx, rx) for worker/client communication.\n", "file_path": "src/dataflow/src/server.rs", "rank": 68, "score": 270155.22013136867 }, { "content": "/// Deserialize the next `TokenTree` into a `D` object.\n\n///\n\n/// See [`to_json`] for the object spec syntax.\n\n///\n\n/// `type_name` should be `D` in string form.\n\n///\n\n/// `stream_iter` will advance by one `TokenTree` no matter the result.\n\npub fn deserialize<D, I, C>(\n\n stream_iter: &mut I,\n\n type_name: &'static str,\n\n rti: &ReflectedTypeInfo,\n\n ctx: &mut C,\n\n) -> Result<D, String>\n\nwhere\n\n C: TestDeserializeContext,\n\n D: DeserializeOwned,\n\n I: Iterator<Item = TokenTree>,\n\n{\n\n deserialize_optional(stream_iter, type_name, rti, ctx)?\n\n .ok_or_else(|| format!(\"Empty spec for type {}\", type_name))\n\n}\n\n\n", "file_path": "src/lowertest/src/lib.rs", "rank": 69, "score": 269556.46607549605 }, { "content": "fn parse_explain_sql(line_reader: &mut LineReader) -> Result<SqlCommand, PosError> {\n\n let (_, line1) = 
line_reader.next().unwrap();\n\n // This is a bit of a hack to extract the next chunk of the file with\n\n // blank lines intact. Ideally the `LineReader` would expose the API we\n\n // need directly, but that would require a large refactor.\n\n let mut expected_output: String = line_reader\n\n .inner\n\n .lines()\n\n .take_while(|l| !is_sigil(l.chars().next()))\n\n .map(|l| format!(\"{}\\n\", l))\n\n .collect();\n\n slurp_all(line_reader);\n\n while expected_output.ends_with(\"\\n\\n\") {\n\n expected_output.pop();\n\n }\n\n Ok(SqlCommand {\n\n query: line1[1..].trim().to_owned(),\n\n expected_output: SqlOutput::Full {\n\n column_names: None,\n\n expected_rows: vec![vec![expected_output]],\n\n },\n\n })\n\n}\n\n\n", "file_path": "src/testdrive/src/parser.rs", "rank": 70, "score": 268798.77852716565 }, { "content": "/// Decodes a BlobUnsealedBatch from the Arrow file format.\n\n///\n\n/// NB: This is currently unused, but it's here because we may want to use it\n\n/// for the local cache and so we can easily compare arrow vs parquet.\n\npub fn decode_unsealed_arrow<R: Read + Seek>(r: &mut R) -> Result<BlobUnsealedBatch, Error> {\n\n let file_meta = read_file_metadata(r)?;\n\n let (format, meta) =\n\n decode_unsealed_inline_meta(file_meta.schema().metadata().get(INLINE_METADATA_KEY))?;\n\n\n\n let updates = match format {\n\n ProtoBatchFormat::Unknown => return Err(\"unknown format\".into()),\n\n ProtoBatchFormat::ArrowKvtd => decode_arrow_file_kvtd(r, file_meta)?,\n\n ProtoBatchFormat::ParquetKvtd => {\n\n return Err(\"ParquetKvtd format not supported in arrow\".into())\n\n }\n\n };\n\n\n\n let ret = BlobUnsealedBatch {\n\n desc: SeqNo(meta.seqno_lower)..SeqNo(meta.seqno_upper),\n\n updates,\n\n };\n\n ret.validate()?;\n\n Ok(ret)\n\n}\n\n\n", "file_path": "src/persist/src/indexed/columnar/arrow.rs", "rank": 71, "score": 267584.61489130784 }, { "content": "/// Decodes a BlobTraceBatch from the Arrow file format.\n\n///\n\n/// NB: This is currently unused, but it's 
here because we may want to use it\n\n/// for the local cache and so we can easily compare arrow vs parquet.\n\npub fn decode_trace_arrow<R: Read + Seek>(r: &mut R) -> Result<BlobTraceBatch, Error> {\n\n let file_meta = read_file_metadata(r)?;\n\n let (format, meta) =\n\n decode_trace_inline_meta(file_meta.schema().metadata().get(INLINE_METADATA_KEY))?;\n\n\n\n let updates = match format {\n\n ProtoBatchFormat::Unknown => return Err(\"unknown format\".into()),\n\n ProtoBatchFormat::ArrowKvtd => decode_arrow_file_kvtd(r, file_meta)?,\n\n ProtoBatchFormat::ParquetKvtd => {\n\n return Err(\"ParquetKvtd format not supported in arrow\".into())\n\n }\n\n };\n\n\n\n let ret = BlobTraceBatch {\n\n desc: meta.desc.map_or_else(\n\n || {\n\n Description::new(\n\n Antichain::from_elem(u64::minimum()),\n\n Antichain::from_elem(u64::minimum()),\n\n Antichain::from_elem(u64::minimum()),\n\n )\n\n },\n\n |x| x.into(),\n\n ),\n\n updates,\n\n };\n\n ret.validate()?;\n\n Ok(ret)\n\n}\n\n\n", "file_path": "src/persist/src/indexed/columnar/arrow.rs", "rank": 72, "score": 267584.61489130784 }, { "content": "/// Encodes an BlobUnsealedBatch into the Arrow file format.\n\n///\n\n/// NB: This is currently unused, but it's here because we may want to use it\n\n/// for the local cache and so we can easily compare arrow vs parquet.\n\npub fn encode_unsealed_arrow<W: Write>(w: &mut W, batch: &BlobUnsealedBatch) -> Result<(), Error> {\n\n let mut metadata = HashMap::with_capacity(1);\n\n metadata.insert(\n\n INLINE_METADATA_KEY.into(),\n\n encode_unsealed_inline_meta(batch, ProtoBatchFormat::ArrowKvtd),\n\n );\n\n let schema = Schema::new_from(SCHEMA_ARROW_KVTD.fields().clone(), metadata);\n\n let options = WriteOptions { compression: None };\n\n let mut writer = FileWriter::try_new(w, &schema, options)?;\n\n for records in batch.updates.iter() {\n\n writer.write(&encode_arrow_batch_kvtd(records))?;\n\n }\n\n writer.finish()?;\n\n Ok(())\n\n}\n\n\n", "file_path": 
"src/persist/src/indexed/columnar/arrow.rs", "rank": 73, "score": 267584.61489130784 }, { "content": "/// Encodes an BlobTraceBatch into the Arrow file format.\n\n///\n\n/// NB: This is currently unused, but it's here because we may want to use it\n\n/// for the local cache and so we can easily compare arrow vs parquet.\n\npub fn encode_trace_arrow<W: Write>(w: &mut W, batch: &BlobTraceBatch) -> Result<(), Error> {\n\n let mut metadata = HashMap::with_capacity(1);\n\n metadata.insert(\n\n INLINE_METADATA_KEY.into(),\n\n encode_trace_inline_meta(batch, ProtoBatchFormat::ArrowKvtd),\n\n );\n\n let schema = Schema::new_from(SCHEMA_ARROW_KVTD.fields().clone(), metadata);\n\n let options = WriteOptions { compression: None };\n\n let mut writer = FileWriter::try_new(w, &schema, options)?;\n\n for records in batch.updates.iter() {\n\n writer.write(&encode_arrow_batch_kvtd(&records))?;\n\n }\n\n writer.finish()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/persist/src/indexed/columnar/arrow.rs", "rank": 74, "score": 267584.6148913078 }, { "content": "/// Decodes a BlobUnsealedBatch from the Parquet format.\n\npub fn decode_unsealed_parquet<R: Read + Seek>(r: &mut R) -> Result<BlobUnsealedBatch, Error> {\n\n let metadata = read_metadata(r).map_err(|err| err.to_string())?;\n\n let metadata = metadata\n\n .key_value_metadata()\n\n .as_ref()\n\n .and_then(|x| x.iter().find(|x| x.key == INLINE_METADATA_KEY));\n\n let (format, meta) = decode_unsealed_inline_meta(metadata.and_then(|x| x.value.as_ref()))?;\n\n\n\n let updates = match format {\n\n ProtoBatchFormat::Unknown => return Err(\"unknown format\".into()),\n\n ProtoBatchFormat::ArrowKvtd => {\n\n return Err(\"ArrowKvtd format not supported in parquet\".into())\n\n }\n\n ProtoBatchFormat::ParquetKvtd => decode_parquet_file_kvtd(r)?,\n\n };\n\n\n\n let ret = BlobUnsealedBatch {\n\n desc: SeqNo(meta.seqno_lower)..SeqNo(meta.seqno_upper),\n\n updates,\n\n };\n\n ret.validate()?;\n\n Ok(ret)\n\n}\n\n\n", "file_path": 
"src/persist/src/indexed/columnar/parquet.rs", "rank": 75, "score": 267578.6105694588 }, { "content": "/// Encodes an BlobTraceBatch into the Parquet format.\n\npub fn encode_trace_parquet<W: Write>(w: &mut W, batch: &BlobTraceBatch) -> Result<(), Error> {\n\n encode_parquet_kvtd(\n\n w,\n\n encode_trace_inline_meta(batch, ProtoBatchFormat::ParquetKvtd),\n\n &batch.updates,\n\n )\n\n}\n\n\n", "file_path": "src/persist/src/indexed/columnar/parquet.rs", "rank": 76, "score": 267578.6105694588 }, { "content": "/// Decodes a BlobTraceBatch from the Parquet format.\n\npub fn decode_trace_parquet<R: Read + Seek>(r: &mut R) -> Result<BlobTraceBatch, Error> {\n\n let metadata = read_metadata(r).map_err(|err| err.to_string())?;\n\n let metadata = metadata\n\n .key_value_metadata()\n\n .as_ref()\n\n .and_then(|x| x.iter().find(|x| x.key == INLINE_METADATA_KEY));\n\n let (format, meta) = decode_trace_inline_meta(metadata.and_then(|x| x.value.as_ref()))?;\n\n\n\n let updates = match format {\n\n ProtoBatchFormat::Unknown => return Err(\"unknown format\".into()),\n\n ProtoBatchFormat::ArrowKvtd => {\n\n return Err(\"ArrowKVTD format not supported in parquet\".into())\n\n }\n\n ProtoBatchFormat::ParquetKvtd => decode_parquet_file_kvtd(r)?,\n\n };\n\n\n\n let ret = BlobTraceBatch {\n\n desc: meta.desc.map_or_else(\n\n || {\n\n Description::new(\n", "file_path": "src/persist/src/indexed/columnar/parquet.rs", "rank": 77, "score": 267578.6105694588 }, { "content": "pub fn build_skip_if(cmd: BuiltinCommand) -> Result<SkipIfAction, anyhow::Error> {\n\n Ok(SkipIfAction {\n\n query: cmd.input.join(\"\\n\"),\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for SkipIfAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> {\n\n Ok(())\n\n }\n\n\n\n async fn redo(&self, state: &mut State) -> Result<ControlFlow, anyhow::Error> {\n\n let stmt = state\n\n .pgclient\n\n .prepare(&self.query)\n\n .await\n\n .context(\"failed to prepare skip-if query\")?;\n\n\n\n if 
stmt.columns().len() != 1 || *stmt.columns()[0].type_() != Type::BOOL {\n", "file_path": "src/testdrive/src/action/skip_if.rs", "rank": 78, "score": 266763.79104288353 }, { "content": "/// Decode a `Value` encoded in Avro format given its `Schema` and anything implementing `io::Read`\n\n/// to read from.\n\n///\n\n/// In case a reader `Schema` is provided, schema resolution will also be performed.\n\n///\n\n/// **NOTE** This function has a quite small niche of usage and does NOT take care of reading the\n\n/// header and consecutive data blocks; use [`Reader`](struct.Reader.html) if you don't know what\n\n/// you are doing, instead.\n\npub fn from_avro_datum<R: AvroRead>(schema: &Schema, reader: &mut R) -> Result<Value, AvroError> {\n\n let value = decode(schema.top_node(), reader)?;\n\n Ok(value)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::types::{Record, ToAvro};\n\n use crate::Reader;\n\n\n\n use std::io::Cursor;\n\n\n\n static SCHEMA: &str = r#\"\n\n {\n\n \"type\": \"record\",\n\n \"name\": \"test\",\n\n \"fields\": [\n\n {\"name\": \"a\", \"type\": \"long\", \"default\": 42},\n\n {\"name\": \"b\", \"type\": \"string\"}\n", "file_path": "src/avro/src/reader.rs", "rank": 79, "score": 266217.1582063118 }, { "content": "// Attaches additional information to a `Raw` AST, resulting in an `Aug` AST, by\n\n// resolving names and (aspirationally) performing semantic analysis such as\n\n// type-checking.\n\npub fn resolve_names(qcx: &mut QueryContext, query: Query<Raw>) -> Result<Query<Aug>, PlanError> {\n\n let mut n = NameResolver::new(qcx.scx.catalog);\n\n let result = n.fold_query(query);\n\n n.status?;\n\n qcx.ids.extend(n.ids.iter());\n\n Ok(result)\n\n}\n\n\n", "file_path": "src/sql/src/names.rs", "rank": 80, "score": 266212.67629591527 }, { "content": "/// If the `stream_iter` is not empty, deserialize the next `TokenTree` into a `D`.\n\n///\n\n/// See [`to_json`] for the object spec syntax.\n\n///\n\n/// `type_name` should be `D` 
in string form.\n\n///\n\n/// `stream_iter` will advance by one `TokenTree` no matter the result.\n\npub fn deserialize_optional<D, I, C>(\n\n stream_iter: &mut I,\n\n type_name: &'static str,\n\n rti: &ReflectedTypeInfo,\n\n ctx: &mut C,\n\n) -> Result<Option<D>, String>\n\nwhere\n\n C: TestDeserializeContext,\n\n D: DeserializeOwned,\n\n I: Iterator<Item = TokenTree>,\n\n{\n\n match to_json(stream_iter, type_name, rti, ctx)? {\n\n Some(j) => Ok(Some(serde_json::from_str::<D>(&j).map_err(|e| {\n\n format!(\"String while serializing: {}\\nOriginal JSON: {}\", e, j)\n\n })?)),\n\n None => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "src/lowertest/src/lib.rs", "rank": 81, "score": 265377.59221041645 }, { "content": "fn parse_fail_sql(line_reader: &mut LineReader) -> Result<FailSqlCommand, PosError> {\n\n let (pos, line1) = line_reader.next().unwrap();\n\n let line2 = slurp_one(line_reader);\n\n let (err_pos, expected_error) = match line2 {\n\n Some((err_pos, line2)) => (err_pos, line2),\n\n None => {\n\n return Err(PosError {\n\n pos: Some(pos),\n\n source: anyhow!(\"failing SQL command is missing expected error message\"),\n\n });\n\n }\n\n };\n\n let query = line1[1..].trim().to_string();\n\n\n\n let (expected_error, error_match_type) =\n\n if let Some(exp_err) = expected_error.strip_prefix(\"regex:\") {\n\n (exp_err, SqlErrorMatchType::Regex)\n\n } else if let Some(exp_err) = expected_error.strip_prefix(\"contains:\") {\n\n (exp_err, SqlErrorMatchType::Contains)\n\n } else if let Some(exp_err) = expected_error.strip_prefix(\"exact:\") {\n", "file_path": "src/testdrive/src/parser.rs", "rank": 82, "score": 265177.5031644782 }, { "content": "/// Creates a source dataflow operator from a connector implementing [SimpleSource](SimpleSource)\n\npub fn create_source_simple<G, C>(\n\n config: SourceConfig<G>,\n\n connector: C,\n\n) -> (\n\n (\n\n timely::dataflow::Stream<G, (Row, Timestamp, Diff)>,\n\n timely::dataflow::Stream<G, SourceError>,\n\n ),\n\n 
Option<SourceToken>,\n\n)\n\nwhere\n\n G: Scope<Timestamp = Timestamp>,\n\n C: SimpleSource + Send + 'static,\n\n{\n\n let SourceConfig {\n\n id,\n\n name,\n\n upstream_name,\n\n scope,\n\n active,\n", "file_path": "src/dataflow/src/source/mod.rs", "rank": 83, "score": 265016.719505078 }, { "content": "pub fn resolve_names_expr(qcx: &mut QueryContext, expr: Expr<Raw>) -> Result<Expr<Aug>, PlanError> {\n\n let mut n = NameResolver::new(qcx.scx.catalog);\n\n let result = n.fold_expr(expr);\n\n n.status?;\n\n qcx.ids.extend(n.ids.iter());\n\n Ok(result)\n\n}\n\n\n", "file_path": "src/sql/src/names.rs", "rank": 84, "score": 262882.0954282699 }, { "content": "/// Writes a [`NaiveDateTime`] timestamp to `buf`.\n\npub fn format_timestamp<F>(buf: &mut F, ts: NaiveDateTime) -> Nestable\n\nwhere\n\n F: FormatBuffer,\n\n{\n\n let (year_ad, year) = ts.year_ce();\n\n write!(buf, \"{:04}-{}\", year, ts.format(\"%m-%d %H:%M:%S\"));\n\n format_nanos_to_micros(buf, ts.timestamp_subsec_nanos());\n\n if !year_ad {\n\n write!(buf, \" BC\");\n\n }\n\n // This always needs escaping because of the whitespace\n\n Nestable::MayNeedEscaping\n\n}\n\n\n", "file_path": "src/repr/src/strconv.rs", "rank": 85, "score": 262304.32534917316 }, { "content": "/// Extracts a self-contained slice of commands for the next parse node.\n\nfn extract_value<'a>(values: &mut &'a [Command<'a>]) -> &'a [Command<'a>] {\n\n let result = match values[0] {\n\n Command::Array(further) => &values[..further + 1],\n\n Command::Map(further) => &values[..further + 1],\n\n _ => &values[0..1],\n\n };\n\n *values = &values[result.len()..];\n\n result\n\n}\n\n\n", "file_path": "src/repr/src/adt/jsonb.rs", "rank": 86, "score": 260961.58433625638 }, { "content": "/// Normalizes an identifier that represents a column name.\n\npub fn column_name(id: Ident) -> ColumnName {\n\n ColumnName::from(ident(id))\n\n}\n\n\n", "file_path": "src/sql/src/normalize.rs", "rank": 87, "score": 257686.54983856005 }, { "content": "pub fn 
build_fail_sql(cmd: FailSqlCommand) -> Result<FailSqlAction, anyhow::Error> {\n\n let stmts = mz_sql_parser::parser::parse_statements(&cmd.query)\n\n .map_err(|e| format!(\"unable to parse SQL: {}: {}\", cmd.query, e));\n\n\n\n // Allow for statements that could not be parsed.\n\n // This way such statements can be used for negative testing in .td files\n\n let stmt = match stmts {\n\n Ok(s) => {\n\n if s.len() != 1 {\n\n bail!(\"expected one statement, but got {}\", s.len());\n\n }\n\n Some(s.into_element())\n\n }\n\n Err(_) => None,\n\n };\n\n\n\n let expected_error = match cmd.error_match_type {\n\n SqlErrorMatchType::Contains => ErrorMatcher::Contains(cmd.expected_error),\n\n SqlErrorMatchType::Exact => ErrorMatcher::Exact(cmd.expected_error),\n\n SqlErrorMatchType::Regex => ErrorMatcher::Regex(Regex::new(&cmd.expected_error)?),\n", "file_path": "src/testdrive/src/action/sql.rs", "rank": 88, "score": 255896.94938670634 }, { "content": "/// Same as [`nonrecursive_dft`], but allows changes to be made to the graph.\n\npub fn nonrecursive_dft_mut<Graph, NodeId, AtEnter, AtExit>(\n\n graph: &mut Graph,\n\n root: NodeId,\n\n at_enter: &mut AtEnter,\n\n at_exit: &mut AtExit,\n\n) where\n\n NodeId: std::cmp::Eq + std::hash::Hash + Clone,\n\n AtEnter: FnMut(&mut Graph, &NodeId) -> Vec<NodeId>,\n\n AtExit: FnMut(&mut Graph, &NodeId) -> (),\n\n{\n\n // Code in this method is identical to the code in `nonrecursive_dft`.\n\n let mut entered = Vec::new();\n\n let mut exited = HashSet::new();\n\n\n\n let children = at_enter(graph, &root);\n\n entered_node(&mut entered, root, children);\n\n while !entered.is_empty() {\n\n if let Some(to_enter) = find_next_child_to_enter(&mut entered, &mut exited) {\n\n let children = at_enter(graph, &to_enter);\n\n entered_node(&mut entered, to_enter, children);\n\n } else {\n\n let (to_exit, _) = entered.pop().unwrap();\n\n at_exit(graph, &to_exit);\n\n exited.insert(to_exit);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/ore/src/graph.rs", 
"rank": 89, "score": 254496.21987900086 }, { "content": "/// A [`mz_dataflow_types::client::Client`] implementation that intercepts responses from the\n\n/// dataflow server.\n\n///\n\n/// The implementation of the `send` method is unchanged. The implementation of\n\n/// `recv`, however, only presents the responses that have been explicitly\n\n/// forwarded via `forward_response`. To access the actual responses from\n\n/// the underlying dataflow client, call `try_intercepting_recv`.\n\nstruct InterceptingDataflowClient<C> {\n\n inner: Arc<TokioMutex<C>>,\n\n feedback_tx: mpsc::UnboundedSender<mz_dataflow_types::client::Response>,\n\n feedback_rx: Arc<TokioMutex<mpsc::UnboundedReceiver<mz_dataflow_types::client::Response>>>,\n\n}\n\n\n\nimpl<C> Clone for InterceptingDataflowClient<C> {\n\n fn clone(&self) -> InterceptingDataflowClient<C> {\n\n InterceptingDataflowClient {\n\n inner: Arc::clone(&self.inner),\n\n feedback_tx: self.feedback_tx.clone(),\n\n feedback_rx: Arc::clone(&self.feedback_rx),\n\n }\n\n }\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl<C> mz_dataflow_types::client::Client for InterceptingDataflowClient<C>\n\nwhere\n\n C: mz_dataflow_types::client::Client,\n", "file_path": "src/coordtest/src/lib.rs", "rank": 90, "score": 252963.88427287881 }, { "content": "/// Apply all available rewrite rules to the model.\n\npub fn rewrite_model(model: &mut Model) {\n\n let rules: Vec<Box<dyn ApplyRule>> = vec![\n\n // simplify outer joins first\n\n Box::new(rule::simplify_outer_joins::SimplifyOuterJoins),\n\n ];\n\n apply_rules_to_model(model, rules);\n\n model.garbage_collect();\n\n\n\n // At the end of the process, update box and quantifier ids to make it\n\n // easier to compare the graph before and after optimization.\n\n model.update_ids();\n\n}\n\n\n", "file_path": "src/sql/src/query_model/rewrite/mod.rs", "rank": 91, "score": 250202.58621604345 }, { "content": "/// Creates a `reqwest` client builder that obeys the system proxy\n\n/// 
configuration.\n\n///\n\n/// For details about the system proxy configuration, see the\n\n/// [crate documentation](crate).\n\npub fn client_builder() -> ClientBuilder {\n\n let proxy = reqwest::Proxy::custom(move |url| {\n\n if PROXY_CONFIG.exclude(Some(url.scheme()), url.host_str(), url.port()) {\n\n return None;\n\n }\n\n if let Some(http_proxy) = PROXY_CONFIG.http_proxy() {\n\n if url.scheme() == \"http\" {\n\n return Some(http_proxy.to_string());\n\n }\n\n }\n\n if let Some(https_proxy) = PROXY_CONFIG.https_proxy() {\n\n if url.scheme() == \"https\" {\n\n return Some(https_proxy.to_string());\n\n }\n\n }\n\n if let Some(all_proxy) = PROXY_CONFIG.all_proxy() {\n\n return Some(all_proxy.to_string());\n\n }\n\n None\n\n });\n\n reqwest::ClientBuilder::new().proxy(proxy)\n\n}\n\n\n", "file_path": "src/http-proxy/src/reqwest.rs", "rank": 92, "score": 248575.99755569335 }, { "content": "// Rewrites all function references to have `pg_catalog` qualification; this\n\n// is necessary to support resolving all built-in functions to the catalog.\n\n// (At the time of writing Materialize did not support user-defined\n\n// functions.)\n\n//\n\n// The approach is to prepend `pg_catalog` to all `UnresolvedObjectName`\n\n// names that could refer to functions.\n\nfn ast_use_pg_catalog_0_7_1(stmt: &mut mz_sql::ast::Statement<Raw>) -> Result<(), anyhow::Error> {\n\n fn normalize_function_name(name: &mut UnresolvedObjectName) {\n\n if name.0.len() == 1 {\n\n let func_name = name.to_string();\n\n for (schema, funcs) in &[\n\n (PG_CATALOG_SCHEMA, &*mz_sql::func::PG_CATALOG_BUILTINS),\n\n (MZ_CATALOG_SCHEMA, &*mz_sql::func::MZ_CATALOG_BUILTINS),\n\n (MZ_INTERNAL_SCHEMA, &*mz_sql::func::MZ_INTERNAL_BUILTINS),\n\n ] {\n\n if funcs.contains_key(func_name.as_str()) {\n\n *name = UnresolvedObjectName(vec![Ident::new(*schema), name.0.remove(0)]);\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n\n\n struct FuncNormalizer;\n\n\n\n impl<'ast> VisitMut<'ast, Raw> for FuncNormalizer {\n", "file_path": 
"src/coord/src/catalog/migrate.rs", "rank": 93, "score": 247636.68055997527 }, { "content": "/// Converts the timestamp `dt`, which is assumed to be in the time of the timezone `tz` to a timestamptz in UTC.\n\n/// This operation is fallible because certain timestamps at timezones that observe DST are simply impossible or\n\n/// ambiguous. In case of ambiguity (when a hour repeats) we will prefer the latest variant, and when an hour is\n\n/// impossible, we will attempt to fix it by advancing it. For example, `EST` and `2020-11-11T12:39:14` would return\n\n/// `2020-11-11T17:39:14Z`. A DST observing timezone like `America/New_York` would cause the following DST anomalies:\n\n/// `2020-11-01T00:59:59` -> `2020-11-01T04:59:59Z` and `2020-11-01T01:00:00` -> `2020-11-01T06:00:00Z`\n\n/// `2020-03-08T02:59:59` -> `2020-03-08T07:59:59Z` and `2020-03-08T03:00:00` -> `2020-03-08T07:00:00Z`\n\nfn timezone_timestamp(tz: Timezone, mut dt: NaiveDateTime) -> Result<Datum<'static>, EvalError> {\n\n let offset = match tz {\n\n Timezone::FixedOffset(offset) => offset,\n\n Timezone::Tz(tz) => match tz.offset_from_local_datetime(&dt).latest() {\n\n Some(offset) => offset.fix(),\n\n None => {\n\n dt += Duration::hours(1);\n\n tz.offset_from_local_datetime(&dt)\n\n .latest()\n\n .ok_or(EvalError::InvalidTimezoneConversion)?\n\n .fix()\n\n }\n\n },\n\n };\n\n Ok(DateTime::from_utc(dt - offset, Utc).into())\n\n}\n\n\n", "file_path": "src/expr/src/scalar/func.rs", "rank": 94, "score": 247614.8409718901 }, { "content": "/// Converts `s` into a [proc_macro2::TokenStream]\n\npub fn tokenize(s: &str) -> Result<TokenStream, String> {\n\n s.parse::<TokenStream>().map_err_to_string()\n\n}\n\n\n", "file_path": "src/lowertest/src/lib.rs", "rank": 95, "score": 244708.47825058096 }, { "content": "pub fn build_verify_timestamp_compaction_action(\n\n mut cmd: BuiltinCommand,\n\n) -> Result<VerifyTimestampCompactionAction, anyhow::Error> {\n\n let source = cmd.args.string(\"source\")?;\n\n let max_size 
= cmd.args.opt_parse(\"max-size\")?.unwrap_or(3);\n\n let permit_progress = cmd.args.opt_bool(\"permit-progress\")?.unwrap_or(false);\n\n cmd.args.done()?;\n\n Ok(VerifyTimestampCompactionAction {\n\n source,\n\n max_size,\n\n permit_progress,\n\n })\n\n}\n\n\n\n#[async_trait]\n\nimpl Action for VerifyTimestampCompactionAction {\n\n async fn undo(&self, _: &mut State) -> Result<(), anyhow::Error> {\n\n // Can't undo a verification.\n\n Ok(())\n\n }\n", "file_path": "src/testdrive/src/action/verify_timestamp_compaction.rs", "rank": 96, "score": 244693.34007745297 }, { "content": "/// Inline views used in one other view, and in no exported objects.\n\nfn inline_views(dataflow: &mut DataflowDesc) -> Result<(), TransformError> {\n\n // We cannot inline anything whose `BuildDesc::id` appears in either the\n\n // `index_exports` or `sink_exports` of `dataflow`, because we lose our\n\n // ability to name it.\n\n\n\n // A view can / should be in-lined in another view if it is only used by\n\n // one subsequent view. 
If there are two distinct views that have not\n\n // themselves been merged, then too bad and it doesn't get inlined.\n\n\n\n // Starting from the *last* object to build, walk backwards and inline\n\n // any view that is neither referenced by a `index_exports` nor\n\n // `sink_exports` nor more than two remaining objects to build.\n\n\n\n for index in (0..dataflow.objects_to_build.len()).rev() {\n\n // Capture the name used by others to reference this view.\n\n let global_id = dataflow.objects_to_build[index].id;\n\n // Determine if any exports directly reference this view.\n\n let mut occurs_in_export = false;\n\n for (_gid, sink_desc) in dataflow.sink_exports.iter() {\n\n if sink_desc.from == global_id {\n", "file_path": "src/transform/src/dataflow.rs", "rank": 97, "score": 244193.37483619992 }, { "content": "/// Rewrites predicates that contain subqueries so that the subqueries\n\n/// appear in their own later predicate when possible.\n\n///\n\n/// For example, this function rewrites this expression\n\n///\n\n/// ```text\n\n/// Filter {\n\n/// predicates: [a = b AND EXISTS (<subquery 1>) AND c = d AND (<subquery 2>) = e]\n\n/// }\n\n/// ```\n\n///\n\n/// like so:\n\n///\n\n/// ```text\n\n/// Filter {\n\n/// predicates: [\n\n/// a = b AND c = d,\n\n/// EXISTS (<subquery>),\n\n/// (<subquery 2>) = e,\n\n/// ]\n\n/// }\n\n/// ```\n\n///\n\n/// The rewrite causes decorrelation to incorporate prior predicates into\n\n/// the outer relation upon which the subquery is evaluated. In the above\n\n/// rewritten example, the `EXISTS (<subquery>)` will only be evaluated for\n\n/// outer rows where `a = b AND c = d`. The second subquery, `(<subquery 2>)\n\n/// = e`, will be further restricted to outer rows that match `A = b AND c =\n\n/// d AND EXISTS(<subquery>)`. 
This can vastly reduce the cost of the\n\n/// subquery, especially when the original conjunction contains join keys.\n\npub fn split_subquery_predicates(expr: &mut HirRelationExpr) {\n\n fn walk_relation(expr: &mut HirRelationExpr) {\n\n expr.visit_mut(0, &mut |expr, _| match expr {\n\n HirRelationExpr::Map { scalars, .. } => {\n\n for scalar in scalars {\n\n walk_scalar(scalar);\n\n }\n\n }\n\n HirRelationExpr::CallTable { exprs, .. } => {\n\n for expr in exprs {\n\n walk_scalar(expr);\n\n }\n\n }\n\n HirRelationExpr::Filter { predicates, .. } => {\n\n let mut subqueries = vec![];\n\n for predicate in &mut *predicates {\n\n walk_scalar(predicate);\n\n extract_conjuncted_subqueries(predicate, &mut subqueries);\n\n }\n\n // TODO(benesch): we could be smarter about the order in which\n", "file_path": "src/sql/src/plan/transform_expr.rs", "rank": 98, "score": 243860.11200562888 }, { "content": "pub fn add_timestamp_months(\n\n dt: NaiveDateTime,\n\n mut months: i32,\n\n) -> Result<NaiveDateTime, EvalError> {\n\n if months == 0 {\n\n return Ok(dt);\n\n }\n\n\n\n let (mut year, mut month, mut day) = (dt.year(), dt.month0() as i32, dt.day());\n\n let years = months / 12;\n\n year = year\n\n .checked_add(years)\n\n .ok_or(EvalError::TimestampOutOfRange)?;\n\n\n\n months %= 12;\n\n // positive modulus is easier to reason about\n\n if months < 0 {\n\n year -= 1;\n\n months += 12;\n\n }\n", "file_path": "src/expr/src/scalar/func.rs", "rank": 99, "score": 243594.63740672672 } ]
Rust
src/driver/text.rs
jutuon/vga
678dc64cf3cb3fd17fb5bd3ce6cdb92f10fab64c
use volatile::Volatile; use crate::io::{ VIDEO_RAM_START_ADDRESS, MemoryMappedIo, PortIo, }; use crate::raw::{ VgaRegisters, }; pub use vga_framebuffer::Colour; use vga_framebuffer::Char; pub const TEXT_BUFFER_FIST_BYTE_ADDRESS: usize = 0xB8000; pub const VIDEO_RAM_BYTES_BEFORE_TEXT: usize = TEXT_BUFFER_FIST_BYTE_ADDRESS - VIDEO_RAM_START_ADDRESS; pub const VGA_TEXT_WIDTH: usize = 80; pub const VGA_TEXT_HEIGHT: usize = 25; pub const VGA_TEXT_CHAR_COUNT: usize = VGA_TEXT_WIDTH * VGA_TEXT_HEIGHT; pub const TEXT_BUFFER_BYTE_COUNT: usize = 80 * 25 * 2; pub struct TextMode<T: PortIo, U: MemoryMappedIo> { registers: VgaRegisters<T>, ram: U, } impl <T: PortIo, U: MemoryMappedIo> TextMode<T, U> { pub fn new(io: T, ram: U) -> Self { Self { registers: VgaRegisters::new(io), ram, } } pub fn vga_text_ram(&self) -> &[Volatile<u8>] { let (_, text_buffer_and_other_ram) = self.ram.video_ram().split_at(VIDEO_RAM_BYTES_BEFORE_TEXT); let (text_buffer, _) = text_buffer_and_other_ram.split_at(TEXT_BUFFER_BYTE_COUNT); text_buffer } pub fn vga_text_ram_mut(&mut self) -> &mut [Volatile<u8>] { let (_, text_buffer_and_other_ram) = self.ram.video_ram_mut().split_at_mut(VIDEO_RAM_BYTES_BEFORE_TEXT); let (text_buffer, _) = text_buffer_and_other_ram.split_at_mut(TEXT_BUFFER_BYTE_COUNT); text_buffer } pub fn vga_chars(&self) -> impl Iterator<Item=VgaCharRef<'_>> { VgaCharRef::raw_slice_to_ref_slice(self.vga_text_ram()) } pub fn vga_chars_mut(&mut self) -> impl Iterator<Item=VgaCharRefMut<'_>> { VgaCharRefMut::raw_slice_to_ref_mut_slice(self.vga_text_ram_mut()) } pub fn attribute_bit_7(&mut self, setting: AttributeBit7) { let value = setting == AttributeBit7::Blink; self.registers.attribute_controller().ar10().modify(|_, w| w.enable_blinking_slash_select_background_intensity().bit(value)); } pub fn read_char(&self, character_i: usize) -> VgaChar { self.vga_chars().nth(character_i).unwrap().read() } pub fn write_char(&mut self, character_i: usize, vga_char: VgaChar) { 
self.vga_chars_mut().nth(character_i).unwrap().write(vga_char) } pub fn clear_screen(&mut self, value: VgaChar) { for mut vga_char in self.vga_chars_mut() { vga_char.write(value) } } pub fn lines(&self) -> impl Iterator<Item=VgaCharLine<'_>> { self.vga_text_ram().chunks_exact(VGA_TEXT_WIDTH * 2).map(|raw_line| { VgaCharLine { raw_line } }) } pub fn lines_mut(&mut self) -> impl Iterator<Item=VgaCharLineMut<'_>> { self.vga_text_ram_mut().chunks_exact_mut(VGA_TEXT_WIDTH * 2).map(|raw_line| { VgaCharLineMut { raw_line } }) } pub fn copy_line_to(&mut self, src_i: usize, target_i: usize) { if src_i == target_i { return; } let src_and_target = self.lines_mut().enumerate().fold((None, None), |(src, target), (i, line)| { if i == src_i { return (Some(line), target) } if i == target_i { return (src, Some(line)) } (src, target) }); match src_and_target { (None, _) => panic!("source index '{}' is out of bounds", src_i), (_, None) => panic!("target index '{}' is out of bounds", target_i), (Some(src), Some(mut target)) => { target.write_line(&src) } } } pub fn scroll(&mut self) { self.scroll_range(..); } fn scroll_inclusive_range(&mut self, range: core::ops::RangeInclusive<usize>) { let (start_i, mut end_i) = range.into_inner(); if end_i < start_i { return; } if start_i >= VGA_TEXT_HEIGHT { return; } if end_i >= VGA_TEXT_HEIGHT { end_i = VGA_TEXT_HEIGHT - 1; } let (mut iter, copy_count) = if start_i == 0 { (self.lines_mut().skip(0), end_i) } else { (self.lines_mut().skip(start_i - 1), end_i - start_i + 1) }; let mut target = iter.next().unwrap(); let iter = iter.take(copy_count); for src in iter { target.write_line(&src); target = src; } } pub fn scroll_range<R: core::ops::RangeBounds<usize>>(&mut self, range: R) { use core::ops::Bound; let start_i = match range.start_bound() { Bound::Included(&i) => i, Bound::Excluded(&i) if i == usize::max_value() => return, Bound::Excluded(&i) => i + 1, Bound::Unbounded => 0, }; let end_i = match range.end_bound() { Bound::Included(&i) => i, 
Bound::Excluded(&i) if i == 0 => return, Bound::Excluded(&i) => i - 1, Bound::Unbounded => VGA_TEXT_HEIGHT - 1, }; self.scroll_inclusive_range(start_i..=end_i); } pub fn cursor_visibility(&mut self) -> bool { !self.registers.crt_controller().cr0a().read().text_cursor_off().bit() } pub fn set_cursor_visibility(&mut self, visible: bool) { self.registers.crt_controller().cr0a().modify(|_, w| w.text_cursor_off().bit(!visible)); } pub fn cursor_height(&mut self) -> (u8, u8) { let start = self.registers.crt_controller().cr0a().read().text_cursor_start().bits(); let end = self.registers.crt_controller().cr0b().read().text_cursor_end().bits(); (start, end) } pub fn set_cursor_height(&mut self, start: u8, end: u8) { assert!(start <= end, "error: start > end, start = {}, end = {}", start, end); self.registers.crt_controller().cr0a().modify(|_, w| w.text_cursor_start().bits(start)); self.registers.crt_controller().cr0b().modify(|_, w| w.text_cursor_end().bits(end)); } pub fn cursor_character_index(&mut self) -> Result<usize, IndexOutOfBounds> { let low_byte = self.registers.crt_controller().cr0f().read().text_cursor_location_low_byte().bits(); let high_byte = self.registers.crt_controller().cr0e().read().text_cursor_location_high_byte().bits(); let location = ((high_byte as u16) << 8) | low_byte as u16; if (location as usize) < VGA_TEXT_CHAR_COUNT { Ok(location as usize) } else { Err(IndexOutOfBounds(location)) } } pub fn set_cursor_character_index(&mut self, character_i: usize) { if character_i >= VGA_TEXT_CHAR_COUNT { panic!("Max value for character_i is '{}'", VGA_TEXT_CHAR_COUNT); } let low_byte = character_i as u8; let high_byte = (character_i >> 8) as u8; self.registers.crt_controller().cr0f().modify(|_, w| w.text_cursor_location_low_byte().bits(low_byte)); self.registers.crt_controller().cr0e().modify(|_, w| w.text_cursor_location_high_byte().bits(high_byte)); } } #[derive(Debug)] pub struct IndexOutOfBounds(pub u16); #[derive(Debug, Clone, Copy, PartialEq)] pub enum 
AttributeBit7 { Blink, Intensity, } pub struct VgaCharLine<'a> { raw_line: &'a [Volatile<u8>], } impl VgaCharLine<'_> { pub fn iter(&self) -> impl Iterator<Item=VgaCharRef<'_>> { VgaCharRef::raw_slice_to_ref_slice(self.raw_line) } } pub struct VgaCharLineMut<'a> { raw_line: &'a mut [Volatile<u8>], } impl VgaCharLineMut<'_> { pub fn iter_mut(&mut self) -> impl Iterator<Item=VgaCharRefMut<'_>> { VgaCharRefMut::raw_slice_to_ref_mut_slice(self.raw_line) } pub fn iter(&self) -> impl Iterator<Item=VgaCharRef<'_>> { VgaCharRef::raw_slice_to_ref_slice(self.raw_line) } pub fn write_line<'a, T: Into<VgaCharLine<'a>>>(&mut self, src: T) { for (src, mut target) in src.into().iter().zip(self.iter_mut()) { target.write(src.read()) } } pub fn clear_with(&mut self, value: VgaChar) { for mut target in self.iter_mut() { target.write(value) } } } impl <'a, 'b> From<&'b VgaCharLineMut<'a>> for VgaCharLine<'b> { fn from(value: &'b VgaCharLineMut<'a>) -> Self { Self { raw_line: value.raw_line } } } impl <'a> From<VgaCharLineMut<'a>> for VgaCharLine<'a> { fn from(value: VgaCharLineMut<'a>) -> Self { Self { raw_line: value.raw_line } } } pub struct VgaCharRef<'a> { character: &'a Volatile<u8>, attributes: &'a Volatile<u8>, } impl VgaCharRef<'_> { fn raw_slice_to_ref_slice(raw: &[Volatile<u8>]) -> impl Iterator<Item=VgaCharRef<'_>> { raw.chunks_exact(2).map(|data| { VgaCharRef { character: &data[0], attributes: &data[1], } }) } pub fn read(&self) -> VgaChar { VgaChar { character: self.character.read(), attributes: self.attributes.read(), } } } pub struct VgaCharRefMut<'a> { character: &'a mut Volatile<u8>, attributes: &'a mut Volatile<u8>, } impl VgaCharRefMut<'_> { fn raw_slice_to_ref_mut_slice(raw: &mut [Volatile<u8>]) -> impl Iterator<Item=VgaCharRefMut<'_>> { raw.chunks_exact_mut(2).map(|data| { let (character, attributes) = data.split_first_mut().unwrap(); VgaCharRefMut { character: character, attributes: &mut attributes[0], } }) } fn as_ref(&self) -> VgaCharRef<'_> { VgaCharRef { 
character: self.character, attributes: self.attributes, } } pub fn write(&mut self, value: VgaChar) { self.character.write(value.character); self.attributes.write(value.attributes); } pub fn read(&self) -> VgaChar { self.as_ref().read() } } #[derive(Debug, Clone, Copy, PartialEq)] pub struct VgaChar { pub character: u8, pub attributes: u8, } impl VgaChar { const FOREGROUND_COLOR_MASK: u8 = 0b0000_0111; const FOREGROUND_INTENSITY_MASK: u8 = 0b0000_1000; const BACKGROUND_COLOR_MASK: u8 = 0b0111_0000; const BACKGROUND_INTENSITY_OR_BLINK_MASK: u8 = 0b1000_0000; pub fn empty() -> Self { Self { character: 0, attributes: 0, } } pub fn new(c: char) -> Self { Self::empty().character(c) } pub fn character(mut self, c: char) -> Self { self.character = Char::map_char(c).to_byte(); self } pub fn foreground_color(mut self, color: Colour) -> Self { self.attributes &= !Self::FOREGROUND_COLOR_MASK; self.attributes |= color as u8; self } pub fn foreground_intensity(mut self, value: bool) -> Self { if value { self.attributes |= Self::FOREGROUND_INTENSITY_MASK; } else { self.attributes &= !Self::FOREGROUND_INTENSITY_MASK; } self } pub fn background_color(mut self, color: Colour) -> Self { self.attributes &= !Self::BACKGROUND_COLOR_MASK; let value = (color as u8) << 4; self.attributes |= value; self } fn bit_7(mut self, value: bool) -> Self { if value { self.attributes |= Self::BACKGROUND_INTENSITY_OR_BLINK_MASK; } else { self.attributes &= !Self::BACKGROUND_INTENSITY_OR_BLINK_MASK; } self } pub fn blink(self, value: bool) -> Self { self.bit_7(value) } pub fn background_intensity(self, value: bool) -> Self { self.bit_7(value) } }
use volatile::Volatile; use crate::io::{ VIDEO_RAM_START_ADDRESS, MemoryMappedIo, PortIo, }; use crate::raw::{ VgaRegisters, }; pub use vga_framebuffer::Colour; use vga_framebuffer::Char; pub const TEXT_BUFFER_FIST_BYTE_ADDRESS: usize = 0xB8000; pub const VIDEO_RAM_BYTES_BEFORE_TEXT: usize = TEXT_BUFFER_FIST_BYTE_ADDRESS - VIDEO_RAM_START_ADDRESS; pub const VGA_TEXT_WIDTH: usize = 80; pub const VGA_TEXT_HEIGHT: usize = 25; pub const VGA_TEXT_CHAR_COUNT: usize = VGA_TEXT_WIDTH * VGA_TEXT_HEIGHT; pub const TEXT_BUFFER_BYTE_COUNT: usize = 80 * 25 * 2; pub struct TextMode<T: PortIo, U: MemoryMappedIo> { registers: VgaRegisters<T>, ram: U, } impl <T: PortIo, U: MemoryMappedIo> TextMode<T, U> { pub fn new(io: T, ram: U) -> Self { Self { registers: VgaRegisters::new(io), ram, } } pub fn vga_text_ram(&self) -> &[Volatile<u8>] { let (_, text_buffer_and_other_ram) = self.ram.video_ram().split_at(VIDEO_RAM_BYTES_BEFORE_TEXT); let (text_buffer, _) = text_buffer_and_other_ram.split_at(TEXT_BUFFER_BYTE_COUNT); text_buffer } pub fn vga_text_ram_mut(&mut self) -> &mut [Volatile<u8>] { let (_, text_buffer_and_other_ram) = self.ram.video_ram_mut().split_at_mut(VIDEO_RAM_BYTES_BEFORE_TEXT); let (text_buffer, _) = text_buffer_and_other_ram.split_at_mut(TEXT_BUFFER_BYTE_COUNT); text_buffer } pub fn vga_chars(&self) -> impl Iterator<Item=VgaCharRef<'_>> { VgaCharRef::raw_slice_to_ref_slice(self.vga_text_ram()) } pub fn vga_chars_mut(&mut self) -> impl Iterator<Item=VgaCharRefMut<'_>> { VgaCharRefMut::raw_slice_to_ref_mut_slice(self.vga_text_ram_mut()) } pub fn attribute_bit_7(&mut self, setting: AttributeBit7) { let value = setting == AttributeBit7::Blink; self.registers.attribute_controller().ar10().modify(|_, w| w.enable_blinking_slash_select_background_intensity().bit(value)); } pub fn read_char(&self, character_i: usize)
rator<Item=VgaCharRefMut<'_>> { VgaCharRefMut::raw_slice_to_ref_mut_slice(self.raw_line) } pub fn iter(&self) -> impl Iterator<Item=VgaCharRef<'_>> { VgaCharRef::raw_slice_to_ref_slice(self.raw_line) } pub fn write_line<'a, T: Into<VgaCharLine<'a>>>(&mut self, src: T) { for (src, mut target) in src.into().iter().zip(self.iter_mut()) { target.write(src.read()) } } pub fn clear_with(&mut self, value: VgaChar) { for mut target in self.iter_mut() { target.write(value) } } } impl <'a, 'b> From<&'b VgaCharLineMut<'a>> for VgaCharLine<'b> { fn from(value: &'b VgaCharLineMut<'a>) -> Self { Self { raw_line: value.raw_line } } } impl <'a> From<VgaCharLineMut<'a>> for VgaCharLine<'a> { fn from(value: VgaCharLineMut<'a>) -> Self { Self { raw_line: value.raw_line } } } pub struct VgaCharRef<'a> { character: &'a Volatile<u8>, attributes: &'a Volatile<u8>, } impl VgaCharRef<'_> { fn raw_slice_to_ref_slice(raw: &[Volatile<u8>]) -> impl Iterator<Item=VgaCharRef<'_>> { raw.chunks_exact(2).map(|data| { VgaCharRef { character: &data[0], attributes: &data[1], } }) } pub fn read(&self) -> VgaChar { VgaChar { character: self.character.read(), attributes: self.attributes.read(), } } } pub struct VgaCharRefMut<'a> { character: &'a mut Volatile<u8>, attributes: &'a mut Volatile<u8>, } impl VgaCharRefMut<'_> { fn raw_slice_to_ref_mut_slice(raw: &mut [Volatile<u8>]) -> impl Iterator<Item=VgaCharRefMut<'_>> { raw.chunks_exact_mut(2).map(|data| { let (character, attributes) = data.split_first_mut().unwrap(); VgaCharRefMut { character: character, attributes: &mut attributes[0], } }) } fn as_ref(&self) -> VgaCharRef<'_> { VgaCharRef { character: self.character, attributes: self.attributes, } } pub fn write(&mut self, value: VgaChar) { self.character.write(value.character); self.attributes.write(value.attributes); } pub fn read(&self) -> VgaChar { self.as_ref().read() } } #[derive(Debug, Clone, Copy, PartialEq)] pub struct VgaChar { pub character: u8, pub attributes: u8, } impl VgaChar { const 
FOREGROUND_COLOR_MASK: u8 = 0b0000_0111; const FOREGROUND_INTENSITY_MASK: u8 = 0b0000_1000; const BACKGROUND_COLOR_MASK: u8 = 0b0111_0000; const BACKGROUND_INTENSITY_OR_BLINK_MASK: u8 = 0b1000_0000; pub fn empty() -> Self { Self { character: 0, attributes: 0, } } pub fn new(c: char) -> Self { Self::empty().character(c) } pub fn character(mut self, c: char) -> Self { self.character = Char::map_char(c).to_byte(); self } pub fn foreground_color(mut self, color: Colour) -> Self { self.attributes &= !Self::FOREGROUND_COLOR_MASK; self.attributes |= color as u8; self } pub fn foreground_intensity(mut self, value: bool) -> Self { if value { self.attributes |= Self::FOREGROUND_INTENSITY_MASK; } else { self.attributes &= !Self::FOREGROUND_INTENSITY_MASK; } self } pub fn background_color(mut self, color: Colour) -> Self { self.attributes &= !Self::BACKGROUND_COLOR_MASK; let value = (color as u8) << 4; self.attributes |= value; self } fn bit_7(mut self, value: bool) -> Self { if value { self.attributes |= Self::BACKGROUND_INTENSITY_OR_BLINK_MASK; } else { self.attributes &= !Self::BACKGROUND_INTENSITY_OR_BLINK_MASK; } self } pub fn blink(self, value: bool) -> Self { self.bit_7(value) } pub fn background_intensity(self, value: bool) -> Self { self.bit_7(value) } }
-> VgaChar { self.vga_chars().nth(character_i).unwrap().read() } pub fn write_char(&mut self, character_i: usize, vga_char: VgaChar) { self.vga_chars_mut().nth(character_i).unwrap().write(vga_char) } pub fn clear_screen(&mut self, value: VgaChar) { for mut vga_char in self.vga_chars_mut() { vga_char.write(value) } } pub fn lines(&self) -> impl Iterator<Item=VgaCharLine<'_>> { self.vga_text_ram().chunks_exact(VGA_TEXT_WIDTH * 2).map(|raw_line| { VgaCharLine { raw_line } }) } pub fn lines_mut(&mut self) -> impl Iterator<Item=VgaCharLineMut<'_>> { self.vga_text_ram_mut().chunks_exact_mut(VGA_TEXT_WIDTH * 2).map(|raw_line| { VgaCharLineMut { raw_line } }) } pub fn copy_line_to(&mut self, src_i: usize, target_i: usize) { if src_i == target_i { return; } let src_and_target = self.lines_mut().enumerate().fold((None, None), |(src, target), (i, line)| { if i == src_i { return (Some(line), target) } if i == target_i { return (src, Some(line)) } (src, target) }); match src_and_target { (None, _) => panic!("source index '{}' is out of bounds", src_i), (_, None) => panic!("target index '{}' is out of bounds", target_i), (Some(src), Some(mut target)) => { target.write_line(&src) } } } pub fn scroll(&mut self) { self.scroll_range(..); } fn scroll_inclusive_range(&mut self, range: core::ops::RangeInclusive<usize>) { let (start_i, mut end_i) = range.into_inner(); if end_i < start_i { return; } if start_i >= VGA_TEXT_HEIGHT { return; } if end_i >= VGA_TEXT_HEIGHT { end_i = VGA_TEXT_HEIGHT - 1; } let (mut iter, copy_count) = if start_i == 0 { (self.lines_mut().skip(0), end_i) } else { (self.lines_mut().skip(start_i - 1), end_i - start_i + 1) }; let mut target = iter.next().unwrap(); let iter = iter.take(copy_count); for src in iter { target.write_line(&src); target = src; } } pub fn scroll_range<R: core::ops::RangeBounds<usize>>(&mut self, range: R) { use core::ops::Bound; let start_i = match range.start_bound() { Bound::Included(&i) => i, Bound::Excluded(&i) if i == 
usize::max_value() => return, Bound::Excluded(&i) => i + 1, Bound::Unbounded => 0, }; let end_i = match range.end_bound() { Bound::Included(&i) => i, Bound::Excluded(&i) if i == 0 => return, Bound::Excluded(&i) => i - 1, Bound::Unbounded => VGA_TEXT_HEIGHT - 1, }; self.scroll_inclusive_range(start_i..=end_i); } pub fn cursor_visibility(&mut self) -> bool { !self.registers.crt_controller().cr0a().read().text_cursor_off().bit() } pub fn set_cursor_visibility(&mut self, visible: bool) { self.registers.crt_controller().cr0a().modify(|_, w| w.text_cursor_off().bit(!visible)); } pub fn cursor_height(&mut self) -> (u8, u8) { let start = self.registers.crt_controller().cr0a().read().text_cursor_start().bits(); let end = self.registers.crt_controller().cr0b().read().text_cursor_end().bits(); (start, end) } pub fn set_cursor_height(&mut self, start: u8, end: u8) { assert!(start <= end, "error: start > end, start = {}, end = {}", start, end); self.registers.crt_controller().cr0a().modify(|_, w| w.text_cursor_start().bits(start)); self.registers.crt_controller().cr0b().modify(|_, w| w.text_cursor_end().bits(end)); } pub fn cursor_character_index(&mut self) -> Result<usize, IndexOutOfBounds> { let low_byte = self.registers.crt_controller().cr0f().read().text_cursor_location_low_byte().bits(); let high_byte = self.registers.crt_controller().cr0e().read().text_cursor_location_high_byte().bits(); let location = ((high_byte as u16) << 8) | low_byte as u16; if (location as usize) < VGA_TEXT_CHAR_COUNT { Ok(location as usize) } else { Err(IndexOutOfBounds(location)) } } pub fn set_cursor_character_index(&mut self, character_i: usize) { if character_i >= VGA_TEXT_CHAR_COUNT { panic!("Max value for character_i is '{}'", VGA_TEXT_CHAR_COUNT); } let low_byte = character_i as u8; let high_byte = (character_i >> 8) as u8; self.registers.crt_controller().cr0f().modify(|_, w| w.text_cursor_location_low_byte().bits(low_byte)); self.registers.crt_controller().cr0e().modify(|_, w| 
w.text_cursor_location_high_byte().bits(high_byte)); } } #[derive(Debug)] pub struct IndexOutOfBounds(pub u16); #[derive(Debug, Clone, Copy, PartialEq)] pub enum AttributeBit7 { Blink, Intensity, } pub struct VgaCharLine<'a> { raw_line: &'a [Volatile<u8>], } impl VgaCharLine<'_> { pub fn iter(&self) -> impl Iterator<Item=VgaCharRef<'_>> { VgaCharRef::raw_slice_to_ref_slice(self.raw_line) } } pub struct VgaCharLineMut<'a> { raw_line: &'a mut [Volatile<u8>], } impl VgaCharLineMut<'_> { pub fn iter_mut(&mut self) -> impl Ite
random
[ { "content": " pub trait RegisterRelIoW<T: RegisterGroup, U: Sized> {\n\n fn write(&mut self, rel_address: u16, value: U);\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 0, "score": 79613.21741051001 }, { "content": " pub trait RegisterAbsIoW<T: RegisterGroup, U: Sized> {\n\n fn write(&mut self, abs_address: u16, value: U);\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 1, "score": 79613.21741051004 }, { "content": " pub trait RegisterIndexIoW<T: RegisterGroup, U: Sized> {\n\n fn write(&mut self, index: u8, value: U);\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 2, "score": 79613.21741051003 }, { "content": " pub trait RegisterRelIoR<T: RegisterGroup, U: Sized> {\n\n fn read(&mut self, rel_address: u16) -> U;\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 3, "score": 62718.52242516009 }, { "content": " pub trait RegisterAbsIoR<T: RegisterGroup, U: Sized> {\n\n fn read(&mut self, abs_address: u16) -> U;\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 4, "score": 62718.52242516009 }, { "content": " pub trait RegisterIndexIoR<T: RegisterGroup, U: Sized> {\n\n fn read(&mut self, index: u8) -> U;\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 5, "score": 62718.52242516009 }, { "content": " pub trait RegisterGroup {}\n", "file_path": "src/raw/generated.rs", "rank": 6, "score": 51414.90857577377 }, { "content": " pub trait LocationRelW {\n\n const REL_ADDRESS_W: u16;\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 7, "score": 48414.48018388995 }, { "content": " pub trait LocationAbsW {\n\n const ABS_ADDRESS_W: u16;\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 8, "score": 48414.48018388995 }, { "content": " pub trait LocationIndexW {\n\n const INDEX_W: u8;\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 9, "score": 48414.48018388995 }, { "content": "pub trait PortIo {\n\n fn read(&mut self, port: u16) -> u8;\n\n fn write(&mut self, port: u16, data: u8);\n\n}\n\n\n\npub const VIDEO_RAM_START_ADDRESS: 
usize = 0xA0000;\n\npub const VIDEO_RAM_AREA_SIZE_IN_BYTES: usize = 128*1024;\n\n\n", "file_path": "src/io.rs", "rank": 10, "score": 30812.768646944656 }, { "content": "/// Access to VGA memory mapped video RAM.\n\n///\n\n/// Methods of this trait must return\n\n/// 128 KiB memory mapped video RAM area as a slice.\n\npub trait MemoryMappedIo {\n\n fn video_ram(&self) -> &[Volatile<u8>];\n\n fn video_ram_mut(&mut self) -> &mut [Volatile<u8>];\n\n}\n\n\n\npub struct StandardVideoRamLocation {\n\n ram: &'static mut [Volatile<u8>],\n\n}\n\n\n\nimpl MemoryMappedIo for StandardVideoRamLocation {\n\n fn video_ram(&self) -> &[Volatile<u8>] {\n\n self.ram\n\n }\n\n\n\n fn video_ram_mut(&mut self) -> &mut [Volatile<u8>] {\n\n self.ram\n\n }\n\n}\n\n\n\n\n", "file_path": "src/io.rs", "rank": 11, "score": 29107.58910570326 }, { "content": " pub trait InGroup {\n\n type Group: RegisterGroup;\n\n }\n\n}\n\npub mod general {\n\n use super::register_trait::*;\n\n pub struct GeneralRegisters<\n\n T: RegisterAbsIoR<GeneralGroup, u8>\n\n + RegisterAbsIoW<GeneralGroup, u8>\n\n + RegisterRelIoR<GeneralGroup, u8>\n\n + RegisterRelIoW<GeneralGroup, u8>,\n\n > {\n\n io: T,\n\n }\n\n impl<\n\n T: RegisterAbsIoR<GeneralGroup, u8>\n\n + RegisterAbsIoW<GeneralGroup, u8>\n\n + RegisterRelIoR<GeneralGroup, u8>\n\n + RegisterRelIoW<GeneralGroup, u8>,\n\n > GeneralRegisters<T>\n", "file_path": "src/raw/generated.rs", "rank": 12, "score": 28532.954592916547 }, { "content": " pub trait LocationRelR {\n\n const REL_ADDRESS_R: u16;\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 13, "score": 25321.549279789306 }, { "content": " pub trait LocationIndexR {\n\n const INDEX_R: u8;\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 14, "score": 25321.549279789306 }, { "content": " pub trait LocationAbsR {\n\n const ABS_ADDRESS_R: u16;\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 15, "score": 25321.549279789306 }, { "content": " }\n\n }\n\n #[doc = \"Writes to the register\"]\n\n 
#[inline]\n\n pub fn write<F>(&mut self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W { raw_bits: 0 };\n\n (f)(&mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n\n pub struct R {\n\n raw_bits: u8,\n\n }\n\n impl core::fmt::Debug for R {\n\n fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {\n\n fmt.debug_struct(\"CR13\")\n", "file_path": "src/raw/generated.rs", "rank": 16, "score": 28.91765953586777 }, { "content": " pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n\n #[doc = \"Overflow Register\"]\n\n pub struct CR07<\n\n 'a,\n\n T: RegisterIndexIoR<CrtControllerGroup, u8> + RegisterIndexIoW<CrtControllerGroup, u8>,\n\n > {\n\n io: &'a mut T,\n\n }\n\n pub mod cr07 {\n\n use super::super::register_trait::*;\n\n use super::CrtControllerGroup;\n\n impl<\n", "file_path": "src/raw/generated.rs", "rank": 17, "score": 28.84546283406913 }, { "content": " }\n\n #[doc = \"Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&mut self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W { raw_bits: 0 };\n\n (f)(&mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n\n pub struct R {\n\n raw_bits: u8,\n\n }\n\n impl core::fmt::Debug for R {\n\n fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {\n\n fmt.debug_struct(\"CR0F\")\n\n .field(\n", "file_path": "src/raw/generated.rs", "rank": 18, "score": 28.751513161111244 }, { "content": " pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n\n #[doc = \"Vertical Display Enable End 
Register\"]\n\n pub struct CR12<\n\n 'a,\n\n T: RegisterIndexIoR<CrtControllerGroup, u8> + RegisterIndexIoW<CrtControllerGroup, u8>,\n\n > {\n\n io: &'a mut T,\n\n }\n\n pub mod cr12 {\n\n use super::super::register_trait::*;\n\n use super::CrtControllerGroup;\n\n impl<\n", "file_path": "src/raw/generated.rs", "rank": 19, "score": 28.468410550649814 }, { "content": " pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n\n #[doc = \"Palette Write Index Register\"]\n\n pub struct DACWX<'a, T: RegisterAbsIoW<ColorPaletteGroup, u8>> {\n\n io: &'a mut T,\n\n }\n\n pub mod dacwx {\n\n use super::super::register_trait::*;\n\n use super::ColorPaletteGroup;\n\n impl<'a, T: RegisterAbsIoW<ColorPaletteGroup, u8>> LocationAbsW for super::DACWX<'a, T> {\n\n const ABS_ADDRESS_W: u16 = 968;\n\n }\n\n impl<'a, T: RegisterAbsIoW<ColorPaletteGroup, u8>> InGroup for super::DACWX<'a, T> {\n", "file_path": "src/raw/generated.rs", "rank": 20, "score": 28.426637557375695 }, { "content": " #[doc = \"Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&mut self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W { raw_bits: 0 };\n\n (f)(&mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n\n pub struct R {\n\n raw_bits: u8,\n\n }\n\n impl core::fmt::Debug for R {\n\n fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {\n\n fmt.debug_struct(\"CR02\")\n\n .field(\n\n \"horizontal_blanking_start\",\n", "file_path": "src/raw/generated.rs", "rank": 21, "score": 28.265944495647332 }, { "content": " type Group = ColorPaletteGroup;\n\n }\n\n impl<'a, T: RegisterAbsIoW<ColorPaletteGroup, u8>> super::DACWX<'a, T> {\n\n pub fn new(io: &'a mut T) -> Self {\n\n Self { io }\n\n }\n\n #[doc = \"Writes to the register\"]\n\n 
#[inline]\n\n pub fn write<F>(&mut self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W { raw_bits: 0 };\n\n (f)(&mut w);\n\n self.io.write(Self::ABS_ADDRESS_W, w.raw_bits);\n\n }\n\n }\n\n #[doc = \"Value read from the register\"]\n\n pub struct W {\n\n raw_bits: u8,\n", "file_path": "src/raw/generated.rs", "rank": 22, "score": 27.653992310796088 }, { "content": " impl W {\n\n #[doc = \"Bits 3:0\"]\n\n #[inline]\n\n pub fn enable_set_slash_reset_plane(&mut self) -> _ENABLE_SET_SLASH_RESET_PLANE<'_> {\n\n _ENABLE_SET_SLASH_RESET_PLANE { w: self }\n\n }\n\n }\n\n #[doc = \"Proxy\"]\n\n pub struct _ENABLE_SET_SLASH_RESET_PLANE<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _ENABLE_SET_SLASH_RESET_PLANE<'a> {\n\n const _MASK: u8 = 15;\n\n const _OFFSET: u8 = 0;\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n", "file_path": "src/raw/generated.rs", "rank": 23, "score": 27.38950075913122 }, { "content": " #[inline]\n\n pub fn write<F>(&mut self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W { raw_bits: 0 };\n\n (f)(&mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n\n pub struct R {\n\n raw_bits: u8,\n\n }\n\n impl core::fmt::Debug for R {\n\n fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {\n\n fmt.debug_struct(\"CR04\")\n\n .field(\n\n \"horizontal_sync_start\",\n\n &self.horizontal_sync_start().bits(),\n", "file_path": "src/raw/generated.rs", "rank": 24, "score": 27.02754187388275 }, { "content": " F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W { raw_bits: 0 };\n\n (f)(&mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n\n pub struct R {\n\n raw_bits: u8,\n\n }\n\n impl core::fmt::Debug for R 
{\n\n fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {\n\n fmt.debug_struct(\"CR06\")\n\n .field(\n\n \"vertical_total_bits_from_0_to_7\",\n\n &self.vertical_total_bits_from_0_to_7().bits(),\n\n )\n\n .finish()\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 25, "score": 26.97824581398079 }, { "content": " pub fn write<F>(&mut self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W { raw_bits: 0 };\n\n (f)(&mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n\n pub struct R {\n\n raw_bits: u8,\n\n }\n\n impl core::fmt::Debug for R {\n\n fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {\n\n fmt.debug_struct(\"CR15\")\n\n .field(\n\n \"vertical_blanking_start_bits_from_0_to_7\",\n\n &self.vertical_blanking_start_bits_from_0_to_7().bits(),\n\n )\n", "file_path": "src/raw/generated.rs", "rank": 26, "score": 26.87339154538784 }, { "content": "impl StandardVideoRamLocation {\n\n /// Create handle to VGA video RAM located at address `0xA0000`.\n\n pub unsafe fn new() -> Self {\n\n let start = VIDEO_RAM_START_ADDRESS as *mut Volatile<u8>;\n\n\n\n Self {\n\n ram: slice::from_raw_parts_mut(start, VIDEO_RAM_AREA_SIZE_IN_BYTES),\n\n }\n\n }\n\n}\n\n\n\n\n\nuse crate::raw::generated::{\n\n register_trait::{\n\n RegisterAbsIoR,\n\n RegisterAbsIoW,\n\n RegisterRelIoR,\n\n RegisterRelIoW,\n\n RegisterIndexIoR,\n\n RegisterIndexIoW,\n", "file_path": "src/io.rs", "rank": 27, "score": 26.847280965572487 }, { "content": " #[inline]\n\n pub fn write<F>(&mut self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W { raw_bits: 0 };\n\n (f)(&mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n\n pub struct R {\n\n raw_bits: u8,\n\n }\n\n impl core::fmt::Debug for R {\n\n fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {\n\n fmt.debug_struct(\"CR0D\")\n\n 
.field(\n\n \"start_address_low_byte\",\n\n &self.start_address_low_byte().bits(),\n", "file_path": "src/raw/generated.rs", "rank": 28, "score": 26.721242783509204 }, { "content": " pub fn bit(self, value: bool) -> &'a mut W {\n\n if value {\n\n self.w.raw_bits |= Self::_MASK;\n\n } else {\n\n self.w.raw_bits &= !Self::_MASK;\n\n }\n\n self.w\n\n }\n\n }\n\n #[doc = \"Proxy\"]\n\n pub struct _DOT_CLOCK<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _DOT_CLOCK<'a> {\n\n const _MASK: u8 = 8;\n\n const _OFFSET: u8 = 3;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n", "file_path": "src/raw/generated.rs", "rank": 29, "score": 26.663445434936374 }, { "content": " }\n\n impl<'a> _GRAPHICS_CONTROLLER_REGISTER_INDEX<'a> {\n\n const _MASK: u8 = 15;\n\n const _OFFSET: u8 = 0;\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n\n #[doc = \"Set/Reset Register\"]\n\n pub struct GR00<\n\n 'a,\n\n T: RegisterIndexIoR<GraphicsControllerGroup, u8>\n\n + RegisterIndexIoW<GraphicsControllerGroup, u8>,\n", "file_path": "src/raw/generated.rs", "rank": 31, "score": 26.538383426793857 }, { "content": " let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n\n #[doc = \"Data Rotate Register\"]\n\n pub struct GR03<\n\n 'a,\n\n T: RegisterIndexIoR<GraphicsControllerGroup, u8>\n\n + RegisterIndexIoW<GraphicsControllerGroup, u8>,\n\n > {\n\n io: &'a mut T,\n\n }\n\n pub mod gr03 {\n\n use super::super::register_trait::*;\n\n use super::GraphicsControllerGroup;\n\n impl<\n\n 'a,\n", "file_path": "src/raw/generated.rs", "rank": 32, "score": 26.525231877779657 }, { "content": " raw_bits: self.io.read(Self::INDEX_R),\n\n }\n\n }\n\n #[doc 
= \"Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&mut self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W { raw_bits: 0 };\n\n (f)(&mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n\n pub struct R {\n\n raw_bits: u8,\n\n }\n\n impl core::fmt::Debug for R {\n\n fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {\n", "file_path": "src/raw/generated.rs", "rank": 33, "score": 26.501646435370663 }, { "content": " let value = value >> Self::_OFFSET;\n\n SET_SLASH_RESET_PLANE_R::_Reserved(value)\n\n }\n\n #[doc = \"Value of the field as raw bits\"]\n\n #[inline]\n\n pub fn bits(&self) -> u8 {\n\n match *self {\n\n SET_SLASH_RESET_PLANE_R::_Reserved(value) => value,\n\n }\n\n }\n\n }\n\n #[doc = \"Value read from the register\"]\n\n pub struct W {\n\n raw_bits: u8,\n\n }\n\n impl W {\n\n #[doc = \"Bits 3:0\"]\n\n #[inline]\n\n pub fn set_slash_reset_plane(&mut self) -> _SET_SLASH_RESET_PLANE<'_> {\n\n _SET_SLASH_RESET_PLANE { w: self }\n", "file_path": "src/raw/generated.rs", "rank": 34, "score": 26.471182734372306 }, { "content": " g: PALETTE_DATA_R::from_register_value(0),\n\n b: PALETTE_DATA_R::from_register_value(0),\n\n }\n\n }\n\n}\n\n\n\nimpl PaletteColor {\n\n pub fn new(r: u8, g: u8, b: u8) -> Self {\n\n let mut value = Self::default();\n\n value.set_r(r);\n\n value.set_g(g);\n\n value.set_b(b);\n\n value\n\n }\n\n\n\n /// A 6-bit value.\n\n pub fn r(&self) -> u8 {\n\n self.r.bits()\n\n }\n\n\n", "file_path": "src/raw.rs", "rank": 35, "score": 26.414735849130192 }, { "content": " }\n\n }\n\n #[doc = \"Proxy\"]\n\n pub struct _SET_SLASH_RESET_PLANE<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _SET_SLASH_RESET_PLANE<'a> {\n\n const _MASK: u8 = 15;\n\n const _OFFSET: u8 = 0;\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & 
Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 36, "score": 26.333698651534803 }, { "content": " #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n\n #[doc = \"Underline Location Register\"]\n\n pub struct CR14<\n\n 'a,\n\n T: RegisterIndexIoR<CrtControllerGroup, u8> + RegisterIndexIoW<CrtControllerGroup, u8>,\n\n > {\n\n io: &'a mut T,\n\n }\n\n pub mod cr14 {\n\n use super::super::register_trait::*;\n\n use super::CrtControllerGroup;\n", "file_path": "src/raw/generated.rs", "rank": 37, "score": 26.320064860155565 }, { "content": " }\n\n impl GRAPHICS_SLASH_TEXT_MODE_W {\n\n const _MASK: u8 = 1;\n\n const _OFFSET: u8 = 0;\n\n #[inline]\n\n pub fn to_register_value(&self) -> u8 {\n\n let value = *self as u8;\n\n let value = value << Self::_OFFSET;\n\n value\n\n }\n\n }\n\n #[doc = \"Proxy\"]\n\n pub struct _GRAPHICS_SLASH_TEXT_MODE<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _GRAPHICS_SLASH_TEXT_MODE<'a> {\n\n const _MASK: u8 = 1;\n\n const _OFFSET: u8 = 0;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n", "file_path": "src/raw/generated.rs", "rank": 38, "score": 26.242946242095577 }, { "content": " raw_bits: self.io.read(Self::ABS_ADDRESS_R),\n\n }\n\n }\n\n #[doc = \"Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&mut self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W { raw_bits: 0 };\n\n (f)(&mut w);\n\n self.io.write(Self::ABS_ADDRESS_W, w.raw_bits);\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n\n pub struct R {\n\n raw_bits: u8,\n\n }\n\n impl core::fmt::Debug for R {\n\n fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {\n", "file_path": "src/raw/generated.rs", "rank": 39, "score": 
26.22999252733595 }, { "content": " pub struct _PROTECT_REGISTERS_FROM_0_TO_7<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _PROTECT_REGISTERS_FROM_0_TO_7<'a> {\n\n const _MASK: u8 = 128;\n\n const _OFFSET: u8 = 7;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = \"Clears the field bit\"]\n\n #[inline]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n if value {\n", "file_path": "src/raw/generated.rs", "rank": 40, "score": 26.223400366770896 }, { "content": " #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n\n #[doc = \"Horizontal Sync End Register\"]\n\n pub struct CR05<\n\n 'a,\n\n T: RegisterIndexIoR<CrtControllerGroup, u8> + RegisterIndexIoW<CrtControllerGroup, u8>,\n\n > {\n\n io: &'a mut T,\n\n }\n\n pub mod cr05 {\n\n use super::super::register_trait::*;\n\n use super::CrtControllerGroup;\n", "file_path": "src/raw/generated.rs", "rank": 41, "score": 26.212109711892875 }, { "content": " #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n\n #[doc = \"Start Address High Register\"]\n\n pub struct CR0C<\n\n 'a,\n\n T: RegisterIndexIoR<CrtControllerGroup, u8> + RegisterIndexIoW<CrtControllerGroup, u8>,\n\n > {\n\n io: &'a mut T,\n\n }\n\n pub mod cr0c {\n\n use super::super::register_trait::*;\n\n use super::CrtControllerGroup;\n", "file_path": "src/raw/generated.rs", "rank": 42, "score": 26.212109711892875 }, { "content": " #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value 
<< Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n\n #[doc = \"Text Cursor End Register\"]\n\n pub struct CR0B<\n\n 'a,\n\n T: RegisterIndexIoR<CrtControllerGroup, u8> + RegisterIndexIoW<CrtControllerGroup, u8>,\n\n > {\n\n io: &'a mut T,\n\n }\n\n pub mod cr0b {\n\n use super::super::register_trait::*;\n\n use super::CrtControllerGroup;\n", "file_path": "src/raw/generated.rs", "rank": 43, "score": 26.212109711892875 }, { "content": " #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n\n #[doc = \"Text Cursor Location High Register\"]\n\n pub struct CR0E<\n\n 'a,\n\n T: RegisterIndexIoR<CrtControllerGroup, u8> + RegisterIndexIoW<CrtControllerGroup, u8>,\n\n > {\n\n io: &'a mut T,\n\n }\n\n pub mod cr0e {\n\n use super::super::register_trait::*;\n\n use super::CrtControllerGroup;\n", "file_path": "src/raw/generated.rs", "rank": 44, "score": 26.105094911532312 }, { "content": " raw_bits: self.io.read(Self::ABS_ADDRESS_R),\n\n }\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n\n pub struct R {\n\n raw_bits: u8,\n\n }\n\n impl core::fmt::Debug for R {\n\n fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {\n\n fmt.debug_struct(\"FCR\").finish()\n\n }\n\n }\n\n impl R {}\n\n #[doc = \"Value read from the register\"]\n\n pub struct W {\n\n raw_bits: u8,\n\n }\n\n impl W {}\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 45, "score": 26.095246082509963 }, { "content": " pub fn bit(self, value: bool) -> &'a mut W {\n\n if value {\n\n self.w.raw_bits |= Self::_MASK;\n\n } else {\n\n self.w.raw_bits &= !Self::_MASK;\n\n }\n\n self.w\n\n }\n\n }\n\n #[doc = \"Proxy\"]\n\n pub struct _SELECT_ROW_SCAN_COUNTER<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> 
_SELECT_ROW_SCAN_COUNTER<'a> {\n\n const _MASK: u8 = 2;\n\n const _OFFSET: u8 = 1;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n", "file_path": "src/raw/generated.rs", "rank": 46, "score": 26.087656001080006 }, { "content": " }\n\n impl<'a> _BIT_MASK<'a> {\n\n const _MASK: u8 = 255;\n\n const _OFFSET: u8 = 0;\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n\n}\n\npub mod attribute_controller {\n\n use super::register_trait::*;\n\n pub struct AttributeControllerRegisters<\n\n T: RegisterAbsIoR<AttributeControllerGroup, u8>\n", "file_path": "src/raw/generated.rs", "rank": 47, "score": 26.074579990873293 }, { "content": " #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n\n #[doc = \"DAC State Register\"]\n\n pub struct DACSTATE<'a, T: RegisterAbsIoR<ColorPaletteGroup, u8>> {\n\n io: &'a mut T,\n\n }\n\n pub mod dacstate {\n\n use super::super::register_trait::*;\n\n use super::ColorPaletteGroup;\n\n impl<'a, T: RegisterAbsIoR<ColorPaletteGroup, u8>> LocationAbsR for super::DACSTATE<'a, T> {\n\n const ABS_ADDRESS_R: u16 = 967;\n", "file_path": "src/raw/generated.rs", "rank": 48, "score": 25.838507693939437 }, { "content": " #[doc = \"Reads the contents of the register\"]\n\n #[inline]\n\n pub fn read(&mut self) -> R {\n\n R {\n\n raw_bits: self.io.read(Self::INDEX_R),\n\n }\n\n }\n\n #[doc = \"Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&mut self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W { raw_bits: 0 
};\n\n (f)(&mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n\n pub struct R {\n", "file_path": "src/raw/generated.rs", "rank": 49, "score": 25.8254697748209 }, { "content": " let value = *self as u8;\n\n let value = value << Self::_OFFSET;\n\n value\n\n }\n\n }\n\n #[doc = \"Proxy\"]\n\n pub struct _GRAPHICS_SLASH_ALPHANUMERIC_MODE<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _GRAPHICS_SLASH_ALPHANUMERIC_MODE<'a> {\n\n const _MASK: u8 = 1;\n\n const _OFFSET: u8 = 0;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = \"Clears the field bit\"]\n\n #[inline]\n\n pub fn clear_bit(self) -> &'a mut W {\n", "file_path": "src/raw/generated.rs", "rank": 50, "score": 25.73185713447918 }, { "content": " }\n\n #[doc = \"Reads the contents of the register\"]\n\n #[inline]\n\n pub fn read(&mut self) -> R {\n\n R {\n\n raw_bits: self.io.read(Self::INDEX_R),\n\n }\n\n }\n\n #[doc = \"Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&mut self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W { raw_bits: 0 };\n\n (f)(&mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n", "file_path": "src/raw/generated.rs", "rank": 51, "score": 25.72849085358878 }, { "content": " }\n\n #[doc = \"Reads the contents of the register\"]\n\n #[inline]\n\n pub fn read(&mut self) -> R {\n\n R {\n\n raw_bits: self.io.read(Self::INDEX_R),\n\n }\n\n }\n\n #[doc = \"Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&mut self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W { raw_bits: 0 };\n\n (f)(&mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n", "file_path": "src/raw/generated.rs", "rank": 52, "score": 25.728490853588784 }, { "content": " }\n\n self.w\n\n }\n\n }\n\n 
#[doc = \"Proxy\"]\n\n pub struct _ATTRIBUTE_CONTROLLER_REGISTER_INDEX<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _ATTRIBUTE_CONTROLLER_REGISTER_INDEX<'a> {\n\n const _MASK: u8 = 31;\n\n const _OFFSET: u8 = 0;\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 53, "score": 25.660888722153736 }, { "content": " #[inline]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n if value {\n\n self.w.raw_bits |= Self::_MASK;\n\n } else {\n\n self.w.raw_bits &= !Self::_MASK;\n\n }\n\n self.w\n\n }\n\n }\n\n }\n\n}\n\npub mod color_palette {\n\n use super::register_trait::*;\n\n pub struct ColorPaletteRegisters<\n", "file_path": "src/raw/generated.rs", "rank": 54, "score": 25.53755628855636 }, { "content": " #[doc = \"Bits 1:0\"]\n\n #[inline]\n\n pub fn write_mode(&mut self) -> _WRITE_MODE<'_> {\n\n _WRITE_MODE { w: self }\n\n }\n\n }\n\n #[doc = \"Proxy\"]\n\n pub struct _SHIFT_REGISTER_CONTROL<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _SHIFT_REGISTER_CONTROL<'a> {\n\n const _MASK: u8 = 96;\n\n const _OFFSET: u8 = 5;\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n", "file_path": "src/raw/generated.rs", "rank": 55, "score": 25.521035758289997 }, { "content": " pub fn read(&mut self) -> R {\n\n R {\n\n raw_bits: self.io.read(Self::INDEX_R),\n\n }\n\n }\n\n #[doc = \"Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&mut self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut 
W,\n\n {\n\n let mut w = W { raw_bits: 0 };\n\n (f)(&mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n\n pub struct R {\n\n raw_bits: u8,\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 56, "score": 25.48326757490414 }, { "content": " pub fn read(&mut self) -> R {\n\n R {\n\n raw_bits: self.io.read(Self::INDEX_R),\n\n }\n\n }\n\n #[doc = \"Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&mut self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W { raw_bits: 0 };\n\n (f)(&mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n\n pub struct R {\n\n raw_bits: u8,\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 57, "score": 25.483267574904144 }, { "content": " if value {\n\n self.w.raw_bits |= Self::_MASK;\n\n } else {\n\n self.w.raw_bits &= !Self::_MASK;\n\n }\n\n self.w\n\n }\n\n }\n\n #[doc = \"Proxy\"]\n\n pub struct _MAP_0_ENABLE<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _MAP_0_ENABLE<'a> {\n\n const _MASK: u8 = 1;\n\n const _OFFSET: u8 = 0;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 58, "score": 25.43641374145728 }, { "content": " if value {\n\n self.w.raw_bits |= Self::_MASK;\n\n } else {\n\n self.w.raw_bits &= !Self::_MASK;\n\n }\n\n self.w\n\n }\n\n }\n\n #[doc = \"Proxy\"]\n\n pub struct _ENABLE_PLANE_0<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _ENABLE_PLANE_0<'a> {\n\n const _MASK: u8 = 1;\n\n const _OFFSET: u8 = 0;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 59, "score": 25.43641374145728 }, { "content": " impl<'a> _VIDEO_ENABLE<'a> {\n\n const _MASK: u8 = 32;\n\n const _OFFSET: u8 = 5;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n\n pub fn 
set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = \"Clears the field bit\"]\n\n #[inline]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n if value {\n\n self.w.raw_bits |= Self::_MASK;\n\n } else {\n\n self.w.raw_bits &= !Self::_MASK;\n", "file_path": "src/raw/generated.rs", "rank": 60, "score": 25.271456785749603 }, { "content": " pub fn read(&mut self) -> R {\n\n R {\n\n raw_bits: self.io.read(Self::ABS_ADDRESS_R),\n\n }\n\n }\n\n #[doc = \"Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&mut self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W { raw_bits: 0 };\n\n (f)(&mut w);\n\n self.io.write(Self::ABS_ADDRESS_W, w.raw_bits);\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n\n pub struct R {\n\n raw_bits: u8,\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 61, "score": 25.227888335074766 }, { "content": " #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n\n #[doc = \"Horizontal Blanking End Register\"]\n\n pub struct CR03<\n\n 'a,\n\n T: RegisterIndexIoR<CrtControllerGroup, u8> + RegisterIndexIoW<CrtControllerGroup, u8>,\n\n > {\n\n io: &'a mut T,\n\n }\n\n pub mod cr03 {\n\n use super::super::register_trait::*;\n", "file_path": "src/raw/generated.rs", "rank": 62, "score": 25.190660030536762 }, { "content": " w: &'a mut W,\n\n }\n\n impl<'a> _SYNCHRONOUS_RESET<'a> {\n\n const _MASK: u8 = 2;\n\n const _OFFSET: u8 = 1;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = \"Clears the field bit\"]\n\n #[inline]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n 
self.bit(false)\n\n }\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n if value {\n\n self.w.raw_bits |= Self::_MASK;\n", "file_path": "src/raw/generated.rs", "rank": 63, "score": 25.170806106585847 }, { "content": " impl<'a> _VERTICAL_TOTAL_BIT_9<'a> {\n\n const _MASK: u8 = 32;\n\n const _OFFSET: u8 = 5;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = \"Clears the field bit\"]\n\n #[inline]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n if value {\n\n self.w.raw_bits |= Self::_MASK;\n\n } else {\n\n self.w.raw_bits &= !Self::_MASK;\n", "file_path": "src/raw/generated.rs", "rank": 64, "score": 25.140611335090817 }, { "content": " impl<'a> _VERTICAL_TOTAL_BIT_8<'a> {\n\n const _MASK: u8 = 1;\n\n const _OFFSET: u8 = 0;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = \"Clears the field bit\"]\n\n #[inline]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n if value {\n\n self.w.raw_bits |= Self::_MASK;\n\n } else {\n\n self.w.raw_bits &= !Self::_MASK;\n", "file_path": "src/raw/generated.rs", "rank": 65, "score": 25.14061133509082 }, { "content": " #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n\n #[doc = \"Horizontal Display Enable End Register\"]\n\n pub struct CR01<\n\n 'a,\n\n T: RegisterIndexIoR<CrtControllerGroup, u8> + RegisterIndexIoW<CrtControllerGroup, 
u8>,\n\n > {\n\n io: &'a mut T,\n\n }\n\n pub mod cr01 {\n\n use super::super::register_trait::*;\n", "file_path": "src/raw/generated.rs", "rank": 66, "score": 25.082418676838742 }, { "content": " w: &'a mut W,\n\n }\n\n impl<'a> _IGNORE_COLOR_PLANE_2<'a> {\n\n const _MASK: u8 = 4;\n\n const _OFFSET: u8 = 2;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = \"Clears the field bit\"]\n\n #[inline]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n if value {\n\n self.w.raw_bits |= Self::_MASK;\n", "file_path": "src/raw/generated.rs", "rank": 67, "score": 25.040281650295995 }, { "content": " pub struct _WRITE_MODE<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _WRITE_MODE<'a> {\n\n const _MASK: u8 = 3;\n\n const _OFFSET: u8 = 0;\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n #[doc = \"Writes `variant` to the field\"]\n\n #[inline]\n\n pub fn variant(self, variant: WRITE_MODE_W) -> &'a mut W {\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= variant.to_register_value();\n", "file_path": "src/raw/generated.rs", "rank": 68, "score": 25.03220926482738 }, { "content": " const ABS_ADDRESS_W: u16 = 967;\n\n }\n\n impl<'a, T: RegisterAbsIoW<ColorPaletteGroup, u8>> InGroup for super::DACRX<'a, T> {\n\n type Group = ColorPaletteGroup;\n\n }\n\n impl<'a, T: RegisterAbsIoW<ColorPaletteGroup, u8>> super::DACRX<'a, T> {\n\n pub fn new(io: &'a mut T) -> Self {\n\n Self { io }\n\n }\n\n #[doc = \"Writes to the register\"]\n\n #[inline]\n\n pub fn write<F>(&mut self, f: F)\n\n where\n\n F: FnOnce(&mut W) -> &mut W,\n\n {\n\n let mut w = W { raw_bits: 0 
};\n\n (f)(&mut w);\n\n self.io.write(Self::ABS_ADDRESS_W, w.raw_bits);\n\n }\n\n }\n", "file_path": "src/raw/generated.rs", "rank": 69, "score": 25.009781118615923 }, { "content": " for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let r = self.read();\n\n let mut w = W {\n\n raw_bits: r.raw_bits,\n\n };\n\n (f)(&r, &mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n\n }\n\n #[doc = \"Reads the contents of the register\"]\n\n #[inline]\n\n pub fn read(&mut self) -> R {\n\n R {\n\n raw_bits: self.io.read(Self::INDEX_R),\n\n }\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n\n pub struct R {\n\n raw_bits: u8,\n", "file_path": "src/raw/generated.rs", "rank": 70, "score": 24.983401975249773 }, { "content": " for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let r = self.read();\n\n let mut w = W {\n\n raw_bits: r.raw_bits,\n\n };\n\n (f)(&r, &mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n\n }\n\n #[doc = \"Reads the contents of the register\"]\n\n #[inline]\n\n pub fn read(&mut self) -> R {\n\n R {\n\n raw_bits: self.io.read(Self::INDEX_R),\n\n }\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n\n pub struct R {\n\n raw_bits: u8,\n", "file_path": "src/raw/generated.rs", "rank": 71, "score": 24.983401975249773 }, { "content": " impl<'a, T: RegisterAbsIoR<SequencerGroup, u8> + RegisterAbsIoW<SequencerGroup, u8>>\n\n super::SRX<'a, T>\n\n {\n\n pub fn new(io: &'a mut T) -> Self {\n\n Self { io }\n\n }\n\n #[doc = \"Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&mut self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let r = self.read();\n\n let mut w = W {\n\n raw_bits: r.raw_bits,\n\n };\n\n (f)(&r, &mut w);\n\n self.io.write(Self::ABS_ADDRESS_W, w.raw_bits);\n\n }\n\n #[doc = \"Reads the contents of the register\"]\n", "file_path": "src/raw/generated.rs", "rank": 72, "score": 24.84175730369165 }, { "content": " pub struct _HORIZONTAL_SYNC_END<'a> {\n\n w: 
&'a mut W,\n\n }\n\n impl<'a> _HORIZONTAL_SYNC_END<'a> {\n\n const _MASK: u8 = 31;\n\n const _OFFSET: u8 = 0;\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n\n #[doc = \"Vertical Total Register\"]\n\n pub struct CR06<\n\n 'a,\n", "file_path": "src/raw/generated.rs", "rank": 73, "score": 24.83656879805451 }, { "content": " }\n\n impl<'a> _SHIFT_LOAD<'a> {\n\n const _MASK: u8 = 4;\n\n const _OFFSET: u8 = 2;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = \"Clears the field bit\"]\n\n #[inline]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n if value {\n\n self.w.raw_bits |= Self::_MASK;\n\n } else {\n", "file_path": "src/raw/generated.rs", "rank": 74, "score": 24.825523221352803 }, { "content": " }\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n if value {\n\n self.w.raw_bits |= Self::_MASK;\n\n } else {\n\n self.w.raw_bits &= !Self::_MASK;\n\n }\n\n self.w\n\n }\n\n }\n\n #[doc = \"Proxy\"]\n\n pub struct _MAP_2_ENABLE<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _MAP_2_ENABLE<'a> {\n\n const _MASK: u8 = 4;\n\n const _OFFSET: u8 = 2;\n\n #[doc = \"Sets the field bit\"]\n", "file_path": "src/raw/generated.rs", "rank": 75, "score": 24.7852456927018 }, { "content": " }\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n if value {\n\n self.w.raw_bits |= Self::_MASK;\n\n } else {\n\n self.w.raw_bits &= !Self::_MASK;\n\n }\n\n self.w\n\n }\n\n }\n\n #[doc = \"Proxy\"]\n\n pub struct _ENABLE_PLANE_2<'a> {\n\n w: 
&'a mut W,\n\n }\n\n impl<'a> _ENABLE_PLANE_2<'a> {\n\n const _MASK: u8 = 4;\n\n const _OFFSET: u8 = 2;\n\n #[doc = \"Sets the field bit\"]\n", "file_path": "src/raw/generated.rs", "rank": 76, "score": 24.7852456927018 }, { "content": " const _OFFSET: u8 = 5;\n\n #[inline]\n\n pub fn to_register_value(&self) -> u8 {\n\n let value = *self as u8;\n\n let value = value << Self::_OFFSET;\n\n value\n\n }\n\n }\n\n #[doc = \"Proxy\"]\n\n pub struct _TEXT_CURSOR_SKEW<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _TEXT_CURSOR_SKEW<'a> {\n\n const _MASK: u8 = 96;\n\n const _OFFSET: u8 = 5;\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n", "file_path": "src/raw/generated.rs", "rank": 77, "score": 24.773037075119298 }, { "content": " }\n\n impl<\n\n 'a,\n\n T: RegisterIndexIoR<CrtControllerGroup, u8> + RegisterIndexIoW<CrtControllerGroup, u8>,\n\n > super::CR14<'a, T>\n\n {\n\n pub fn new(io: &'a mut T) -> Self {\n\n Self { io }\n\n }\n\n #[doc = \"Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&mut self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let r = self.read();\n\n let mut w = W {\n\n raw_bits: r.raw_bits,\n\n };\n\n (f)(&r, &mut w);\n", "file_path": "src/raw/generated.rs", "rank": 78, "score": 24.76317017085764 }, { "content": " impl<\n\n 'a,\n\n T: RegisterIndexIoR<GraphicsControllerGroup, u8>\n\n + RegisterIndexIoW<GraphicsControllerGroup, u8>,\n\n > super::GR02<'a, T>\n\n {\n\n pub fn new(io: &'a mut T) -> Self {\n\n Self { io }\n\n }\n\n #[doc = \"Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&mut self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let r = self.read();\n\n let mut w = W {\n\n raw_bits: r.raw_bits,\n\n };\n\n (f)(&r, &mut w);\n", "file_path": "src/raw/generated.rs", "rank": 79, "score": 
24.763170170857638 }, { "content": " }\n\n impl<\n\n 'a,\n\n T: RegisterIndexIoR<CrtControllerGroup, u8> + RegisterIndexIoW<CrtControllerGroup, u8>,\n\n > super::CR0E<'a, T>\n\n {\n\n pub fn new(io: &'a mut T) -> Self {\n\n Self { io }\n\n }\n\n #[doc = \"Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&mut self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let r = self.read();\n\n let mut w = W {\n\n raw_bits: r.raw_bits,\n\n };\n\n (f)(&r, &mut w);\n", "file_path": "src/raw/generated.rs", "rank": 80, "score": 24.76317017085764 }, { "content": " }\n\n impl<\n\n 'a,\n\n T: RegisterIndexIoR<CrtControllerGroup, u8> + RegisterIndexIoW<CrtControllerGroup, u8>,\n\n > super::CR0C<'a, T>\n\n {\n\n pub fn new(io: &'a mut T) -> Self {\n\n Self { io }\n\n }\n\n #[doc = \"Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&mut self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let r = self.read();\n\n let mut w = W {\n\n raw_bits: r.raw_bits,\n\n };\n\n (f)(&r, &mut w);\n", "file_path": "src/raw/generated.rs", "rank": 81, "score": 24.76317017085764 }, { "content": " }\n\n impl<\n\n 'a,\n\n T: RegisterIndexIoR<CrtControllerGroup, u8> + RegisterIndexIoW<CrtControllerGroup, u8>,\n\n > super::CR0B<'a, T>\n\n {\n\n pub fn new(io: &'a mut T) -> Self {\n\n Self { io }\n\n }\n\n #[doc = \"Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&mut self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let r = self.read();\n\n let mut w = W {\n\n raw_bits: r.raw_bits,\n\n };\n\n (f)(&r, &mut w);\n", "file_path": "src/raw/generated.rs", "rank": 82, "score": 24.763170170857638 }, { "content": " }\n\n impl<\n\n 'a,\n\n T: RegisterIndexIoR<CrtControllerGroup, u8> + RegisterIndexIoW<CrtControllerGroup, u8>,\n\n > super::CR05<'a, T>\n\n {\n\n pub fn new(io: &'a mut T) -> Self {\n\n Self { io }\n\n }\n\n #[doc = \"Modifies the 
contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&mut self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let r = self.read();\n\n let mut w = W {\n\n raw_bits: r.raw_bits,\n\n };\n\n (f)(&r, &mut w);\n", "file_path": "src/raw/generated.rs", "rank": 83, "score": 24.76317017085764 }, { "content": " }\n\n #[doc = \"Proxy\"]\n\n pub struct _START_UNDERLINE<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _START_UNDERLINE<'a> {\n\n const _MASK: u8 = 31;\n\n const _OFFSET: u8 = 0;\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n\n #[doc = \"Vertical Blanking Start Register\"]\n", "file_path": "src/raw/generated.rs", "rank": 84, "score": 24.754112862182293 }, { "content": " }\n\n impl<'a, T: RegisterAbsIoR<GeneralGroup, u8> + RegisterAbsIoW<GeneralGroup, u8>> super::MSR<'a, T> {\n\n pub fn new(io: &'a mut T) -> Self {\n\n Self { io }\n\n }\n\n #[doc = \"Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&mut self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let r = self.read();\n\n let mut w = W {\n\n raw_bits: r.raw_bits,\n\n };\n\n (f)(&r, &mut w);\n\n self.io.write(Self::ABS_ADDRESS_W, w.raw_bits);\n\n }\n\n #[doc = \"Reads the contents of the register\"]\n\n #[inline]\n", "file_path": "src/raw/generated.rs", "rank": 85, "score": 24.739586126896477 }, { "content": " impl<\n\n 'a,\n\n T: RegisterIndexIoR<CrtControllerGroup, u8> + RegisterIndexIoW<CrtControllerGroup, u8>,\n\n > super::CR12<'a, T>\n\n {\n\n pub fn new(io: &'a mut T) -> Self {\n\n Self { io }\n\n }\n\n #[doc = \"Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&mut self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let r = 
self.read();\n\n let mut w = W {\n\n raw_bits: r.raw_bits,\n\n };\n\n (f)(&r, &mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n", "file_path": "src/raw/generated.rs", "rank": 86, "score": 24.736087392200922 }, { "content": " impl<\n\n 'a,\n\n T: RegisterIndexIoR<CrtControllerGroup, u8> + RegisterIndexIoW<CrtControllerGroup, u8>,\n\n > super::CR07<'a, T>\n\n {\n\n pub fn new(io: &'a mut T) -> Self {\n\n Self { io }\n\n }\n\n #[doc = \"Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&mut self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let r = self.read();\n\n let mut w = W {\n\n raw_bits: r.raw_bits,\n\n };\n\n (f)(&r, &mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n", "file_path": "src/raw/generated.rs", "rank": 87, "score": 24.73608739220092 }, { "content": " }\n\n impl<\n\n 'a,\n\n T: RegisterAbsIoR<GraphicsControllerGroup, u8>\n\n + RegisterAbsIoW<GraphicsControllerGroup, u8>,\n\n > super::GRX<'a, T>\n\n {\n\n pub fn new(io: &'a mut T) -> Self {\n\n Self { io }\n\n }\n\n #[doc = \"Modifies the contents of the register\"]\n\n #[inline]\n\n pub fn modify<F>(&mut self, f: F)\n\n where\n\n for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,\n\n {\n\n let r = self.read();\n\n let mut w = W {\n\n raw_bits: r.raw_bits,\n\n };\n", "file_path": "src/raw/generated.rs", "rank": 88, "score": 24.725762886515433 }, { "content": " }\n\n impl<'a> _COMPATIBILITY_MODE_SUPPORT<'a> {\n\n const _MASK: u8 = 1;\n\n const _OFFSET: u8 = 0;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = \"Clears the field bit\"]\n\n #[inline]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n if value {\n\n self.w.raw_bits |= Self::_MASK;\n\n } else {\n", "file_path": "src/raw/generated.rs", "rank": 89, "score": 
24.686830248993292 }, { "content": " const _MASK: u8 = 12;\n\n const _OFFSET: u8 = 2;\n\n #[inline]\n\n pub fn to_register_value(&self) -> u8 {\n\n let value = *self as u8;\n\n let value = value << Self::_OFFSET;\n\n value\n\n }\n\n }\n\n #[doc = \"Proxy\"]\n\n pub struct _CLOCK_SELECT<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _CLOCK_SELECT<'a> {\n\n const _MASK: u8 = 12;\n\n const _OFFSET: u8 = 2;\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n", "file_path": "src/raw/generated.rs", "rank": 90, "score": 24.65382565798039 }, { "content": " ZERO = 0,\n\n ONE = 1,\n\n TWO = 2,\n\n THREE = 3,\n\n }\n\n impl BYTE_PANNING_W {\n\n const _MASK: u8 = 96;\n\n const _OFFSET: u8 = 5;\n\n #[inline]\n\n pub fn to_register_value(&self) -> u8 {\n\n let value = *self as u8;\n\n let value = value << Self::_OFFSET;\n\n value\n\n }\n\n }\n\n #[doc = \"Proxy\"]\n\n pub struct _BYTE_PANNING<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _BYTE_PANNING<'a> {\n", "file_path": "src/raw/generated.rs", "rank": 91, "score": 24.642784959994305 }, { "content": " #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n if value {\n\n self.w.raw_bits |= Self::_MASK;\n\n } else {\n\n self.w.raw_bits &= !Self::_MASK;\n\n }\n\n self.w\n\n }\n\n }\n\n #[doc = \"Proxy\"]\n\n pub struct _PALETTE_BIT_6<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _PALETTE_BIT_6<'a> {\n\n const _MASK: u8 = 4;\n\n const _OFFSET: u8 = 2;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n", "file_path": "src/raw/generated.rs", "rank": 92, "score": 24.619118664732124 }, { "content": " #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n if value {\n\n self.w.raw_bits |= Self::_MASK;\n\n } else {\n\n self.w.raw_bits &= !Self::_MASK;\n\n }\n\n self.w\n\n }\n\n }\n\n #[doc = \"Proxy\"]\n\n pub struct _READ_MODE<'a> {\n\n w: 
&'a mut W,\n\n }\n\n impl<'a> _READ_MODE<'a> {\n\n const _MASK: u8 = 8;\n\n const _OFFSET: u8 = 3;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n", "file_path": "src/raw/generated.rs", "rank": 93, "score": 24.619118664732124 }, { "content": " w: &'a mut W,\n\n }\n\n impl<'a> _DISPLAY_ENABLE_SKEW_CONTROL<'a> {\n\n const _MASK: u8 = 96;\n\n const _OFFSET: u8 = 5;\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n #[doc = \"Writes `variant` to the field\"]\n\n #[inline]\n\n pub fn variant(self, variant: DISPLAY_ENABLE_SKEW_CONTROL_W) -> &'a mut W {\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= variant.to_register_value();\n\n self.w\n", "file_path": "src/raw/generated.rs", "rank": 94, "score": 24.554875738466635 }, { "content": " }\n\n impl<'a> _WORD_MODE_OR_BYTE_MODE<'a> {\n\n const _MASK: u8 = 64;\n\n const _OFFSET: u8 = 6;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = \"Clears the field bit\"]\n\n #[inline]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n if value {\n\n self.w.raw_bits |= Self::_MASK;\n\n } else {\n", "file_path": "src/raw/generated.rs", "rank": 95, "score": 24.54979832956446 }, { "content": " }\n\n impl<'a> _ODD_SLASH_EVEN_MODE<'a> {\n\n const _MASK: u8 = 4;\n\n const _OFFSET: u8 = 2;\n\n #[doc = \"Sets the field bit\"]\n\n #[inline]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = \"Clears the field bit\"]\n\n #[inline]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bit(self, value: 
bool) -> &'a mut W {\n\n if value {\n\n self.w.raw_bits |= Self::_MASK;\n\n } else {\n", "file_path": "src/raw/generated.rs", "rank": 96, "score": 24.54979832956446 }, { "content": " };\n\n (f)(&r, &mut w);\n\n self.io.write(Self::INDEX_W, w.raw_bits);\n\n }\n\n #[doc = \"Reads the contents of the register\"]\n\n #[inline]\n\n pub fn read(&mut self) -> R {\n\n R {\n\n raw_bits: self.io.read(Self::INDEX_R),\n\n }\n\n }\n\n }\n\n #[doc = \"Value to write to the register\"]\n\n pub struct R {\n\n raw_bits: u8,\n\n }\n\n impl core::fmt::Debug for R {\n\n fn fmt(&self, fmt: &mut core::fmt::Formatter) -> core::fmt::Result {\n\n fmt.debug_struct(\"GR03\")\n\n .field(\n", "file_path": "src/raw/generated.rs", "rank": 97, "score": 24.54668959251762 }, { "content": " pub struct _STARTING_ROW_SCAN_COUNT<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _STARTING_ROW_SCAN_COUNT<'a> {\n\n const _MASK: u8 = 31;\n\n const _OFFSET: u8 = 0;\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n\n self.w\n\n }\n\n }\n\n }\n\n #[doc = \"Text Cursor Start Register\"]\n\n pub struct CR0A<\n\n 'a,\n", "file_path": "src/raw/generated.rs", "rank": 98, "score": 24.45370183431637 }, { "content": " pub fn to_register_value(&self) -> u8 {\n\n let value = *self as u8;\n\n let value = value << Self::_OFFSET;\n\n value\n\n }\n\n }\n\n #[doc = \"Proxy\"]\n\n pub struct _VERTICAL_AND_HORIZONTAL_SYNC_POLARITY<'a> {\n\n w: &'a mut W,\n\n }\n\n impl<'a> _VERTICAL_AND_HORIZONTAL_SYNC_POLARITY<'a> {\n\n const _MASK: u8 = 192;\n\n const _OFFSET: u8 = 6;\n\n #[doc = \"Writes raw bits to the field\"]\n\n #[inline]\n\n pub fn bits(self, value: u8) -> &'a mut W {\n\n let value = value << Self::_OFFSET;\n\n let value = value & Self::_MASK;\n\n self.w.raw_bits &= !Self::_MASK;\n\n self.w.raw_bits |= value;\n", 
"file_path": "src/raw/generated.rs", "rank": 99, "score": 24.443630808868576 } ]
Rust
src/lib/fixture_tests.rs
loalang/loalang
745edc192564b3f362ad1623fbb8853fcefa876e
use crate::optimization::Optimizable; use crate::*; use serde::Deserialize; extern crate serde_yaml; extern crate simple_logging; #[derive(Deserialize)] struct FixtureConfig { main_class: Option<String>, expected: FixtureExpectations, } #[derive(Deserialize)] struct FixtureExpectations { success: bool, stdout: Vec<String>, } #[test] fn fixtures() { simple_logging::log_to_stderr(LevelFilter::Debug); let mut failures = vec![]; for entry in glob::glob("src/__fixtures__/*").unwrap() { let entry = entry.unwrap(); let fixture_name = entry.file_name().and_then(std::ffi::OsStr::to_str).unwrap(); eprintln!("\n{} ===============================", fixture_name); if fixture_name.starts_with("_") { eprintln!("Skipping"); continue; } eprintln!("Parsing."); let mut fixture_config_path = entry.clone(); fixture_config_path.push("fixture.yml"); let fixture_config: FixtureConfig = serde_yaml::from_reader(std::fs::File::open(fixture_config_path).unwrap()).unwrap(); let mut source_files_path = entry.clone(); source_files_path.push("**"); source_files_path.push("*.loa"); let mut diagnostics = vec![]; let mut test_comments = vec![]; let mut sources = Source::files(source_files_path.to_str().unwrap()).unwrap(); sources.extend(Source::stdlib().unwrap()); if let Some(ref main_class) = fixture_config.main_class { sources.push(Source::main(main_class)); } eprintln!("Analyzing."); let mut analysis: semantics::Analysis = sources .into_iter() .map(syntax::Parser::new) .map(syntax::Parser::parse_with_test_comments) .map(|(t, d, c)| { diagnostics.extend(d); test_comments.extend(c); (t.source.uri.clone(), t) }) .into(); diagnostics.extend(analysis.check().clone()); let actual_success = !Diagnostic::failed(&diagnostics); 'expected_comment: for comment in test_comments.iter() { for diagnostic in diagnostics.iter() { if matches(comment, diagnostic) { continue 'expected_comment; } } failures.push(format!( "Expected diagnostic: {:?} @ {}", comment.lexeme(), comment.span )); } 'actual_diagnostic: for 
diagnostic in diagnostics { for comment in test_comments.iter() { if matches(comment, &diagnostic) { continue 'actual_diagnostic; } } failures.push(format!("Unexpected diagnostic: {:#?}", diagnostic)); } if !actual_success { continue; } if let Some(_) = fixture_config.main_class { eprintln!("Generating."); let mut generator = generation::Generator::new(&mut analysis); let mut assembly = generator.generate_all().unwrap(); eprintln!("Optimizing."); assembly.optimize(); eprintln!("Running."); eprintln!("{:?}", assembly); let mut vm = vm::VM::new(); let result = vm.eval_pop::<()>(assembly.clone().into()).unwrap(); let actual_stdout = format!("{}\n", result); let expected_stdout: String = fixture_config .expected .stdout .into_iter() .map(|s| format!("{}\n", s)) .collect(); if actual_stdout != expected_stdout { failures.push(format!( "{}:\nExpected output: {}\n Actual output: {}", fixture_name, expected_stdout, actual_stdout )); } } if fixture_config.expected.success != actual_success { failures.push(format!( "Expected {} to {}", fixture_name, if fixture_config.expected.success { "be successful" } else { "fail" } )); } } assert!(failures.is_empty(), "\n\n{}", failures.join("\n\n")); } fn matches(comment: &syntax::Token, diagnostic: &Diagnostic) -> bool { let d_span = diagnostic.span(); ( comment.span.start.line, comment.span.start.uri.clone(), &comment.lexeme()[4..], ) == ( d_span.start.line, d_span.start.uri.clone(), diagnostic.to_string().as_str(), ) }
use crate::optimization::Optimizable; use crate::*; use serde::Deserialize; extern crate serde_yaml; extern crate simple_logging; #[derive(Deserialize)] struct FixtureConfig { main_class: Option<String>, expected: FixtureExpectations, } #[derive(Deserialize)] struct FixtureExpectations { success: bool, stdout: Vec<String>, } #[test] fn fixtures() { simple_logging::log_to_stderr(LevelFilter::Debug); let mut failures = vec![]; for entry in glob::glob("src/__fixtures__/*").unwrap() { let entry = entry.unwrap(); let fixture_name = entry.file_name().and_then(std::ffi::OsStr::to_str).unwrap(); eprintln!("\n{} ===============================", fixture_name); if fixture_name.starts_with("_") { eprintln!("Skipping"); continue; } eprintln!("Parsing."); let mut fixture_config_path = entry.clone(); fixture_config_path.push("fixture.yml"); let fixture_config: FixtureConfig = serde_yaml::from_reader(std::fs::File::open(fixture_config_path).unwrap()).unwrap(); let mut source_files_path = entry.clone(); source_files_path.push("**"); source_files_path.push("*.loa"); let mut diagnostics = vec![]; let mut test_comments = vec![]; let mut sources = Source::files(source_files_path.to_str().unwrap()).unwrap(); sources.extend(Source::stdlib().unwrap()); if let Some(ref main_class) = fixture_config.main_class { sources.push(Source::main(main_class)); } eprintln!("Analyzing."); let mut analysis: semantics::Analysis = sources .into_iter() .map(syntax::Parser::new) .map(syntax::Parser::parse_with_test_comments) .map(|(t, d, c)| { diagnostics.extend(d); test_comments.extend(c); (t.source.uri.clone(), t) }) .into(); diagnostics.extend(analysis.check().clone()); let actual_success = !Diagnostic::failed(&diagnostics); 'expected_comment: for comment in test_comments.iter() { for diagnostic in diagnostics.iter() { if matches(comment, diagnostic) { continue 'expected_comment; } } failures.push(format!( "Expected diagnostic: {:?} @ {}", comment.lexeme(), comment.span )); } 'actual_diagnostic: for 
diagnostic in diagnostics { for comment in test_comments.iter() { if matches(comment, &diagnostic) { continue 'actual_diagnostic; } } failures.push(format!("Unexpected diagnostic: {:#?}", diagnostic)); } if !actual_success { continue; } if let Some(_) = fixture_config.main_class { eprintln!("Generating."); let mut generator = generation::Generator::new(&mut analysis); let mut assembly = generator.generate_all().unwrap(); eprintln!("Optimizing."); assembly.optimize(); eprintln!("Running."); eprintln!("{:?}", assembly); let mut vm = vm::VM::new(); let result = vm.eval_pop::<()>(assembly.clone().into()).unwrap(); let actual_stdout = format!("{}\n", result); let expected_stdout: String = fixture_config .expected .stdout .into_iter() .map(|s| format!("{}\n", s)) .collect(); if actual_stdout != expected_stdout { failures.push(format!( "{}:\nExpected output: {}\n Actual output: {}", fixture_name, expected_stdout, actual_stdout )); } } if fixture_config.expected.success != actual_success { failures.push(format!( "Expected {} to {}", fixture_name, if fixture_config.expected.success { "be successful" } else { "fail" } )); } } assert!(failures.is_empty(), "\n\n{}", failures.join("\n\n")); } fn matches(comment: &syntax::Token, diagnostic: &Diagnostic) -> bool { let d_span = diagnostic.spa
n(); ( comment.span.start.line, comment.span.start.uri.clone(), &comment.lexeme()[4..], ) == ( d_span.start.line, d_span.start.uri.clone(), diagnostic.to_string().as_str(), ) }
function_block-function_prefixed
[ { "content": "pub fn is_valid_symbol(string: &String) -> bool {\n\n let source = Source::new(SourceKind::Module, URI::Exact(\"tmp\".into()), string.clone());\n\n let tokens = tokenize(source);\n\n\n\n tokens.len() == 2 && matches!(tokens[0].kind, TokenKind::SimpleSymbol(_))\n\n}\n\n\n", "file_path": "src/lib/syntax/lexer.rs", "rank": 3, "score": 169101.77003074042 }, { "content": "pub fn is_valid_binary_selector(string: &String) -> bool {\n\n let source = Source::new(SourceKind::Module, URI::Exact(\"tmp\".into()), string.clone());\n\n let mut tokens = tokenize(source);\n\n tokens.pop();\n\n\n\n use TokenKind::*;\n\n\n\n for token in tokens {\n\n if !matches!(\n\n token.kind,\n\n Asterisk | Plus | Slash | EqualSign | OpenAngle | CloseAngle\n\n ) {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "src/lib/syntax/lexer.rs", "rank": 4, "score": 166189.93202466477 }, { "content": "pub fn string_to_characters(s: String) -> Vec<u16> {\n\n s.encode_utf16().collect()\n\n}\n", "file_path": "src/lib/syntax/characters.rs", "rank": 5, "score": 156329.22936419275 }, { "content": "pub fn is_valid_keyword_selector(string: &String, length: usize) -> bool {\n\n let source = Source::new(SourceKind::Module, URI::Exact(\"tmp\".into()), string.clone());\n\n let tokens = tokenize(source);\n\n\n\n if tokens.len() != length * 2 + 1 {\n\n return false;\n\n }\n\n\n\n use TokenKind::*;\n\n\n\n for i in 0..length - 1 {\n\n let kw_index = i * 2;\n\n let colon_index = kw_index + 1;\n\n\n\n if !matches!(tokens[kw_index].kind, SimpleSymbol(_)) {\n\n return false;\n\n }\n\n\n\n if !matches!(tokens[colon_index].kind, Colon) {\n\n return false;\n\n }\n\n }\n\n\n\n return true;\n\n}\n\n\n", "file_path": "src/lib/syntax/lexer.rs", "rank": 7, "score": 152451.99503505387 }, { "content": "fn sees_doc_newline(stream: &mut CharStream) -> Option<bool> {\n\n loop {\n\n let (_, c) = stream.peek()?;\n\n if let SPACE | NEWLINE | CARRIAGE_RETURN | TAB = *c {\n\n stream.move_next();\n\n 
continue;\n\n }\n\n break;\n\n }\n\n if let (_, SLASH) = stream.peek()? {\n\n if let (_, SLASH) = stream.peek_next()? {\n\n if let (_, SLASH) = stream.peek_next()? {\n\n return Some(true);\n\n }\n\n }\n\n }\n\n Some(false)\n\n}\n\n\n", "file_path": "src/lib/syntax/lexer.rs", "rank": 8, "score": 136356.80050463218 }, { "content": "pub fn tokenize(source: Arc<Source>) -> Vec<Token> {\n\n let mut chars = source.code.encode_utf16().enumerate().peekmore();\n\n let mut end_offset = 0;\n\n let mut tokens = vec![];\n\n let mut state = LexerState::Normal;\n\n\n\n loop {\n\n let token = match state {\n\n LexerState::Normal => match next_token(&source, &mut state, &mut chars) {\n\n None => break,\n\n Some(token) => token,\n\n },\n\n\n\n LexerState::Doc => match next_doc_token(&source, &mut state, &mut chars) {\n\n None => break,\n\n Some(token) => token,\n\n },\n\n };\n\n end_offset = token.span.end.offset;\n\n tokens.push(token)\n", "file_path": "src/lib/syntax/lexer.rs", "rank": 9, "score": 135314.00771358586 }, { "content": "struct SimulatedStack(Vec<SimulatedStackElement>);\n\n\n\nimpl SimulatedStack {\n\n pub fn new() -> SimulatedStack {\n\n SimulatedStack(vec![])\n\n }\n\n\n\n pub fn pop(&mut self) {\n\n #[allow(unused)]\n\n let result = self.0.pop();\n\n #[cfg(debug_assertions)]\n\n assert!(result.is_some());\n\n }\n\n\n\n pub fn push_self(&mut self) {\n\n self.0.push(SimulatedStackElement::Self_);\n\n }\n\n\n\n pub fn push_declaration(&mut self, id: Id) {\n\n self.0.push(SimulatedStackElement::Declaration(id));\n", "file_path": "src/lib/generation/generator.rs", "rank": 10, "score": 108401.11262270376 }, { "content": "pub fn characters_to_string<I: Iterator<Item = u16>>(i: I) -> String {\n\n decode_utf16(i)\n\n .map(|r| r.unwrap_or(REPLACEMENT_CHARACTER))\n\n .collect()\n\n}\n\n\n", "file_path": "src/lib/syntax/characters.rs", "rank": 11, "score": 99495.45710697738 }, { "content": "pub fn sdk_glob(segments: &[&str]) -> String {\n\n let sdk = sdk_dir();\n\n let mut 
path = vec![sdk.to_str().unwrap()];\n\n path.extend(segments);\n\n path.join(std::path::MAIN_SEPARATOR.to_string().as_ref())\n\n}\n", "file_path": "src/lib/sdk_dir.rs", "rank": 12, "score": 98810.76366123176 }, { "content": "#[inline]\n\npub fn checkers() -> Vec<&'static dyn Checker> {\n\n vec![\n\n &UNDEFINED_TYPE_REFERENCE,\n\n &UNDEFINED_REFERENCE,\n\n &UNDEFINED_BEHAVIOUR,\n\n &TYPE_ASSIGNMENT,\n\n &DUPLICATE_DECLARATION,\n\n &INVALID_IMPORT,\n\n &INVALID_INHERIT,\n\n &OUT_OF_BOUNDS_NUMBER,\n\n &IMPRECISE_FLOAT_LITERAL,\n\n &WRONG_NUMBER_OF_TYPE_ARGUMENTS,\n\n &PRIVATE_METHODS,\n\n &TYPE_PARAMETER_VARIANCE,\n\n &VARIABLE_INITIALIZATION,\n\n ]\n\n}\n", "file_path": "src/lib/semantics/checkers/mod.rs", "rank": 13, "score": 94041.99808986139 }, { "content": "use crate::vm::*;\n\n\n\nmacro_rules! expect {\n\n ($vm:expr, $opt:expr, $($arg:tt)*) => {\n\n match $opt {\n\n Some(t) => t,\n\n None => return VMResult::Panic(format!($($arg)*), $vm.call_stack.detach()),\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! unwrap {\n\n ($vm:expr, $opt:expr) => {\n\n match $opt {\n\n VMResult::Ok(t) => t,\n\n VMResult::Panic(s, cs) => return VMResult::Panic(s, cs),\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/lib/vm/vm_result.rs", "rank": 14, "score": 87953.10074287133 }, { "content": "pub enum VMResult<T> {\n\n Ok(T),\n\n Panic(String, CallStack),\n\n}\n\n\n\nimpl<T> VMResult<T> {\n\n pub fn report<M: Runtime>(self) -> Option<T> {\n\n match self {\n\n VMResult::Ok(t) => Some(t),\n\n VMResult::Panic(s, cs) => {\n\n M::print_panic(s, cs);\n\n None\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/lib/vm/vm_result.rs", "rank": 15, "score": 87947.54477089802 }, { "content": "fn fix_parentage(tree: &mut Tree, id: Id, parent_id: Option<Id>, start: Location) {\n\n if let Some(node) = tree.get_mut(id) {\n\n node.parent_id = parent_id;\n\n if let MessageSendExpression { .. 
} = node.kind {\n\n node.span.start = start;\n\n }\n\n let start = node.span.start.clone();\n\n for child in node.children() {\n\n fix_parentage(tree, child, Some(id), start.clone());\n\n }\n\n }\n\n}\n", "file_path": "src/lib/syntax/node.rs", "rank": 22, "score": 75223.38162213439 }, { "content": "fn subtract_i64(lhs: i64, rhs: i64) -> Arc<Object> {\n\n match lhs.checked_sub(rhs) {\n\n Some(res) => Object::box_i64(res),\n\n None => subtract_i128(lhs as i128, rhs as i128),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 23, "score": 74595.90332831576 }, { "content": "fn add_i32(lhs: i32, rhs: i32) -> Arc<Object> {\n\n match lhs.checked_add(rhs) {\n\n Some(res) => Object::box_i32(res),\n\n None => add_i64(lhs as i64, rhs as i64),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 24, "score": 74595.90332831576 }, { "content": "fn add_u64(lhs: u64, rhs: u64) -> Arc<Object> {\n\n match lhs.checked_add(rhs) {\n\n Some(res) => Object::box_u64(res),\n\n None => add_u128(lhs as u128, rhs as u128),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 25, "score": 74595.90332831576 }, { "content": "fn subtract_u64(lhs: u64, rhs: u64) -> Arc<Object> {\n\n match lhs.checked_sub(rhs) {\n\n Some(res) => Object::box_u64(res),\n\n None => subtract_u128(lhs as u128, rhs as u128),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 26, "score": 74595.90332831576 }, { "content": "fn subtract_i128(lhs: i128, rhs: i128) -> Arc<Object> {\n\n match lhs.checked_sub(rhs) {\n\n Some(res) => Object::box_i128(res),\n\n None => subtract_ibig(&lhs.into(), &rhs.into()),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 27, "score": 74595.90332831576 }, { "content": "fn subtract_u8(lhs: u8, rhs: u8) -> Arc<Object> {\n\n match lhs.checked_sub(rhs) {\n\n Some(res) => Object::box_u8(res),\n\n None => subtract_u16(lhs as u16, rhs as u16),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 28, "score": 
74595.90332831576 }, { "content": "fn subtract_u128(lhs: u128, rhs: u128) -> Arc<Object> {\n\n match lhs.checked_sub(rhs) {\n\n Some(res) => Object::box_u128(res),\n\n None => subtract_ubig(&BigUint::from(lhs), &BigUint::from(rhs)),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 29, "score": 74595.90332831576 }, { "content": "fn add_f64(lhs: f64, rhs: f64) -> Arc<Object> {\n\n Object::box_f64(lhs + rhs)\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 30, "score": 74595.90332831576 }, { "content": "fn add_f32(lhs: f32, rhs: f32) -> Arc<Object> {\n\n Object::box_f32(lhs + rhs)\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 31, "score": 74595.90332831576 }, { "content": "fn add_i16(lhs: i16, rhs: i16) -> Arc<Object> {\n\n match lhs.checked_add(rhs) {\n\n Some(res) => Object::box_i16(res),\n\n None => add_i32(lhs as i32, rhs as i32),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 32, "score": 74595.90332831576 }, { "content": "fn subtract_i8(lhs: i8, rhs: i8) -> Arc<Object> {\n\n match lhs.checked_sub(rhs) {\n\n Some(res) => Object::box_i8(res),\n\n None => subtract_i16(lhs as i16, rhs as i16),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 33, "score": 74595.90332831576 }, { "content": "fn add_u128(lhs: u128, rhs: u128) -> Arc<Object> {\n\n match lhs.checked_add(rhs) {\n\n Some(res) => Object::box_u128(res),\n\n None => add_ubig(&BigUint::from(lhs), &BigUint::from(rhs)),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 34, "score": 74595.90332831576 }, { "content": "fn subtract_i16(lhs: i16, rhs: i16) -> Arc<Object> {\n\n match lhs.checked_sub(rhs) {\n\n Some(res) => Object::box_i16(res),\n\n None => subtract_i32(lhs as i32, rhs as i32),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 35, "score": 74595.90332831576 }, { "content": "fn add_u32(lhs: u32, rhs: u32) -> Arc<Object> {\n\n match lhs.checked_add(rhs) {\n\n Some(res) => Object::box_u32(res),\n\n None 
=> add_u64(lhs as u64, rhs as u64),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 36, "score": 74595.90332831576 }, { "content": "fn subtract_u16(lhs: u16, rhs: u16) -> Arc<Object> {\n\n match lhs.checked_sub(rhs) {\n\n Some(res) => Object::box_u16(res),\n\n None => subtract_u32(lhs as u32, rhs as u32),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 37, "score": 74595.90332831576 }, { "content": "fn subtract_f32(lhs: f32, rhs: f32) -> Arc<Object> {\n\n Object::box_f32(lhs - rhs)\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 38, "score": 74595.90332831576 }, { "content": "fn add_i128(lhs: i128, rhs: i128) -> Arc<Object> {\n\n match lhs.checked_add(rhs) {\n\n Some(res) => Object::box_i128(res),\n\n None => add_ibig(&lhs.into(), &rhs.into()),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 39, "score": 74595.90332831576 }, { "content": "fn add_u8(lhs: u8, rhs: u8) -> Arc<Object> {\n\n match lhs.checked_add(rhs) {\n\n Some(res) => Object::box_u8(res),\n\n None => add_u16(lhs as u16, rhs as u16),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 40, "score": 74595.90332831576 }, { "content": "fn add_i64(lhs: i64, rhs: i64) -> Arc<Object> {\n\n match lhs.checked_add(rhs) {\n\n Some(res) => Object::box_i64(res),\n\n None => add_i128(lhs as i128, rhs as i128),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 41, "score": 74595.90332831576 }, { "content": "fn subtract_f64(lhs: f64, rhs: f64) -> Arc<Object> {\n\n Object::box_f64(lhs - rhs)\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 42, "score": 74595.90332831576 }, { "content": "fn add_u16(lhs: u16, rhs: u16) -> Arc<Object> {\n\n match lhs.checked_add(rhs) {\n\n Some(res) => Object::box_u16(res),\n\n None => add_u32(lhs as u32, rhs as u32),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 43, "score": 74595.90332831576 }, { "content": "fn subtract_i32(lhs: i32, rhs: i32) -> Arc<Object> {\n\n match 
lhs.checked_sub(rhs) {\n\n Some(res) => Object::box_i32(res),\n\n None => subtract_i64(lhs as i64, rhs as i64),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 44, "score": 74595.90332831576 }, { "content": "fn subtract_u32(lhs: u32, rhs: u32) -> Arc<Object> {\n\n match lhs.checked_sub(rhs) {\n\n Some(res) => Object::box_u32(res),\n\n None => subtract_u64(lhs as u64, rhs as u64),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 45, "score": 74595.90332831576 }, { "content": "fn add_i8(lhs: i8, rhs: i8) -> Arc<Object> {\n\n match lhs.checked_add(rhs) {\n\n Some(res) => Object::box_i8(res),\n\n None => add_i16(lhs as i16, rhs as i16),\n\n }\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 46, "score": 74595.90332831576 }, { "content": "fn uppercase(n: u16) -> u16 {\n\n (n as u8 as char).to_ascii_uppercase() as u16\n\n}\n\n\n", "file_path": "src/lib/syntax/lexer.rs", "rank": 47, "score": 72617.03391739045 }, { "content": "fn add_fbig(lhs: &BigFraction, rhs: &BigFraction) -> Arc<Object> {\n\n Object::box_fbig(lhs + rhs)\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 48, "score": 71200.21556732032 }, { "content": "fn add_ubig(lhs: &BigUint, rhs: &BigUint) -> Arc<Object> {\n\n Object::box_ubig(lhs + rhs)\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 49, "score": 71200.21556732032 }, { "content": "fn add_ibig(lhs: &BigInt, rhs: &BigInt) -> Arc<Object> {\n\n Object::box_ibig(lhs + rhs)\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 50, "score": 71200.21556732032 }, { "content": "fn subtract_fbig(lhs: &BigFraction, rhs: &BigFraction) -> Arc<Object> {\n\n Object::box_fbig(lhs - rhs)\n\n}\n\n\n\nimpl Runtime for () {\n\n fn print_panic(message: String, call_stack: CallStack) {\n\n panic!(\"{}\\n{:#?}\", message, call_stack)\n\n }\n\n}\n\n\n\n#[allow(non_camel_case_types)]\n\n#[derive(Debug, Clone, PartialEq)]\n\npub enum NativeMethod {\n\n Number_plus,\n\n Number_minus,\n\n Object_eq,\n\n 
Object_asString,\n\n}\n\n\n\nimpl<'a> From<&'a str> for NativeMethod {\n", "file_path": "src/lib/vm/runtime.rs", "rank": 51, "score": 71200.21556732032 }, { "content": "fn subtract_ibig(lhs: &BigInt, rhs: &BigInt) -> Arc<Object> {\n\n Object::box_ibig(lhs - rhs)\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 52, "score": 71200.21556732032 }, { "content": "fn subtract_ubig(lhs: &BigUint, rhs: &BigUint) -> Arc<Object> {\n\n Object::box_ubig(lhs + rhs)\n\n}\n\n\n", "file_path": "src/lib/vm/runtime.rs", "rank": 53, "score": 71200.21556732032 }, { "content": " class C {\n\n is A.\n\n }\n\n \"#,\n\n ),\n\n r#\"\n\n @N/A$methods\n\n DeclareMethod \"x\" @N/A#x\n\n\n\n @N/B$methods\n\n DeclareMethod \"x\" @N/B#x\n\n\n\n @N/A\n\n DeclareClass \"N/A\"\n\n UseMethod @N/A#x\n\n\n\n @N/B\n\n DeclareClass \"N/B\"\n\n OverrideMethod @N/A#x @N/B#x\n\n UseMethod @N/B#x\n", "file_path": "src/lib/generation/generator.rs", "rank": 54, "score": 66704.19304847531 }, { "content": " class C.\n\n \"#,\n\n ),\n\n r#\"\n\n @N/C\n\n DeclareClass \"N/C\"\n\n \n\n Halt\n\n \"#,\n\n );\n\n }\n\n\n\n #[test]\n\n fn simple_method() {\n\n assert_generates(\n\n Source::test(\n\n r#\"\n\n namespace N.\n\n\n", "file_path": "src/lib/generation/generator.rs", "rank": 55, "score": 66704.19304847531 }, { "content": "fn next_token(\n\n source: &Arc<Source>,\n\n state: &mut LexerState,\n\n stream: &mut CharStream,\n\n) -> Option<Token> {\n\n let (offset, ch) = stream.next()?;\n\n let mut kind;\n\n let mut end_offset = offset;\n\n\n\n let peek = stream.peek();\n\n let next_ch = peek.map(|(_, c)| *c).unwrap_or(NUL);\n\n\n\n match (ch, next_ch) {\n\n // Whitespace\n\n (s, _) if matches!(s, SPACE | NEWLINE | CARRIAGE_RETURN | TAB) => {\n\n let mut chars = vec![ch];\n\n loop {\n\n match stream.peek() {\n\n Some((_, s)) if matches!(*s, SPACE | NEWLINE | CARRIAGE_RETURN | TAB) => {\n\n let (o, c) = stream.next().unwrap();\n", "file_path": "src/lib/syntax/lexer.rs", "rank": 56, "score": 62819.09957637841 
}, { "content": "fn consume_number(\n\n first_char: u16,\n\n end_offset: &mut usize,\n\n stream: &mut CharStream,\n\n kind: &mut TokenKind,\n\n) {\n\n let first_int = consume_integer(first_char, end_offset, stream, 10);\n\n let mut base = 10;\n\n let mut hash = None;\n\n let mut after_hash = String::new();\n\n\n\n if let Some((_, HASH)) = stream.peek() {\n\n base = u64::from_str_radix(first_int.as_str(), 10).unwrap() as usize;\n\n\n\n if base <= 36 {\n\n stream.move_next();\n\n if let Some((_, n)) = stream.peek() {\n\n if INTEGER_CHARS[..base].contains(&uppercase(*n)) {\n\n let (_, h) = stream.next().unwrap();\n\n hash = Some(h);\n", "file_path": "src/lib/syntax/lexer.rs", "rank": 57, "score": 62819.09957637841 }, { "content": "fn consume_integer(\n\n first_char: u16,\n\n end_offset: &mut usize,\n\n stream: &mut CharStream,\n\n base: usize,\n\n) -> String {\n\n let candidates = &INTEGER_CHARS[..base];\n\n let mut chars = vec![first_char];\n\n\n\n loop {\n\n match stream.peek() {\n\n None => break,\n\n Some((_, character)) => {\n\n if candidates.contains(&uppercase(*character)) {\n\n let (index, character) = stream.next().unwrap();\n\n chars.push(character);\n\n *end_offset = index;\n\n } else {\n\n break;\n\n }\n\n }\n\n }\n\n }\n\n\n\n characters_to_string(chars.into_iter())\n\n}\n\n\n", "file_path": "src/lib/syntax/lexer.rs", "rank": 58, "score": 62819.09957637841 }, { "content": "struct FormatterDisplay<'a> {\n\n tree: &'a Tree,\n\n indent: &'a str,\n\n}\n\n\n\nimpl<'a> Display for FormatterDisplay<'a> {\n\n fn fmt(&self, f: &mut F) -> Result {\n\n Formatter::new(self.tree, self.indent).write_tree(f)\n\n }\n\n}\n\n\n\nimpl Display for Tree {\n\n fn fmt(&self, f: &mut F) -> Result {\n\n if f.alternate() {\n\n Formatter::new(self, \" \").write_tree(f)\n\n } else {\n\n write!(f, \"{}\", self.source.code)\n\n }\n\n }\n\n}\n", "file_path": "src/lib/format/formatter.rs", "rank": 59, "score": 61674.90451743883 }, { "content": "fn next_doc_token(\n\n source: 
&Arc<Source>,\n\n state: &mut LexerState,\n\n stream: &mut CharStream,\n\n) -> Option<Token> {\n\n let (offset, first_char) = stream.next()?;\n\n\n\n match first_char {\n\n NEWLINE | CARRIAGE_RETURN => {\n\n if sees_doc_newline(stream).unwrap_or(false) {\n\n stream.reset_view();\n\n return consume_doc_newline(source, stream, offset, first_char);\n\n }\n\n\n\n *state = LexerState::Normal;\n\n return Some(Token {\n\n span: Span::at_range(source, offset..offset + 1),\n\n kind: TokenKind::Whitespace(\"\\n\".into()),\n\n before: vec![],\n\n after: vec![],\n", "file_path": "src/lib/syntax/lexer.rs", "rank": 60, "score": 61530.40182192761 }, { "content": "fn resolve_number(\n\n unresolved: &Type,\n\n proposed: &Type,\n\n navigator: &Navigator,\n\n types: &Types,\n\n) -> Option<TypeAssignability> {\n\n if let Type::Class(_, class, _) = proposed {\n\n let class = navigator.find_node(*class)?;\n\n let (name, _, _) = navigator.qualified_name_of(&class)?;\n\n\n\n match (unresolved, name.as_ref()) {\n\n (Type::UnresolvedFloat(_, id), \"Loa/Object\")\n\n | (Type::UnresolvedFloat(_, id), \"Loa/Number\")\n\n | (Type::UnresolvedFloat(_, id), \"Loa/Float\")\n\n | (Type::UnresolvedFloat(_, id), \"Loa/Float16\")\n\n | (Type::UnresolvedFloat(_, id), \"Loa/Float32\")\n\n | (Type::UnresolvedFloat(_, id), \"Loa/Float64\")\n\n | (Type::UnresolvedFloat(_, id), \"Loa/BigFloat\")\n\n | (Type::UnresolvedInteger(_, id), \"Loa/Object\")\n\n | (Type::UnresolvedInteger(_, id), \"Loa/Number\")\n", "file_path": "src/lib/semantics/type_assignability.rs", "rank": 61, "score": 61530.40182192761 }, { "content": "fn consume_doc_newline(\n\n source: &Arc<Source>,\n\n stream: &mut CharStream,\n\n offset: usize,\n\n first_char: u16,\n\n) -> Option<Token> {\n\n let mut chars = vec![first_char];\n\n loop {\n\n if let Some((_, c)) = stream.peek() {\n\n match *c {\n\n SPACE | NEWLINE | CARRIAGE_RETURN | TAB => {\n\n let (_, c) = stream.next()?;\n\n chars.push(c);\n\n }\n\n _ => break,\n\n }\n\n }\n\n }\n\n\n\n 
chars.push(stream.next()?.1);\n", "file_path": "src/lib/syntax/lexer.rs", "rank": 62, "score": 61530.40182192761 }, { "content": "fn format_type_assignability(\n\n f: &mut fmt::Formatter,\n\n mut indentation: usize,\n\n assignability: &TypeAssignability,\n\n) -> fmt::Result {\n\n match assignability {\n\n TypeAssignability::Valid => Ok(()),\n\n TypeAssignability::Uncoercable {\n\n from,\n\n to,\n\n already_coerced_to,\n\n because,\n\n } => {\n\n for _ in 0..indentation {\n\n write!(f, \" \")?;\n\n }\n\n\n\n if indentation > 0 {\n\n write!(f, \"because \")?;\n\n }\n", "file_path": "src/lib/semantics/type_assignability.rs", "rank": 63, "score": 60327.30608987995 }, { "content": "fn check_message_argument(\n\n assignee_arg: Type,\n\n assigned_arg: Type,\n\n navigator: &Navigator,\n\n types: &Types,\n\n issues: &mut Vec<TypeAssignability>,\n\n) {\n\n let assignment = check_assignment(\n\n assigned_arg.clone(),\n\n assignee_arg.clone(),\n\n navigator,\n\n types,\n\n false,\n\n );\n\n if assignment.is_invalid() {\n\n issues.push(assignment);\n\n }\n\n}\n", "file_path": "src/lib/semantics/type_assignability.rs", "rank": 64, "score": 60327.30608987995 }, { "content": "pub fn check_assignment(\n\n assignee: Type,\n\n assigned: Type,\n\n navigator: &Navigator,\n\n types: &Types,\n\n invariant: bool,\n\n) -> TypeAssignability {\n\n match (&assignee, &assigned) {\n\n (Type::Unknown, _) | (_, Type::Unknown) => TypeAssignability::Valid,\n\n\n\n (Type::ClassObject(_), _) | (_, Type::ClassObject(_)) => TypeAssignability::Invalid {\n\n assignee,\n\n assigned,\n\n invariant,\n\n because: vec![],\n\n },\n\n\n\n (Type::Symbol(assignee_symbol), Type::Symbol(assigned_symbol)) => {\n\n if assignee_symbol == assigned_symbol {\n\n TypeAssignability::Valid\n", "file_path": "src/lib/semantics/type_assignability.rs", "rank": 65, "score": 57012.46802802071 }, { "content": "pub fn format_invalid_type_assignability(\n\n f: &mut fmt::Formatter,\n\n indentation: usize,\n\n assignee: &Type,\n\n 
assigned: &Type,\n\n because: &Vec<TypeAssignability>,\n\n invariant: bool,\n\n) -> fmt::Result {\n\n if indentation > 0 {\n\n write!(f, \"\\n\")?;\n\n }\n\n\n\n for _ in 0..indentation {\n\n write!(f, \" \")?;\n\n }\n\n\n\n if indentation > 0 {\n\n write!(f, \"because \")?;\n\n }\n\n\n", "file_path": "src/lib/semantics/type_assignability.rs", "rank": 66, "score": 54831.09089383157 }, { "content": "pub fn sdk_dir() -> PathBuf {\n\n unsafe { OVERRIDE_LOA_SDK.clone() }\n\n .or_else(|| var(LOA_SDK).ok().map(PathBuf::from))\n\n .or_else(|| current_dir().ok())\n\n .expect(\"Please set the LOA_SDK environment variable\")\n\n}\n\n\n", "file_path": "src/lib/sdk_dir.rs", "rank": 67, "score": 53339.71503176082 }, { "content": "#[allow(unused_variables)]\n\nfn cache_candidate<T, F: FnOnce() -> T>(name: &str, f: F) -> T {\n\n #[cfg(debug_assertions)]\n\n {\n\n let now = Instant::now();\n\n let result = f();\n\n if now.elapsed() > CACHE_CANDIDATE_WARNING_LIMIT {\n\n warn!(\"Cache candidate {:?} took {:?}.\", name, now.elapsed());\n\n }\n\n result\n\n }\n\n\n\n #[cfg(not(debug_assertions))]\n\n {\n\n f()\n\n }\n\n}\n", "file_path": "src/lib/semantics/mod.rs", "rank": 68, "score": 51626.393702968744 }, { "content": " failed\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Diagnostic {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let name: &'static str = self.into();\n\n write!(\n\n f,\n\n \"{:?} ({} @ {}:{})\",\n\n self.to_string(),\n\n name,\n\n self.span().start.uri,\n\n self.span().start.line,\n\n )\n\n }\n\n}\n\n\n\nimpl fmt::Display for Diagnostic {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n", "file_path": "src/lib/diagnostics/diagnostic.rs", "rank": 69, "score": 51067.9509469442 }, { "content": " DuplicatedDeclaration(_, _, _) => 8,\n\n InvalidInherit { .. 
} => 9,\n\n InvalidLiteralType(_, _) => 10,\n\n OutOfBounds(_, _, _) => 11,\n\n TooPreciseFloat(_, _, _) => 12,\n\n WrongNumberOfTypeArguments(_, _, _, _) => 13,\n\n InvalidAccessToPrivateMethod(_, _, _) => 14,\n\n InvalidTypeParameterReferenceVarianceUsage(_, _, _, _) => 15,\n\n IncompleteInitializer(_, _, _) => 16,\n\n UndefinedInitializedVariable(_, _, _) => 17,\n\n }\n\n }\n\n\n\n pub fn failed(diagnostics: &Vec<Diagnostic>) -> bool {\n\n let mut failed = false;\n\n for diagnostic in diagnostics.iter() {\n\n if let DiagnosticLevel::Error = diagnostic.level() {\n\n failed = true;\n\n }\n\n }\n", "file_path": "src/lib/diagnostics/diagnostic.rs", "rank": 70, "score": 51066.73676279518 }, { "content": "use crate::*;\n\nuse std::f64::INFINITY;\n\nuse std::fmt;\n\n\n\n#[derive(Clone, IntoStaticStr)]\n\npub enum Diagnostic {\n\n SyntaxError(Span, String),\n\n UndefinedTypeReference(Span, String),\n\n UndefinedReference(Span, String),\n\n UndefinedBehaviour(Span, semantics::Type, String),\n\n UndefinedImport(Span, String),\n\n UnexportedImport(Span, String),\n\n UnassignableType {\n\n span: Span,\n\n assignability: semantics::TypeAssignability,\n\n },\n\n DuplicatedDeclaration(Span, String, usize),\n\n InvalidInherit {\n\n span: Span,\n\n super_type: semantics::Type,\n", "file_path": "src/lib/diagnostics/diagnostic.rs", "rank": 71, "score": 51062.99072019993 }, { "content": " sub_type: semantics::Type,\n\n violations: Vec<InheritanceViolation>,\n\n },\n\n InvalidLiteralType(Span, semantics::Type),\n\n OutOfBounds(Span, semantics::Type, String),\n\n TooPreciseFloat(Span, semantics::Type, BigFraction),\n\n WrongNumberOfTypeArguments(Span, String, usize, usize),\n\n InvalidAccessToPrivateMethod(Span, String, String),\n\n InvalidTypeParameterReferenceVarianceUsage(Span, String, &'static str, &'static str),\n\n IncompleteInitializer(Span, String, Vec<String>),\n\n UndefinedInitializedVariable(Span, String, String),\n\n}\n\n\n\n#[derive(Clone)]\n\npub enum 
InheritanceViolation {\n\n BehaviourNotImplemented(semantics::Behaviour),\n\n OverrideNotSound(semantics::Behaviour, semantics::TypeAssignability),\n\n}\n\n\n\nimpl Diagnostic {\n", "file_path": "src/lib/diagnostics/diagnostic.rs", "rank": 72, "score": 51059.187421133116 }, { "content": " | InvalidAccessToPrivateMethod(_, _, _)\n\n | InvalidTypeParameterReferenceVarianceUsage(_, _, _, _)\n\n | IncompleteInitializer(_, _, _)\n\n | UndefinedInitializedVariable(_, _, _) => DiagnosticLevel::Error,\n\n\n\n TooPreciseFloat(_, _, _) => DiagnosticLevel::Warning,\n\n }\n\n }\n\n\n\n pub fn code(&self) -> usize {\n\n use Diagnostic::*;\n\n\n\n match self {\n\n SyntaxError(_, _) => 1,\n\n UndefinedTypeReference(_, _) => 2,\n\n UndefinedReference(_, _) => 3,\n\n UndefinedBehaviour(_, _, _) => 4,\n\n UndefinedImport(_, _) => 5,\n\n UnexportedImport(_, _) => 6,\n\n UnassignableType { .. } => 7,\n", "file_path": "src/lib/diagnostics/diagnostic.rs", "rank": 73, "score": 51056.35972870793 }, { "content": " name,\n\n if *params == 0 {\n\n \"no\".into()\n\n } else {\n\n params.to_string()\n\n },\n\n if *args == 0 {\n\n \"none\".into()\n\n } else {\n\n args.to_string()\n\n },\n\n ),\n\n InvalidAccessToPrivateMethod(_, class_name, method_selector) => {\n\n write!(f, \"`{}#{}` is private.\", class_name, method_selector)\n\n }\n\n InvalidTypeParameterReferenceVarianceUsage(_, name, usage, mark) => write!(\n\n f,\n\n \"`{}` cannot be used in {} position, because it's marked as `{}`.\",\n\n name, usage, mark\n\n ),\n", "file_path": "src/lib/diagnostics/diagnostic.rs", "rank": 74, "score": 51056.23184428026 }, { "content": " | UndefinedInitializedVariable(ref s, _, _) => s,\n\n }\n\n }\n\n\n\n pub fn level(&self) -> DiagnosticLevel {\n\n use Diagnostic::*;\n\n\n\n match self {\n\n SyntaxError(_, _)\n\n | UndefinedTypeReference(_, _)\n\n | UndefinedReference(_, _)\n\n | UndefinedBehaviour(_, _, _)\n\n | UndefinedImport(_, _)\n\n | UnexportedImport(_, _)\n\n | UnassignableType { .. 
}\n\n | DuplicatedDeclaration(_, _, _)\n\n | InvalidInherit { .. }\n\n | InvalidLiteralType(_, _)\n\n | OutOfBounds(_, _, _)\n\n | WrongNumberOfTypeArguments(_, _, _, _)\n", "file_path": "src/lib/diagnostics/diagnostic.rs", "rank": 75, "score": 51056.09740636315 }, { "content": " use Diagnostic::*;\n\n\n\n match self {\n\n SyntaxError(_, s) => write!(f, \"{}\", s),\n\n UndefinedTypeReference(_, s) => write!(f, \"`{}` is undefined.\", s),\n\n UndefinedReference(_, s) => write!(f, \"`{}` is undefined.\", s),\n\n UndefinedBehaviour(_, t, s) => write!(f, \"`{}` doesn't respond to `{}`.\", t, s),\n\n UndefinedImport(_, s) => write!(f, \"`{}` is undefined.\", s),\n\n UnexportedImport(_, s) => write!(f, \"`{}` is not exported.\", s),\n\n UnassignableType { assignability, .. } => write!(f, \"{}\", assignability),\n\n DuplicatedDeclaration(_, s, n) => {\n\n write!(f, \"`{}` is defined {} times in this scope.\", s, n)\n\n }\n\n InvalidInherit {\n\n sub_type,\n\n super_type,\n\n violations,\n\n ..\n\n } => {\n\n write!(f, \"`{}` doesn't act as `{}` because:\", sub_type, super_type)?;\n", "file_path": "src/lib/diagnostics/diagnostic.rs", "rank": 76, "score": 51054.09209716238 }, { "content": " pub fn span(&self) -> &Span {\n\n use Diagnostic::*;\n\n\n\n match self {\n\n SyntaxError(ref s, _)\n\n | UndefinedTypeReference(ref s, _)\n\n | UndefinedReference(ref s, _)\n\n | UndefinedBehaviour(ref s, _, _)\n\n | UndefinedImport(ref s, _)\n\n | UnexportedImport(ref s, _)\n\n | UnassignableType { span: ref s, .. }\n\n | DuplicatedDeclaration(ref s, _, _)\n\n | InvalidInherit { span: ref s, .. 
}\n\n | InvalidLiteralType(ref s, _)\n\n | OutOfBounds(ref s, _, _)\n\n | TooPreciseFloat(ref s, _, _)\n\n | WrongNumberOfTypeArguments(ref s, _, _, _)\n\n | InvalidAccessToPrivateMethod(ref s, _, _)\n\n | InvalidTypeParameterReferenceVarianceUsage(ref s, _, _, _)\n\n | IncompleteInitializer(ref s, _, _)\n", "file_path": "src/lib/diagnostics/diagnostic.rs", "rank": 77, "score": 51053.56800326078 }, { "content": " }\n\n }\n\n UndefinedInitializedVariable(_, var_name, class_name) => {\n\n write!(f, \"`{}` is not a variable of `{}`.\", var_name, class_name)\n\n }\n\n }\n\n }\n\n}\n\n\n\npub enum DiagnosticLevel {\n\n Error,\n\n Warning,\n\n Info,\n\n}\n", "file_path": "src/lib/diagnostics/diagnostic.rs", "rank": 78, "score": 51053.02105585012 }, { "content": " for violation in violations.iter() {\n\n match violation {\n\n InheritanceViolation::BehaviourNotImplemented(ref b) => {\n\n write!(f, \"\\n - it doesn't respond to `{}`\", b)?\n\n }\n\n InheritanceViolation::OverrideNotSound(ref b, ref t) => {\n\n write!(\n\n f,\n\n \"\\n• it doesn't respond to `{}` like `{}` would\",\n\n b.selector(),\n\n super_type,\n\n )?;\n\n if let semantics::TypeAssignability::Invalid {\n\n assignee,\n\n assigned,\n\n because,\n\n invariant,\n\n } = t\n\n {\n\n semantics::format_invalid_type_assignability(\n", "file_path": "src/lib/diagnostics/diagnostic.rs", "rank": 79, "score": 51051.982890312844 }, { "content": " IncompleteInitializer(_, selector, uninitialized_names) => {\n\n write!(f, \"Initializer `{}` must initialize \", selector)?;\n\n\n\n match uninitialized_names.len() {\n\n 1 => write!(f, \"`{}`.\", &uninitialized_names[0]),\n\n 2 => write!(\n\n f,\n\n \"`{}` and `{}`.\",\n\n &uninitialized_names[0], &uninitialized_names[1]\n\n ),\n\n n => {\n\n for (i, name) in uninitialized_names.iter().enumerate() {\n\n if i < n - 1 {\n\n write!(f, \"`{}`, \", name)?;\n\n } else {\n\n write!(f, \"and `{}`\", name)?;\n\n }\n\n }\n\n write!(f, \".\")\n\n }\n", "file_path": 
"src/lib/diagnostics/diagnostic.rs", "rank": 80, "score": 51049.010196734955 }, { "content": " f, 2, assignee, assigned, &because, *invariant,\n\n )?;\n\n }\n\n }\n\n }\n\n }\n\n Ok(())\n\n }\n\n InvalidLiteralType(_, type_) => {\n\n write!(f, \"`{}` is not a valid type for this literal.\", type_)\n\n }\n\n OutOfBounds(_, type_, message) => write!(f, \"`{}` must not be {}.\", type_, message),\n\n TooPreciseFloat(_, type_, fraction) => write!(\n\n f,\n\n \"`{:.2$}` is too precise to be coerced to {} without losing precision.\",\n\n fraction, type_, INFINITY as usize\n\n ),\n\n WrongNumberOfTypeArguments(_, name, params, args) => write!(\n\n f,\n\n \"`{}` takes {} type arguments, but was provided {}.\",\n", "file_path": "src/lib/diagnostics/diagnostic.rs", "rank": 81, "score": 51049.010196734955 }, { "content": " let mut code = String::new();\n\n io::stdin().read_to_string(&mut code)?;\n\n Ok(Self::new(SourceKind::Module, URI::Stdin, code))\n\n }\n\n\n\n pub fn files<S: AsRef<str>>(s: S) -> io::Result<Vec<Arc<Source>>> {\n\n Self::files_with_uri(s.as_ref(), |path| URI::File(path))\n\n }\n\n\n\n fn files_with_uri<F: Fn(PathBuf) -> URI>(g: &str, f: F) -> io::Result<Vec<Arc<Source>>> {\n\n let mut sources = vec![];\n\n match glob::glob(g) {\n\n Ok(paths) => {\n\n for path in paths {\n\n if let Ok(path) = path {\n\n let uri = f(path.clone());\n\n sources.push(Self::file_with_uri(path, uri)?);\n\n }\n\n }\n\n }\n", "file_path": "src/lib/source/source.rs", "rank": 82, "score": 51022.628813013194 }, { "content": " pub fn len(&self) -> usize {\n\n string_to_characters(self.code.clone()).len()\n\n }\n\n\n\n #[cfg(test)]\n\n pub fn test(code: &str) -> Arc<Source> {\n\n Self::new(SourceKind::Module, URI::Test, code.into())\n\n }\n\n\n\n #[cfg(test)]\n\n pub fn test_repl(code: &str) -> Arc<Source> {\n\n Self::new(SourceKind::REPLLine, URI::Test, code.into())\n\n }\n\n}\n\n\n\nimpl fmt::Display for Source {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, 
\"{}\", self.uri)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Source {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"Source({})\", self.uri)\n\n }\n\n}\n", "file_path": "src/lib/source/source.rs", "rank": 83, "score": 51021.101953880745 }, { "content": "use crate::syntax::string_to_characters;\n\nuse crate::*;\n\nuse std::fmt;\n\nuse std::io::{self, Read};\n\nuse std::path::PathBuf;\n\n\n\npub struct Source {\n\n pub kind: SourceKind,\n\n pub uri: URI,\n\n pub code: String,\n\n}\n\n\n\n#[derive(Clone)]\n\npub enum SourceKind {\n\n Module,\n\n REPLLine,\n\n}\n\n\n\nimpl Source {\n\n pub fn new(kind: SourceKind, uri: URI, code: String) -> Arc<Source> {\n", "file_path": "src/lib/source/source.rs", "rank": 84, "score": 51020.42958395382 }, { "content": " Arc::new(Source { kind, uri, code })\n\n }\n\n\n\n pub fn main<S: AsRef<str>>(main_class: S) -> Arc<Source> {\n\n let main_class = main_class.as_ref();\n\n let class_name = main_class.split(\"/\").collect::<Vec<_>>().pop().unwrap();\n\n\n\n Source::new(\n\n SourceKind::REPLLine,\n\n URI::Main,\n\n format!(\"import {}.\\n\\n{} run asString.\", main_class, class_name),\n\n )\n\n }\n\n\n\n pub fn file(path: PathBuf) -> io::Result<Arc<Source>> {\n\n let uri = URI::File(path.clone());\n\n Self::file_with_uri(path, uri)\n\n }\n\n\n\n pub fn stdin() -> io::Result<Arc<Source>> {\n", "file_path": "src/lib/source/source.rs", "rank": 85, "score": 51016.88279818154 }, { "content": " _ => (),\n\n }\n\n Ok(sources)\n\n }\n\n\n\n fn file_with_uri(path: PathBuf, uri: URI) -> io::Result<Arc<Source>> {\n\n let path = path.canonicalize()?;\n\n Ok(Self::new(\n\n SourceKind::Module,\n\n uri,\n\n std::fs::read_to_string(path)?,\n\n ))\n\n }\n\n\n\n pub fn stdlib() -> io::Result<Vec<Arc<Source>>> {\n\n Self::files_with_uri(sdk_glob(&[\"std\", \"**\", \"*.loa\"]).as_ref(), |path| {\n\n URI::Stdlib(path)\n\n })\n\n }\n\n\n", "file_path": "src/lib/source/source.rs", "rank": 86, "score": 51016.04746550593 }, { "content": 
"}\n\n\n\nimpl Into<Vec<BytecodeInstruction>> for Assembly {\n\n fn into(self) -> Vec<BytecodeInstruction> {\n\n self.compile(&mut Cursor::new())\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Assembly {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n for (i, section) in self.iter().enumerate() {\n\n if i > 0 {\n\n writeln!(f)?;\n\n }\n\n\n\n if let Some(ref comment) = section.leading_comment {\n\n writeln!(f, \"; {}\", comment)?;\n\n }\n\n\n\n let indent = if let Some(ref label) = section.label {\n", "file_path": "src/lib/assembly/assembly.rs", "rank": 87, "score": 50610.222477380295 }, { "content": " if let Some(ref label) = section.label {\n\n cursor.labels.insert(label.clone(), cursor.end);\n\n }\n\n cursor.end += section.instructions.len() as u64;\n\n }\n\n\n\n let mut instructions = vec![];\n\n for section in self.into_iter() {\n\n for assembly_instruction in section.instructions {\n\n instructions.push(assembly_instruction.compile(&cursor.labels));\n\n }\n\n }\n\n instructions\n\n }\n\n}\n\n\n\nimpl PartialEq for Assembly {\n\n fn eq(&self, other: &Assembly) -> bool {\n\n self.iter().collect::<Vec<_>>() == other.iter().collect::<Vec<_>>()\n\n }\n", "file_path": "src/lib/assembly/assembly.rs", "rank": 88, "score": 50609.13755474073 }, { "content": " Section {\n\n leading_comment: None,\n\n label: None,\n\n instructions: vec![],\n\n }\n\n }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.instructions.is_empty()\n\n }\n\n\n\n pub fn with_comment<S: Into<String>>(mut self, comment: S) -> Self {\n\n self.leading_comment = Some(comment.into());\n\n self\n\n }\n\n\n\n pub fn add_instruction(&mut self, instruction: InstructionKind) {\n\n self.instructions\n\n .push(Instruction::uncommented(instruction));\n\n }\n", "file_path": "src/lib/assembly/assembly.rs", "rank": 89, "score": 50608.866605651834 }, { "content": "use crate::bytecode::Instruction as BytecodeInstruction;\n\nuse crate::vm::NativeMethod;\n\nuse crate::HashMap;\n\nuse crate::*;\n\nuse 
std::fmt;\n\n\n\npub struct Cursor {\n\n pub end: u64,\n\n pub labels: HashMap<Label, u64>,\n\n}\n\n\n\nimpl Cursor {\n\n pub fn new() -> Cursor {\n\n Cursor {\n\n end: 0,\n\n labels: HashMap::new(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib/assembly/assembly.rs", "rank": 90, "score": 50608.41369299309 }, { "content": "\n\n pub fn with_instruction(mut self, instruction: InstructionKind) -> Self {\n\n self.add_instruction(instruction);\n\n self\n\n }\n\n\n\n pub fn with_commented_instruction<S: Into<String>>(\n\n mut self,\n\n comment: S,\n\n instruction: InstructionKind,\n\n ) -> Self {\n\n self.instructions\n\n .push(Instruction::commented(comment.into(), instruction));\n\n self\n\n }\n\n}\n\n\n\nimpl PartialEq for Section {\n\n fn eq(&self, rhs: &Section) -> bool {\n\n self.label == rhs.label && self.instructions == rhs.instructions\n", "file_path": "src/lib/assembly/assembly.rs", "rank": 91, "score": 50604.9496632085 }, { "content": " }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Section {\n\n pub leading_comment: Option<String>,\n\n pub label: Option<String>,\n\n pub instructions: Vec<Instruction>,\n\n}\n\n\n\nimpl Section {\n\n pub fn named<S: Into<String>>(label: S) -> Section {\n\n Section {\n\n leading_comment: None,\n\n label: Some(label.into()),\n\n instructions: vec![],\n\n }\n\n }\n\n\n\n pub fn unnamed() -> Section {\n", "file_path": "src/lib/assembly/assembly.rs", "rank": 92, "score": 50603.333847032816 }, { "content": "#[derive(Clone)]\n\npub struct Assembly {\n\n method_declaration_sections: Vec<Section>,\n\n class_declaration_sections: Vec<Section>,\n\n main_sections: Vec<Section>,\n\n sections: Vec<Section>,\n\n}\n\n\n\nimpl Assembly {\n\n pub fn new() -> Assembly {\n\n Assembly {\n\n method_declaration_sections: vec![],\n\n class_declaration_sections: vec![],\n\n main_sections: vec![],\n\n sections: vec![],\n\n }\n\n }\n\n\n\n pub fn add_method_declaration_section(&mut self, section: Section) {\n\n 
self.method_declaration_sections.push(section);\n", "file_path": "src/lib/assembly/assembly.rs", "rank": 93, "score": 50603.18116852557 }, { "content": "\n\n pub fn uncommented(kind: InstructionKind) -> Instruction {\n\n Instruction {\n\n leading_comment: None,\n\n kind,\n\n }\n\n }\n\n\n\n pub fn compile(&self, offsets: &HashMap<String, u64>) -> BytecodeInstruction {\n\n macro_rules! label {\n\n ($label:expr, $expected:expr) => {\n\n *offsets\n\n .get($label)\n\n .expect(format!(\"{} {} not found\", $expected, $label).as_ref())\n\n };\n\n }\n\n match self.kind {\n\n InstructionKind::Noop => BytecodeInstruction::Noop,\n\n InstructionKind::Halt => BytecodeInstruction::Halt,\n\n InstructionKind::Panic => BytecodeInstruction::Panic,\n", "file_path": "src/lib/assembly/assembly.rs", "rank": 94, "score": 50602.115278978454 }, { "content": " }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Instruction {\n\n pub leading_comment: Option<String>,\n\n pub kind: InstructionKind,\n\n}\n\n\n\nimpl fmt::Debug for Instruction {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n use InstructionKind::*;\n\n match self.kind {\n\n Noop => write!(f, \"Noop\"),\n\n Halt => write!(f, \"Halt\"),\n\n Panic => write!(f, \"Panic\"),\n\n DumpStack => write!(f, \"DumpStack\"),\n\n DeclareClass(ref name) => write!(f, \"DeclareClass {:?}\", name),\n\n DeclareVariable(ref name, ref vl, ref gl, ref sl) => {\n\n write!(f, \"DeclareVariable {:?} @{} @{} @{}\", name, vl, gl, sl)\n", "file_path": "src/lib/assembly/assembly.rs", "rank": 95, "score": 50601.504515466295 }, { "content": " LoadConstF64(ref value) => write!(f, \"LoadConstF64 {}\", value),\n\n LoadConstFBig(ref value) => write!(f, \"LoadConstFBig {}\", value),\n\n }\n\n }\n\n}\n\n\n\npub type Label = String;\n\n\n\n#[derive(PartialEq, Debug, Clone)]\n\npub enum InstructionKind {\n\n Noop,\n\n Halt,\n\n Panic,\n\n DumpStack,\n\n DeclareClass(String),\n\n DeclareVariable(String, Label, Label, Label),\n\n UseVariable(Label),\n\n 
DeclareMethod(String, Label),\n\n UseMethod(Label),\n\n OverrideMethod(Label, Label),\n", "file_path": "src/lib/assembly/assembly.rs", "rank": 96, "score": 50598.44961553401 }, { "content": " }\n\n\n\n pub fn into_iter(\n\n self,\n\n ) -> std::iter::Chain<\n\n std::iter::Chain<\n\n std::iter::Chain<std::vec::IntoIter<Section>, std::vec::IntoIter<Section>>,\n\n std::vec::IntoIter<Section>,\n\n >,\n\n std::vec::IntoIter<Section>,\n\n > {\n\n self.method_declaration_sections\n\n .into_iter()\n\n .chain(self.class_declaration_sections.into_iter())\n\n .chain(self.main_sections.into_iter())\n\n .chain(self.sections.into_iter())\n\n }\n\n\n\n pub fn compile(self, cursor: &mut Cursor) -> Vec<BytecodeInstruction> {\n\n for section in self.iter() {\n", "file_path": "src/lib/assembly/assembly.rs", "rank": 97, "score": 50597.80131642218 }, { "content": " LoadConstU128(u128),\n\n LoadConstUBig(BigUint),\n\n LoadConstI8(i8),\n\n LoadConstI16(i16),\n\n LoadConstI32(i32),\n\n LoadConstI64(i64),\n\n LoadConstI128(i128),\n\n LoadConstIBig(BigInt),\n\n LoadConstF32(f32),\n\n LoadConstF64(f64),\n\n LoadConstFBig(BigFraction),\n\n}\n\n\n\nimpl Instruction {\n\n pub fn commented(comment: String, kind: InstructionKind) -> Instruction {\n\n Instruction {\n\n leading_comment: Some(comment),\n\n kind,\n\n }\n\n }\n", "file_path": "src/lib/assembly/assembly.rs", "rank": 98, "score": 50597.27221601518 }, { "content": "use crate::semantics::*;\n\nuse crate::syntax::*;\n\nuse crate::*;\n\n\n\npub struct VariableInitialization;\n\n\n\nimpl VariableInitialization {\n\n fn check_class(\n\n &self,\n\n class: &Node,\n\n analysis: &mut Analysis,\n\n diagnostics: &mut Vec<Diagnostic>,\n\n ) {\n\n if let Some((class_name, _)) = analysis.navigator.symbol_of(class) {\n\n let variables = analysis.navigator.variables_of_class(class);\n\n let variable_names = variables\n\n .iter()\n\n .filter_map(|v| analysis.navigator.symbol_of(v))\n\n .map(|(name, _)| name)\n\n .collect::<Vec<_>>();\n", "file_path": 
"src/lib/semantics/checkers/variable_initialization.rs", "rank": 99, "score": 31.97801157336269 } ]
Rust
src/api/process/process.rs
DaanA32/lunatic
b7dc3f98e10c50337526f8a42cb39032f7b4b9be
use anyhow::Result; use async_wormhole::{ stack::{OneMbStack, Stack}, AsyncWormhole, AsyncYielder, }; use lazy_static::lazy_static; use smol::{Executor as TaskExecutor, Task}; use uptown_funk::{Executor, FromWasm, HostFunctions, ToWasm}; use crate::module::{LunaticModule, Runtime}; use crate::{ api::{channel::ChannelReceiver, heap_profiler::HeapProfilerState}, linker::*, }; use log::info; use std::future::Future; use super::api::ProcessState; lazy_static! { pub static ref EXECUTOR: TaskExecutor<'static> = TaskExecutor::new(); } pub enum FunctionLookup { TableIndex(u32), Name(&'static str), } #[derive(Clone)] pub enum MemoryChoice { Existing, New(Option<u32>), } pub struct Process { task: Task<Result<()>>, } impl Process { pub fn task(self) -> Task<Result<()>> { self.task } pub fn create_with_api<A>( module: LunaticModule, function: FunctionLookup, memory: MemoryChoice, api: A, ) -> anyhow::Result<(A::Return, impl Future<Output = Result<()>>)> where A: HostFunctions + 'static, A::Wrap: Send, { let created_at = std::time::Instant::now(); let runtime = module.runtime(); let (ret, api) = api.split(); let stack = OneMbStack::new()?; let mut process = AsyncWormhole::new(stack, move |yielder| { let yielder_ptr = &yielder as *const AsyncYielder<anyhow::Result<()>> as usize; match module.runtime() { Runtime::Wasmtime => { let mut linker = WasmtimeLunaticLinker::new(module, yielder_ptr, memory)?; linker.add_api::<A>(api); let instance = linker.instance()?; match function { FunctionLookup::Name(name) => { let func = instance.get_func(name).ok_or_else(|| { anyhow::Error::msg(format!( "No function {} in wasmtime instance", name )) })?; let performance_timer = std::time::Instant::now(); func.call(&[])?; info!(target: "performance", "Process {} finished in {:.5} ms.", name, performance_timer.elapsed().as_secs_f64() * 1000.0); } FunctionLookup::TableIndex(index) => { let func = instance.get_func("lunatic_spawn_by_index").ok_or_else(|| { anyhow::Error::msg( "No function 
lunatic_spawn_by_index in wasmtime instance", ) })?; func.call(&[(index as i32).into()])?; } } Ok(()) } } })?; let mut wasmtime_cts_saver = super::tls::CallThreadStateSaveWasmtime::new(); process.set_pre_post_poll(move || match runtime { Runtime::Wasmtime => wasmtime_cts_saver.swap(), }); info!(target: "performance", "Total time {:.5} ms.", created_at.elapsed().as_secs_f64() * 1000.0); Ok((ret, process)) } pub async fn create( context_receiver: Option<ChannelReceiver>, module: LunaticModule, function: FunctionLookup, memory: MemoryChoice, profiler: <HeapProfilerState as HostFunctions>::Wrap, ) -> Result<()> { let api = crate::api::default::DefaultApi::new(context_receiver, module.clone()); let ((p, _), fut) = Process::create_with_api(module, function, memory, api)?; profiler.lock().unwrap().add_process(p.clone()); fut.await?; p.lock().unwrap().free_all(); Ok(()) } pub fn spawn<Fut>(future: Fut) -> Self where Fut: Future<Output = Result<()>> + Send + 'static, { let task = EXECUTOR.spawn(future); Self { task } } } impl ToWasm<&mut ProcessState> for Process { type To = u32; fn to( state: &mut ProcessState, _: &impl Executor, process: Self, ) -> Result<u32, uptown_funk::Trap> { Ok(state.processes.add(process)) } } impl FromWasm<&mut ProcessState> for Process { type From = u32; fn from( state: &mut ProcessState, _: &impl Executor, process_id: u32, ) -> Result<Self, uptown_funk::Trap> where Self: Sized, { match state.processes.remove(process_id) { Some(process) => Ok(process), None => Err(uptown_funk::Trap::new("Process not found")), } } }
use anyhow::Result; use async_wormhole::{ stack::{OneMbStack, Stack}, AsyncWormhole, AsyncYielder, }; use lazy_static::lazy_static; use smol::{Executor as TaskExecutor, Task}; use uptown_funk::{Executor, FromWasm, HostFunctions, ToWasm}; use crate::module::{LunaticModule, Runtime}; use crate::{ api::{channel::ChannelReceiver, heap_profiler::HeapProfilerState}, linker::*, }; use log::info; use std::future::Future; use super::api::ProcessState; lazy_static! { pub static ref EXECUTOR: TaskExecutor<'static> = TaskExecutor::new(); } pub enum FunctionLookup { TableIndex(u32), Name(&'static str), } #[derive(Clone)] pub enum MemoryChoice { Existing, New(Option<u32>), } pub struct Process { task: Task<Result<()>>, } impl Process { pub fn task(self) -> Task<Result<()>> { self.task } pub fn create_with_api<A>( module: LunaticModule, function: FunctionLookup, memory: MemoryChoice, api: A, ) -> anyhow::Result<(A::Return, impl Future<Output = Result<()>>)> where A: HostFunctions + 'static, A::Wrap: Send, { let created_at = std::time::Instant::now(); let runtime = module.runtime(); let (ret, api) = api.split(); let stack = OneMbStack::new()?; let mut process = AsyncWormhole::new(stack, move |yielder| { let yielder_ptr = &yielder as *const AsyncYielder<anyhow::Result<()>> as usize; match module.runtime() { Runtime::Wasmtime => { let mut linker = WasmtimeLunaticLinker::new(module, yielder_ptr, memory)?; linker.add_api::<A>(api); let instance = linker.instance()?; match function { FunctionLookup::Name(name) => {
pub async fn create( context_receiver: Option<ChannelReceiver>, module: LunaticModule, function: FunctionLookup, memory: MemoryChoice, profiler: <HeapProfilerState as HostFunctions>::Wrap, ) -> Result<()> { let api = crate::api::default::DefaultApi::new(context_receiver, module.clone()); let ((p, _), fut) = Process::create_with_api(module, function, memory, api)?; profiler.lock().unwrap().add_process(p.clone()); fut.await?; p.lock().unwrap().free_all(); Ok(()) } pub fn spawn<Fut>(future: Fut) -> Self where Fut: Future<Output = Result<()>> + Send + 'static, { let task = EXECUTOR.spawn(future); Self { task } } } impl ToWasm<&mut ProcessState> for Process { type To = u32; fn to( state: &mut ProcessState, _: &impl Executor, process: Self, ) -> Result<u32, uptown_funk::Trap> { Ok(state.processes.add(process)) } } impl FromWasm<&mut ProcessState> for Process { type From = u32; fn from( state: &mut ProcessState, _: &impl Executor, process_id: u32, ) -> Result<Self, uptown_funk::Trap> where Self: Sized, { match state.processes.remove(process_id) { Some(process) => Ok(process), None => Err(uptown_funk::Trap::new("Process not found")), } } }
let func = instance.get_func(name).ok_or_else(|| { anyhow::Error::msg(format!( "No function {} in wasmtime instance", name )) })?; let performance_timer = std::time::Instant::now(); func.call(&[])?; info!(target: "performance", "Process {} finished in {:.5} ms.", name, performance_timer.elapsed().as_secs_f64() * 1000.0); } FunctionLookup::TableIndex(index) => { let func = instance.get_func("lunatic_spawn_by_index").ok_or_else(|| { anyhow::Error::msg( "No function lunatic_spawn_by_index in wasmtime instance", ) })?; func.call(&[(index as i32).into()])?; } } Ok(()) } } })?; let mut wasmtime_cts_saver = super::tls::CallThreadStateSaveWasmtime::new(); process.set_pre_post_poll(move || match runtime { Runtime::Wasmtime => wasmtime_cts_saver.swap(), }); info!(target: "performance", "Total time {:.5} ms.", created_at.elapsed().as_secs_f64() * 1000.0); Ok((ret, process)) }
function_block-function_prefix_line
[ { "content": "/// Adds WASM functions required by the stdlib implementation:\n\n/// * `lunatic_spawn_by_index(i32)`\n\n/// - receives the index of the function (in the table) to be called indirectly.\n\npub fn patch(module: &mut Module) -> Result<()> {\n\n if let Some(main_function_table) = module.tables.main_function_table()? {\n\n let mut builder = walrus::FunctionBuilder::new(&mut module.types, &[ValType::I32], &[]);\n\n let lunatic_spawn_by_index_type = module.types.add(&[], &[]);\n\n // Create the index parameter\n\n let index = module.locals.add(ValType::I32);\n\n // invoke __wasm_call_ctors to properly setup environment\n\n // FIXME remove this when wasm adds proper environment initialisation\n\n match module.funcs.by_name(\"__wasm_call_ctors\") {\n\n Some(ctors) => {\n\n builder.func_body().call(ctors);\n\n }\n\n // ignore if __wasm_call_ctors wasn't found\n\n None => log::info!(\"__wasm_call_ctors wasn't found.\"),\n\n };\n\n builder\n\n .func_body()\n\n .local_get(index)\n\n .call_indirect(lunatic_spawn_by_index_type, main_function_table);\n\n let function = builder.finish(vec![index], &mut module.funcs);\n\n module.exports.add(\"lunatic_spawn_by_index\", function);\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/module/normalisation/stdlib.rs", "rank": 0, "score": 224824.7494141342 }, { "content": "// This function adds profiling capabilities to a local function with\n\n// provided name. 
If function with such name exists this function will:\n\n// * add an import statement that imports \"{name}_profiler\"\n\n// * move all instructions to a new function called \"{name}_wrap\"\n\n// * invoke \"{name}_wrap\" function and \"{name}_profiler\" from original function\n\n//\n\n// In essence it will convert (pseudo code):\n\n//\n\n// (func $name ...\n\n// ...\n\n// )\n\n//\n\n// Into:\n\n//\n\n// (import \"name_profiler\")\n\n//\n\n// (func $name ...\n\n// call $name_wrap\n\n// call $name_profiler\n\n// )\n\n// (func $name_wrap ...\n\n// ...\n\n// )\n\n//\n\n// \"{name}_profiler function has the same arguments as \"name\" function with one\n\n// optional last argument. This last argument is a return value from \"name\"\n\n// function if such exists.\n\nfn add_profiler_to(module: &mut Module, name: &str) -> Result<()> {\n\n // find local function in module\n\n let fn_id = module\n\n .funcs\n\n .by_name(name)\n\n .ok_or(anyhow::Error::msg(format!(\n\n \"heap_profiler: '{}' was not found in wasm\",\n\n name\n\n )))?;\n\n let types = module.types.params_results(module.funcs.get(fn_id).ty());\n\n let (params, results) = (types.0.to_vec(), types.1.to_vec());\n\n\n\n // Import profiler. Profilers don't return anything. Profilers last argument\n\n // is result from the original function. 
For example, local function \"malloc(i32) -> u32\"\n\n // will import profiler of type \"malloc(i32, u32)\".\n\n let profiler_type = module\n\n .types\n\n .add(&[params.clone(), results.clone()].concat(), &[]);\n\n let profiler_id = module\n\n .add_import_func(\n", "file_path": "src/module/normalisation/heap_profiler.rs", "rank": 1, "score": 211043.19952693846 }, { "content": "/// Finds memory with the index 0 and turns it into an import.\n\n/// Returns the initial and maximum memory sizes.\n\npub fn patch(module: &mut Module) -> (u32, Option<u32>) {\n\n if let Some(memory) = module.memories.iter_mut().next() {\n\n let memory_id = memory.id();\n\n let memory_import = module\n\n .imports\n\n .add(\"lunatic\", \"memory\", ImportKind::Memory(memory_id));\n\n memory.shared = false;\n\n memory.import = Some(memory_import);\n\n (memory.initial, memory.maximum)\n\n } else {\n\n (0, None)\n\n }\n\n}\n", "file_path": "src/module/normalisation/shared_memory.rs", "rank": 2, "score": 189784.8162312358 }, { "content": "pub fn instance_creation(c: &mut Criterion) {\n\n c.bench_function(\"spawn thread\", |b| {\n\n b.iter(move || {\n\n std::thread::spawn(|| 1 + 3);\n\n });\n\n });\n\n\n\n c.bench_function(\"Wasmtime instance creation\", |b| {\n\n let engine = wasmtime::Engine::default();\n\n let wasm = include_bytes!(\"guest/start.wasm\");\n\n let module = wasmtime::Module::new(&engine, &wasm).unwrap();\n\n\n\n b.iter(move || {\n\n let store = wasmtime::Store::new(&engine);\n\n let linker = wasmtime::Linker::new(&store);\n\n let _instance = linker.instantiate(&module);\n\n store\n\n });\n\n });\n\n\n", "file_path": "benches/instance_creation.rs", "rank": 3, "score": 184343.13522090926 }, { "content": "/// Modifies the WASM binary to add heap profiling support. 
Every time one of allocation function\n\n/// is called:\n\n/// * malloc(arg1) -> ret\n\n/// * aligned_alloc(arg1, arg2) -> ret\n\n/// * calloc(arg1, arg2) -> ret\n\n/// * realloc(arg1, arg2) -> ret\n\n/// * free(arg1)\n\n/// extra call to its profiling function will be invoked:\n\n/// * malloc_profiler(arg1, ret)\n\n/// * aligned_alloc_profiler(arg1, arg2, ret)\n\n/// * calloc_profiler(arg1, arg2, ret)\n\n/// * realloc_profiler(arg1, arg2, ret)\n\n/// * free_profiler(arg1)\n\n///\n\n/// Profiling functions are imported from heap_profiler module.\n\n///\n\n/// Functions that can't be found in a module are ignored and error is logged.\n\npub fn patch(module: &mut Module) {\n\n // NOTE rusts global allocator __rust_alloc sometimes will\n\n // execute aligned_alloc instead of malloc\n\n [\"malloc\", \"aligned_alloc\", \"calloc\", \"realloc\", \"free\"]\n\n .iter()\n\n .for_each(|name| {\n\n add_profiler_to(module, name).unwrap_or_else(|e| error!(\"{}\", e));\n\n });\n\n}\n\n\n", "file_path": "src/module/normalisation/heap_profiler.rs", "rank": 4, "score": 181564.0882716932 }, { "content": "/// Modifies the WASM binary to add a `yield` call after `REDUCTION_LIMIT` of **operations** has\n\n/// been reached. 
Function calls and loop iterations without calls are counted as **operations**.\n\n/// The idea behind this is to not allow any WASM Instance to block a thread for too long.\n\n///\n\n/// To achieve this the following things are inserted into the WASM module:\n\n/// * A global variable to hold the current count\n\n/// * An import to the host provided `lunatic::yield` function\n\n/// * Instructions on top of each function to check if we reached the `REDUCTION_LIMIT` and yield.\n\n/// * Instructions on top of tight loops to check if we reached the `REDUCTION_LIMIT` and yield.\n\npub fn patch(module: &mut Module) {\n\n let counter = module\n\n .globals\n\n .add_local(ValType::I32, true, InitExpr::Value(ir::Value::I32(0)));\n\n let yield_type = module.types.add(&[], &[]);\n\n let yield_import = module.add_import_func(\"lunatic\", \"yield_\", yield_type);\n\n\n\n // If a function is called inside a loop we can avoid inserting the reduction count inside of it, because all\n\n // function calls will also perform a reduction count. 
But this is not true for imported functions.\n\n // To make it easier to check if an imported function is called we keep a list of all of them around.\n\n let imported_functions: Vec<FunctionId> = module\n\n .imports\n\n .iter()\n\n .filter_map(|import| match import.kind {\n\n ImportKind::Function(function) => Some(function),\n\n _ => None,\n\n })\n\n .collect();\n\n\n\n for (_, function) in module.funcs.iter_local_mut() {\n\n patch_function(function, counter, yield_import.0, &imported_functions)\n\n }\n\n}\n\n\n", "file_path": "src/module/normalisation/reduction_counting.rs", "rank": 5, "score": 181560.74562824762 }, { "content": "pub fn run() -> Result<()> {\n\n let opts: Opts = Opts::parse();\n\n let is_profile = opts.profile;\n\n\n\n let wasm = fs::read(opts.input).expect(\"Can't open .wasm file\");\n\n\n\n let module =\n\n module::LunaticModule::new(&wasm, Runtime::default(), is_profile, opts.normalised_out)?;\n\n\n\n // Set up async runtime\n\n let cpus = thread::available_concurrency().unwrap();\n\n let (signal, shutdown) = smol::channel::unbounded::<()>();\n\n let (_, profiler) = heap_profiler::HeapProfilerState::new().split();\n\n\n\n Parallel::new()\n\n .each(0..cpus.into(), |_| {\n\n smol::future::block_on(EXECUTOR.run(shutdown.recv()))\n\n })\n\n .finish(|| {\n\n smol::future::block_on(async {\n", "file_path": "src/main.rs", "rank": 6, "score": 162405.77518948133 }, { "content": "// TODO: move to normalisation/utils.rs ?\n\n// Create a new local function that will have same signiture and same\n\n// instructions as the supplied local function.\n\nfn clone_function(module: &mut Module, fn_id: FunctionId, name: Option<String>) -> FunctionId {\n\n let types = module.types.params_results(module.funcs.get(fn_id).ty());\n\n let (params, results) = (types.0.to_vec(), types.1.to_vec());\n\n\n\n let mut fn_builder = FunctionBuilder::new(&mut module.types, &params, &results);\n\n let fn_local_function = module.funcs.get(fn_id).kind.unwrap_local();\n\n if let 
Some(name) = name {\n\n fn_builder.name(name);\n\n }\n\n let mut fn_instr_seq = fn_builder.func_body();\n\n\n\n // copy instructions from fn_id to new function\n\n clone_rec(\n\n fn_local_function,\n\n fn_local_function.block(fn_local_function.entry_block()),\n\n &mut fn_instr_seq,\n\n &mut HashMap::new(),\n\n );\n\n let fn_copy_id = fn_builder.finish(fn_local_function.args.clone(), &mut module.funcs);\n\n\n\n // number of instructions in original and cloned function should match\n\n assert_eq!(\n\n module.funcs.get(fn_id).kind.unwrap_local().size(),\n\n module.funcs.get(fn_copy_id).kind.unwrap_local().size()\n\n );\n\n fn_copy_id\n\n}\n\n\n", "file_path": "src/module/normalisation/heap_profiler.rs", "rank": 7, "score": 157097.71663918364 }, { "content": "/// Patches:\n\n/// * Add reduction counters and yielding to functions and ~hot loops~.\n\n/// * Add low level functions required by the Lunatic stdlib.\n\n/// * Transforming defined memories into imported (shared) ones.\n\npub fn patch(\n\n module_buffer: &[u8],\n\n is_profile: bool,\n\n is_normalisation_out: bool,\n\n) -> Result<((u32, Option<u32>), Vec<u8>), Error> {\n\n let mut module = Module::from_buffer(&module_buffer)?;\n\n\n\n reduction_counting::patch(&mut module);\n\n stdlib::patch(&mut module)?;\n\n if is_profile {\n\n heap_profiler::patch(&mut module);\n\n }\n\n let memory = shared_memory::patch(&mut module);\n\n let wasm = module.emit_wasm();\n\n\n\n if is_normalisation_out {\n\n let mut normalisation_out = File::create(\"normalisation.wasm\")?;\n\n normalisation_out.write_all(&wasm)?;\n\n }\n\n\n\n Ok((memory, wasm))\n\n}\n", "file_path": "src/module/normalisation/mod.rs", "rank": 8, "score": 139787.31761078717 }, { "content": "pub fn get_namespace(attributes: &AttributeArgs) -> Result<&LitStr, TokenStream> {\n\n for kv in attributes.iter() {\n\n match kv {\n\n NestedMeta::Meta(meta) => match meta {\n\n Meta::NameValue(name_value) => {\n\n let key = match name_value.path.segments.first() {\n\n 
Some(path_segment) => &path_segment.ident,\n\n None => return Err(namespace_error(kv)),\n\n };\n\n\n\n if key != \"namespace\" {\n\n continue;\n\n }\n\n\n\n let namespace = match &name_value.lit {\n\n Lit::Str(lit_str) => lit_str,\n\n _ => return Err(namespace_error(kv)),\n\n };\n\n\n\n return Ok(namespace);\n", "file_path": "uptown_funk/uptown_funk_macro/src/attribute.rs", "rank": 9, "score": 138057.84061544554 }, { "content": "pub fn platform_symlink<P: AsRef<Path>>(old_path: P, new_path: P) -> StatusResult {\n\n symlink(old_path, new_path)?;\n\n Status::Success.into()\n\n}\n", "file_path": "src/api/wasi/unix.rs", "rank": 10, "score": 137708.72303832386 }, { "content": "pub fn platform_symlink<P: AsRef<Path>>(old_path: P, new_path: P) -> StatusResult {\n\n if metadata(&old_path)?.is_dir() {\n\n symlink_dir(old_path, new_path)?\n\n } else {\n\n symlink_file(old_path, new_path)?\n\n };\n\n Status::Success.into()\n\n}\n", "file_path": "src/api/wasi/windows.rs", "rank": 11, "score": 137708.72303832386 }, { "content": "/// Return a configured Wasmtime engine.\n\npub fn engine() -> Engine {\n\n static mut ENGINE: Option<Engine> = None;\n\n static INIT: Once = Once::new();\n\n unsafe {\n\n INIT.call_once(|| {\n\n let mut config = Config::new();\n\n config.wasm_threads(true);\n\n config.wasm_simd(true);\n\n config.wasm_reference_types(true);\n\n config.static_memory_guard_size(8 * 1024 * 1024); // 8 Mb\n\n ENGINE = Some(Engine::new(&config).unwrap());\n\n });\n\n ENGINE.clone().unwrap()\n\n }\n\n}\n", "file_path": "src/linker/wasmtime.rs", "rank": 12, "score": 136571.73485495482 }, { "content": "pub fn platform_clock_res_get(clock_id: Clockid, mut res: Pointer<Timestamp>) -> Status {\n\n let resolution_val = match clock_id {\n\n // resolution of monotonic clock at 10ms, from:\n\n // https://docs.microsoft.com/en-us/windows/desktop/api/sysinfoapi/nf-sysinfoapi-gettickcount64\n\n Clockid::Realtime => 1,\n\n Clockid::Monotonic => 10_000_000,\n\n // TODO: verify or compute 
this\n\n Clockid::ProcessCpuTimeId => {\n\n return Status::Inval;\n\n }\n\n Clockid::ThreadCpuTimeId => {\n\n return Status::Inval;\n\n }\n\n Clockid::Unsupported => return Status::Inval,\n\n };\n\n res.set(resolution_val);\n\n Status::Success\n\n}\n\n\n", "file_path": "src/api/wasi/windows.rs", "rank": 13, "score": 132643.5935461711 }, { "content": "pub fn platform_clock_res_get(clock_id: Clockid, mut res: Pointer<Timestamp>) -> Status {\n\n let unix_clock_id = match clock_id {\n\n Clockid::Realtime => CLOCK_REALTIME,\n\n Clockid::Monotonic => CLOCK_MONOTONIC,\n\n Clockid::ProcessCpuTimeId => CLOCK_PROCESS_CPUTIME_ID,\n\n Clockid::ThreadCpuTimeId => CLOCK_THREAD_CPUTIME_ID,\n\n Clockid::Unsupported => return Status::Inval,\n\n };\n\n\n\n let (output, timespec_out) = unsafe {\n\n let mut timespec_out: timespec = timespec {\n\n tv_sec: 0,\n\n tv_nsec: 0,\n\n };\n\n (clock_getres(unix_clock_id, &mut timespec_out), timespec_out)\n\n };\n\n\n\n let t_out = (timespec_out.tv_sec * 1_000_000_000).wrapping_add(timespec_out.tv_nsec);\n\n res.set(t_out as Timestamp);\n\n\n\n errno_to_status(output)\n\n}\n\n\n", "file_path": "src/api/wasi/unix.rs", "rank": 14, "score": 132643.5935461711 }, { "content": "#[test]\n\nfn wasmtime_ref_str_test() {\n\n let store = wasmtime::Store::default();\n\n let wasm = read(\"tests/wasm/ref_str.wasm\")\n\n .expect(\"Wasm file not found. 
Did you run ./build.sh inside the tests/wasm/ folder?\");\n\n let module = wasmtime::Module::new(store.engine(), wasm).unwrap();\n\n let mut linker = wasmtime::Linker::new(&store);\n\n\n\n let memory_ty = wasmtime::MemoryType::new(wasmtime::Limits::new(32, None));\n\n let memory = wasmtime::Memory::new(&store, memory_ty);\n\n linker.define(\"env\", \"memory\", memory.clone()).unwrap();\n\n\n\n let empty = Empty {};\n\n let instance_state = SimpleExcutor {\n\n memory: Memory::from(memory),\n\n };\n\n Empty::add_to_linker(empty, instance_state, &mut linker);\n\n\n\n let instance = linker.instantiate(&module).unwrap();\n\n let test_count = instance.get_func(\"test_count\").unwrap().call(&[]);\n\n assert_eq!(test_count.is_ok(), true);\n\n\n\n let test_add = instance.get_func(\"test_add\").unwrap().call(&[]);\n\n assert_eq!(test_add.is_ok(), true);\n\n}\n", "file_path": "uptown_funk/tests/ref_str_test.rs", "rank": 15, "score": 128038.94877474224 }, { "content": "pub fn platform_clock_time_get(\n\n clock_id: Clockid,\n\n _precision: Timestamp,\n\n mut time: Pointer<Timestamp>,\n\n) -> StatusTrapResult {\n\n let unix_clock_id = match clock_id {\n\n Clockid::Realtime => CLOCK_REALTIME,\n\n Clockid::Monotonic => CLOCK_MONOTONIC,\n\n Clockid::ProcessCpuTimeId => CLOCK_PROCESS_CPUTIME_ID,\n\n Clockid::ThreadCpuTimeId => CLOCK_THREAD_CPUTIME_ID,\n\n Clockid::Unsupported => return Status::Inval.into(),\n\n };\n\n\n\n let (output, timespec_out) = unsafe {\n\n let mut timespec_out: timespec = timespec {\n\n tv_sec: 0,\n\n tv_nsec: 0,\n\n };\n\n (\n\n clock_gettime(unix_clock_id, &mut timespec_out),\n\n timespec_out,\n\n )\n\n };\n\n\n\n let t_out = (timespec_out.tv_sec * 1_000_000_000).wrapping_add(timespec_out.tv_nsec);\n\n time.set(t_out as Timestamp);\n\n\n\n errno_to_status(output).into()\n\n}\n\n\n", "file_path": "src/api/wasi/unix.rs", "rank": 16, "score": 127878.34463568928 }, { "content": "pub fn platform_clock_time_get(\n\n clock_id: Clockid,\n\n _precision: 
Timestamp,\n\n mut time: Pointer<Timestamp>,\n\n) -> StatusTrapResult {\n\n let nanos =\n\n match clock_id {\n\n Clockid::Realtime => {\n\n let duration = SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .map_err(|_| Status::Io)?;\n\n duration.as_nanos() as u64\n\n }\n\n Clockid::Monotonic => {\n\n let tick_ms = unsafe { GetTickCount64() };\n\n tick_ms * 1_000_000\n\n }\n\n\n\n Clockid::ProcessCpuTimeId => return Err(Trap::new(\n\n \"wasi::api::platform_clock_time_get(Clockid::ProcessCpuTimeId, ..) not implemented\",\n", "file_path": "src/api/wasi/windows.rs", "rank": 17, "score": 127878.34463568928 }, { "content": "fn patch_function(\n\n function: &mut LocalFunction,\n\n counter: GlobalId,\n\n yield_func: FunctionId,\n\n imported_functions: &Vec<FunctionId>,\n\n) {\n\n let mut insertion_points = Vec::new();\n\n\n\n // Insert reduction counter at the top of every function\n\n let start = function.entry_block();\n\n insertion_points.push(start);\n\n\n\n // Check if there are tight loops\n\n let instr_seq = function.block(start);\n\n for (instr, _) in &instr_seq.instrs {\n\n match instr {\n\n ir::Instr::Loop(loop_) => {\n\n patch_sequence(\n\n true,\n\n loop_.seq,\n", "file_path": "src/module/normalisation/reduction_counting.rs", "rank": 18, "score": 116372.33618730454 }, { "content": "/// Check if the type implementing uptown_funk::HostFunctions is allowed to be captured as Wasm\n\n/// instance state. (Only simple paths are currently supported, e.g. `Networking`, `Porcesses`).\n\npub fn check(state_type: &Type) -> Result<&Ident, TokenStream> {\n\n match state_type {\n\n Type::Path(type_path) => match type_path.path.get_ident() {\n\n Some(ident) => Ok(ident),\n\n None => Err(quote_spanned! {\n\n state_type.span() =>\n\n compile_error!(\"Unsupported type path for `#[uptown_funk::host_functions]` state\");\n\n }\n\n .into()),\n\n },\n\n _ => Err(quote_spanned! 
{\n\n state_type.span() =>\n\n compile_error!(\"Unsupported type for `#[uptown_funk::host_functions]` state\");\n\n }\n\n .into()),\n\n }\n\n}\n", "file_path": "uptown_funk/uptown_funk_macro/src/state_type.rs", "rank": 19, "score": 116352.22958938338 }, { "content": "pub fn get_sync(attributes: &AttributeArgs) -> Result<SyncType, TokenStream> {\n\n for kv in attributes.iter() {\n\n match kv {\n\n NestedMeta::Meta(meta) => match meta {\n\n Meta::NameValue(name_value) => {\n\n let key = match name_value.path.segments.first() {\n\n Some(path_segment) => &path_segment.ident,\n\n None => return Err(sync_error(kv)),\n\n };\n\n\n\n if key != \"sync\" {\n\n continue;\n\n }\n\n\n\n let namespace = match &name_value.lit {\n\n Lit::Str(lit_str) => match lit_str.value().as_str() {\n\n \"none\" => SyncType::None,\n\n \"mutex\" => SyncType::Mutex,\n\n _ => return Err(sync_error(kv)),\n\n },\n", "file_path": "uptown_funk/uptown_funk_macro/src/attribute.rs", "rank": 20, "score": 114617.42266431791 }, { "content": "fn main() -> Result<()> {\n\n env_logger::init();\n\n run()\n\n}\n", "file_path": "src/main.rs", "rank": 21, "score": 114190.2675568035 }, { "content": "/// Provides access to the instance execution environment.\n\npub trait Executor {\n\n /// Execute `Future` f.\n\n #[cfg(feature = \"async\")]\n\n fn async_<R, F>(&self, f: F) -> R\n\n where\n\n F: std::future::Future<Output = R>;\n\n\n\n /// Get mutable access to the instance memory.\n\n fn memory(&self) -> memory::Memory;\n\n}\n\n\n", "file_path": "uptown_funk/src/lib.rs", "rank": 22, "score": 113652.66188399626 }, { "content": "/// Takes a `signature` and returns a tuple of:\n\n/// * Input signature of the wasm guest function.\n\n/// * Return signature of the wasm guest function.\n\n/// * Transformation steps from wasm guest arguments to host arguments.\n\n/// * Signature of the host function.\n\n/// * Transformation step from host return values to wasm guest returns.\n\npub fn transform(sync: SyncType, signature: 
&Signature) -> Result<Transform, TokenStream> {\n\n let mut input_arguments = signature.inputs.iter();\n\n // First element must match exactly `&self or &mut self`\n\n match input_arguments.next() {\n\n Some(FnArg::Receiver(receiver)) => {\n\n if receiver.reference.is_none() {\n\n return Err(self_error(receiver));\n\n }\n\n }\n\n None | Some(FnArg::Typed(_)) => return Err(self_error(signature)),\n\n };\n\n\n\n // Transform other input argumetns\n\n let mut guest_signature_input = Vec::new();\n\n let mut from_guest_input_transformations = Vec::new();\n\n let mut host_call_signature = Vec::new();\n\n\n\n for input_argument in input_arguments {\n\n match input_argument {\n\n FnArg::Typed(pat_type) => match inputs::transform(sync, pat_type) {\n", "file_path": "uptown_funk/uptown_funk_macro/src/signature/mod.rs", "rank": 23, "score": 107802.41004727525 }, { "content": "pub fn bench_channel() {\n\n println!(\n\n \"Send 0 bytes: {}\",\n\n bench_env(channel::unbounded(), |(sender, _r)| sender\n\n .send(())\n\n .unwrap())\n\n );\n\n let (payload, _r): (channel::Sender<()>, _) = channel::bounded(1);\n\n println!(\n\n \"Send 4 bytes + Sender: {}\",\n\n bench_env(channel::unbounded(), |(sender, _r)| sender\n\n .send((1337u32, payload.clone()))\n\n .unwrap())\n\n );\n\n println!(\n\n \"Send 8 bytes: {}\",\n\n bench_env(channel::unbounded(), |(sender, _r)| sender\n\n .send(1337u64)\n\n .unwrap())\n\n );\n", "file_path": "benches/guest/rust/src/channel.rs", "rank": 24, "score": 102942.90877622101 }, { "content": "/// Takes the input arguments part of the host function's signature and returns wrappers around higher\n\n/// level types to make them compatible with WASM guest functions, according to WASI conventions.\n\n///\n\n/// There are 3 parts to this transformation (the return values):\n\n/// 1. The input arguments of the WASM guest function.\n\n/// 2. Code that maps the WASM guest input arguments to the provided host function arguments.\n\n/// 3. 
List of arguments passed to the host function.\n\n///\n\n/// The following rules are followed when doing the transformation:\n\n/// 1. **i32, i64, f32 and f64** (WASM guest compatible types) are just forwarded to the host function.\n\n/// 2. **&str** is split on the guest in two arguments, a pointer to the string and its length.\n\n/// 3. **&mut [u8]** is split on the guest in two arguments, a pointer to the u8 slice and its length.\n\n/// 4. **&[std::io::IoSlice<'_>]** is split on the guest in two arguments, a pointer to a slice containing WASI\n\n/// ciovec structs and its length.\n\n/// 5. **&mut [IoSliceMut<'_>]** is split on the guest in two arguments, a pointer to a slice containing WASI\n\n/// iovec structs and its length.\n\n/// 6. **Custom types** need to implement uptown_funk::FromWasm.\n\n/// 7. All other patterns will result in a compilation error.\n\npub fn transform(\n\n sync: SyncType,\n\n pat_type: &PatType,\n\n) -> Result<(TokenStream2, TokenStream2, TokenStream2), TokenStream> {\n\n let argument_name = match &*pat_type.pat {\n\n Pat::Ident(pat_ident) => {\n\n if pat_ident.by_ref.is_some() {\n\n return Err(arg_error(&pat_type.pat));\n\n };\n\n &pat_ident.ident\n\n }\n\n _ => return Err(arg_error(&pat_type.pat)),\n\n };\n\n\n\n let argument_transformation = match &*pat_type.ty {\n\n Type::Path(type_path) => transform_path(&type_path.path),\n\n Type::Reference(type_ref) => transform_reference(&type_ref),\n\n _ => return Err(arg_error(&pat_type.ty)),\n\n };\n\n\n", "file_path": "uptown_funk/uptown_funk_macro/src/signature/inputs.rs", "rank": 25, "score": 99289.34534650735 }, { "content": "pub fn wrap(\n\n namespace: &LitStr,\n\n sync: SyncType,\n\n method: &ImplItemMethod,\n\n) -> Result<TokenStream2, TokenStream> {\n\n let signature = &method.sig;\n\n let method_name = &signature.ident;\n\n let method_name_as_str = LitStr::new(&method_name.to_string(), method_name.span());\n\n\n\n // If it's an async function wrap it in an async block.\n\n let 
maybe_async = match signature.asyncness {\n\n Some(_) => quote! { cloned_executor.async_ },\n\n None => quote! { std::convert::identity },\n\n };\n\n\n\n let Transform {\n\n input_sig,\n\n output_sig,\n\n input_trans,\n\n call_args,\n", "file_path": "uptown_funk/uptown_funk_macro/src/wasmtime_method.rs", "rank": 26, "score": 99276.88393654994 }, { "content": "pub fn transform(\n\n sync: SyncType,\n\n return_type: &Type,\n\n) -> Result<\n\n (\n\n TokenStream2,\n\n TokenStream2,\n\n TokenStream2,\n\n TokenStream2,\n\n TokenStream2,\n\n ),\n\n TokenStream,\n\n> {\n\n match return_type {\n\n Type::Path(type_path) => {\n\n let (return_argument, host_to_guest_transformation) = first_output(sync, type_path)?;\n\n Ok((\n\n quote! {},\n\n return_argument,\n\n quote! {},\n", "file_path": "uptown_funk/uptown_funk_macro/src/signature/outputs.rs", "rank": 27, "score": 99276.88393654994 }, { "content": "pub fn bench_host_calls() {\n\n println!(\"Call yield_: {}\", bench(|| yield_()));\n\n // no-op\n\n println!(\n\n \"Call wasi_snapshot_preview1::proc_raise: {}\",\n\n bench(|| unsafe { host::proc_raise(0) })\n\n );\n\n}\n", "file_path": "benches/guest/rust/src/host_calls.rs", "rank": 28, "score": 99276.88393654994 }, { "content": "use crate::{api::default::DefaultApi, linker::wasmtime_engine, module::LunaticModule};\n\nuse uptown_funk::{Executor, FromWasm, HostFunctions, ToWasm};\n\n\n\nuse super::api::ProcessState;\n\n\n\nimpl FromWasm<&mut ProcessState> for LunaticModule {\n\n type From = u32;\n\n\n\n fn from(\n\n state: &mut ProcessState,\n\n _: &impl Executor,\n\n module_id: u32,\n\n ) -> Result<Self, uptown_funk::Trap>\n\n where\n\n Self: Sized,\n\n {\n\n match state.modules.get(module_id) {\n\n Some(module) => Ok(module.clone()),\n\n None => Err(uptown_funk::Trap::new(\"LunaticModule not found\")),\n\n }\n", "file_path": "src/api/process/module_linking.rs", "rank": 29, "score": 98465.95212012784 }, { "content": " }\n\n}\n\n\n\npub enum LunaticModuleResult {\n\n 
Ok(LunaticModule),\n\n Err(String),\n\n}\n\n\n\nimpl ToWasm<&mut ProcessState> for LunaticModuleResult {\n\n type To = u32;\n\n\n\n fn to(\n\n state: &mut ProcessState,\n\n _: &impl Executor,\n\n result: Self,\n\n ) -> Result<u32, uptown_funk::Trap> {\n\n match result {\n\n LunaticModuleResult::Ok(listener) => Ok(state.modules.add(listener)),\n\n LunaticModuleResult::Err(_err) => Ok(0),\n\n }\n", "file_path": "src/api/process/module_linking.rs", "rank": 30, "score": 98456.89913685436 }, { "content": " }\n\n}\n\n\n\npub struct Import(pub String, pub LunaticModule);\n\n\n\nimpl ToWasm<&mut ProcessState> for Import {\n\n type To = u32;\n\n\n\n fn to(\n\n state: &mut ProcessState,\n\n _: &impl Executor,\n\n import: Self,\n\n ) -> Result<u32, uptown_funk::Trap> {\n\n Ok(state.imports.add(import))\n\n }\n\n}\n\n\n\npub struct Imports<'a>(pub Vec<Option<&'a Import>>);\n\n\n\nimpl<'a> HostFunctions for Imports<'a> {\n", "file_path": "src/api/process/module_linking.rs", "rank": 31, "score": 98456.09975586331 }, { "content": " type Return = ();\n\n type Wrap = Self;\n\n\n\n fn split(self) -> (Self::Return, Self::Wrap) {\n\n ((), self)\n\n }\n\n\n\n fn add_to_linker<E>(imports: Self, executor: E, linker: &mut wasmtime::Linker)\n\n where\n\n E: Executor + Clone + 'static,\n\n {\n\n // Allow overriding default imports\n\n linker.allow_shadowing(true);\n\n\n\n // For each import create a separate instance that will be used as an import namespace.\n\n for import in imports.0 {\n\n match import {\n\n Some(import) => {\n\n let engine = wasmtime_engine();\n\n let store = wasmtime::Store::new(&engine);\n", "file_path": "src/api/process/module_linking.rs", "rank": 32, "score": 98449.94954965023 }, { "content": " let mut parent_linker = wasmtime::Linker::new(&store);\n\n let default_api = DefaultApi::new(None, import.1.clone());\n\n DefaultApi::add_to_linker(default_api, executor.clone(), &mut parent_linker);\n\n let instance = parent_linker\n\n 
.instantiate(import.1.module().wasmtime().unwrap())\n\n .unwrap();\n\n linker.instance(&import.0, &instance).unwrap();\n\n }\n\n None => (),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/api/process/module_linking.rs", "rank": 33, "score": 98449.74350160273 }, { "content": "#[proc_macro_attribute]\n\npub fn host_functions(attr: TokenStream, item: TokenStream) -> TokenStream {\n\n // Figure out namespace from attribute string\n\n let attribute = parse_macro_input!(attr as AttributeArgs);\n\n let namespace = match attribute::get_namespace(&attribute) {\n\n Ok(namespace) => namespace,\n\n Err(error) => return error,\n\n };\n\n\n\n let sync = match attribute::get_sync(&attribute) {\n\n Ok(sync) => sync,\n\n Err(error) => return error,\n\n };\n\n\n\n // Check if type is compatible with the state\n\n let implementation = parse_macro_input!(item as ItemImpl);\n\n let self_ty = match state_type::check(&implementation.self_ty) {\n\n Ok(ident) => ident,\n\n Err(error) => return error,\n\n };\n\n\n", "file_path": "uptown_funk/uptown_funk_macro/src/lib.rs", "rank": 34, "score": 98036.35265358556 }, { "content": "fn run_test(input: &str) -> Vec<u8> {\n\n let wasm = wat::parse_str(input).unwrap();\n\n patch(&wasm, true, false).unwrap().1\n\n}\n", "file_path": "tests/normalisation_patching_test.rs", "rank": 35, "score": 92110.56974717673 }, { "content": "#[derive(Debug)]\n\nstruct FileDesc {\n\n pub file: File,\n\n pub path: PathBuf,\n\n}\n\n\n\nimpl FileDesc {\n\n fn open<P: AsRef<Path>>(path: P) -> Result<Self, Status> {\n\n let file = File::open(&path)?;\n\n let path = PathBuf::from(path.as_ref());\n\n Ok(Self { file, path })\n\n }\n\n\n\n fn open_with_flags<P: AsRef<Path>>(path: P, flags: OpenFlags) -> Result<Self, Status> {\n\n let file = OpenOptions::new()\n\n .read(true)\n\n .write(flags.create())\n\n .create(flags.create())\n\n .truncate(flags.truncate())\n\n .open(&path)?;\n\n let path = PathBuf::from(path.as_ref());\n\n Ok(Self { file, path })\n\n }\n\n}\n", "file_path": 
"src/api/wasi/state.rs", "rank": 36, "score": 89714.04421642717 }, { "content": "pub trait HasOk {\n\n fn ok() -> Self;\n\n}\n\n\n\nimpl<S, T: ToWasm<S> + HasOk> ToWasm<S> for Result<(), T> {\n\n type To = T::To;\n\n\n\n fn to(state: S, executor: &impl crate::Executor, host_value: Self) -> Result<Self::To, Trap> {\n\n match host_value {\n\n Ok(_) => T::to(state, executor, T::ok()),\n\n Err(e) => T::to(state, executor, e),\n\n }\n\n }\n\n}\n", "file_path": "uptown_funk/src/types/result.rs", "rank": 37, "score": 86180.78929340141 }, { "content": "// Recursively clone instructions from a local function.\n\nfn clone_rec(\n\n fn_loc: &LocalFunction,\n\n instrs: &ir::InstrSeq,\n\n instrs_clone: &mut InstrSeqBuilder,\n\n // TODO use Rc<RefCell<HashMap<..>>> to avoid cloning in ifElse block\n\n jmp_ids: &mut HashMap<ir::InstrSeqId, ir::InstrSeqId>,\n\n) {\n\n jmp_ids.insert(instrs.id(), instrs_clone.id());\n\n instrs.instrs.iter().for_each(|(i, _)| match i {\n\n ir::Instr::Block(block) => {\n\n let block_instrs = fn_loc.block(block.seq);\n\n instrs_clone.block(block_instrs.ty, |block_clone| {\n\n clone_rec(fn_loc, block_instrs, block_clone, jmp_ids);\n\n });\n\n }\n\n ir::Instr::IfElse(if_else) => {\n\n let consequent_instrs = fn_loc.block(if_else.consequent);\n\n let jmp_ids_clone = &mut jmp_ids.clone();\n\n instrs_clone.if_else(\n\n consequent_instrs.ty,\n", "file_path": "src/module/normalisation/heap_profiler.rs", "rank": 38, "score": 84333.0052051906 }, { "content": "// Mark insertion points for reduction counter in loops that:\n\n// * don't contain any other loops\n\n// * don't contain calls to local functions\n\n//\n\n// Returns true if an insertion occurred in this block or any children, otherwise false.\n\nfn patch_sequence(\n\n insert: bool,\n\n seq_id: ir::InstrSeqId,\n\n function: &LocalFunction,\n\n insertion_points: &mut Vec<ir::InstrSeqId>,\n\n imported_functions: &Vec<FunctionId>,\n\n) -> bool {\n\n let mut child_inserts = false;\n\n let mut 
insert_reduction_counter = insert;\n\n let instr_seq = function.block(seq_id);\n\n\n\n for (instr, _) in &instr_seq.instrs {\n\n match instr {\n\n ir::Instr::Loop(loop_) => {\n\n patch_sequence(\n\n true,\n\n loop_.seq,\n\n function,\n\n insertion_points,\n\n imported_functions,\n", "file_path": "src/module/normalisation/reduction_counting.rs", "rank": 39, "score": 84332.25755666295 }, { "content": "fn align_pointer(ptr: usize, align: usize) -> usize {\n\n // clears bits below aligment amount (assumes power of 2) to align pointer\n\n ptr & !(align - 1)\n\n}\n\n\n", "file_path": "uptown_funk/src/types/pointers.rs", "rank": 40, "score": 83752.94168720365 }, { "content": "#[test]\n\nfn merge_profiles() {\n\n // TODO use quickcheck crate\n\n fn random_memory() -> HashMap<Ptr, Size> {\n\n let len = rand::random::<usize>() % (HISTORY_CAPACITY + 1);\n\n let mut r = HashMap::with_capacity(len);\n\n for _ in 0..len {\n\n r.insert(rand::random(), rand::random());\n\n }\n\n r\n\n }\n\n fn random_history() -> VecDeque<(i32, Duration)> {\n\n let len = rand::random::<usize>() % (HISTORY_CAPACITY + 1);\n\n let mut r = VecDeque::with_capacity(len);\n\n for _ in 0..len {\n\n r.push_back((rand::random(), Duration::from_millis(rand::random())));\n\n }\n\n // we assume HeapProfilerHistory will keep heap_history sorted\n\n // this assumption will break if user messes with system time\n\n r.make_contiguous().sort_unstable_by_key(|&(_, d)| d);\n\n r\n", "file_path": "src/api/heap_profiler.rs", "rank": 41, "score": 83404.33728441075 }, { "content": "// Find all .wat files recursively in a given folder.\n\nfn find_tests(path: &Path, tests: &mut Vec<PathBuf>) {\n\n for f in path.read_dir().unwrap() {\n\n let f = f.unwrap();\n\n if f.file_type().unwrap().is_dir() {\n\n find_tests(&f.path(), tests);\n\n continue;\n\n }\n\n match f.path().extension().and_then(|s| s.to_str()) {\n\n Some(\"wat\") => {}\n\n _ => continue,\n\n }\n\n tests.push(f.path());\n\n }\n\n}\n\n\n", "file_path": 
"tests/normalisation_patching_test.rs", "rank": 42, "score": 82498.93291877776 }, { "content": "// Algorithm:\n\n// 1. Increment the reduction counter global\n\n// 2. Check if the global reached REDUCTION_LIMIT, if yes yield and reset reduction counter\n\nfn insert_reduction_counter(\n\n block: &mut InstrSeqBuilder,\n\n counter: GlobalId,\n\n yield_func: FunctionId,\n\n) {\n\n block\n\n .global_get(counter)\n\n .i32_const(1)\n\n .binop(ir::BinaryOp::I32Add)\n\n .global_set(counter)\n\n .global_get(counter)\n\n .i32_const(REDUCTION_LIMIT)\n\n .binop(ir::BinaryOp::I32GtS)\n\n .if_else(\n\n None,\n\n |then| {\n\n then.call(yield_func).i32_const(0).global_set(counter);\n\n },\n\n |_else| {},\n\n );\n\n}\n", "file_path": "src/module/normalisation/reduction_counting.rs", "rank": 43, "score": 82313.42091996469 }, { "content": "pub trait HostFunctions: Sized {\n\n type Return;\n\n type Wrap;\n\n\n\n fn split(self) -> (Self::Return, Self::Wrap);\n\n\n\n fn add_to_linker<E>(api: Self::Wrap, executor: E, linker: &mut wasmtime::Linker)\n\n where\n\n E: Executor + Clone + 'static;\n\n}\n\n\n\npub struct Trap {\n\n message: String,\n\n}\n\n\n\nimpl Debug for Trap {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n Debug::fmt(&self.message, f)\n\n }\n\n}\n", "file_path": "uptown_funk/src/lib.rs", "rank": 44, "score": 82295.60479744285 }, { "content": "type OptionTrap = Result<u32, uptown_funk::Trap>;\n\n\n\nuse super::resolver::Resolver;\n\nuse super::tcp::{TcpListener, TcpStream};\n\nuse uptown_funk::state::HashMapStore;\n\n\n\nuse crate::api::channel::api::ChannelState;\n\n\n\npub struct TcpState {\n\n channel_state: ChannelState,\n\n pub resolvers: HashMapStore<Resolver>,\n\n pub listeners: HashMapStore<TcpListener>,\n\n pub streams: HashMapStore<TcpStream>,\n\n}\n\n\n\nimpl TcpState {\n\n pub fn new(channel_state: ChannelState) -> Self {\n\n Self {\n\n channel_state,\n\n resolvers: HashMapStore::new(),\n", "file_path": "src/api/networking/api.rs", 
"rank": 45, "score": 81532.16952758488 }, { "content": "/// Returns a number corresponding to the `ErrorKind` of an `io::Error`. Always returns a number greater\n\n/// than or equal to 1 (0 is used to indicate that the operation was successful.)\n\nfn int_of_io_error(e: &io::Error) -> u32 {\n\n match e.kind() {\n\n io::ErrorKind::NotFound => 1,\n\n io::ErrorKind::PermissionDenied => 2,\n\n io::ErrorKind::ConnectionRefused => 3,\n\n io::ErrorKind::ConnectionReset => 4,\n\n io::ErrorKind::ConnectionAborted => 5,\n\n io::ErrorKind::NotConnected => 6,\n\n io::ErrorKind::AddrInUse => 7,\n\n io::ErrorKind::AddrNotAvailable => 8,\n\n io::ErrorKind::BrokenPipe => 9,\n\n io::ErrorKind::AlreadyExists => 10,\n\n io::ErrorKind::WouldBlock => 11,\n\n io::ErrorKind::InvalidInput => 12,\n\n io::ErrorKind::InvalidData => 13,\n\n io::ErrorKind::TimedOut => 14,\n\n io::ErrorKind::WriteZero => 15,\n\n io::ErrorKind::Interrupted => 16,\n\n io::ErrorKind::UnexpectedEof => 17,\n\n io::ErrorKind::Unsupported => 18,\n\n io::ErrorKind::OutOfMemory => 19,\n\n io::ErrorKind::Other => 20,\n\n _ => 99,\n\n }\n\n}\n", "file_path": "src/api/networking/api.rs", "rank": 46, "score": 78440.30167760278 }, { "content": "use crate::{\n\n api::channel::{api::ChannelState, ChannelReceiver, Message},\n\n api::heap_profiler,\n\n module::LunaticModule,\n\n};\n\n\n\nuse super::{FunctionLookup, MemoryChoice, Process};\n\n\n\nuse anyhow::Result;\n\nuse smol::{channel::bounded, future::yield_now, Timer};\n\nuse uptown_funk::{host_functions, state::HashMapStore, HostFunctions};\n\n\n\nuse std::{\n\n mem::replace,\n\n time::{Duration, Instant},\n\n};\n\n\n\npub struct ProcessState {\n\n module: LunaticModule,\n\n channel_state: ChannelState,\n", "file_path": "src/api/process/api.rs", "rank": 56, "score": 74976.28904336621 }, { "content": " match process.task().await {\n\n Ok(_) => 0,\n\n Err(_) => 1,\n\n }\n\n }\n\n\n\n // Drops the Task and cancels the process\n\n //\n\n // It's currently not safe to cancel a 
process in Lunatic.\n\n // All processes are executed on a separate stack, but if we cancel it the stack memory\n\n // will be freed without actually unwinding it. This means that values and references\n\n // living on the separate stack will never be freed.\n\n async fn cancel_process(&self, _process: Process) {\n\n // _process will take ownership here of the underlying task and drop it.\n\n // See: https://docs.rs/smol/latest/smol/struct.Task.html\n\n }\n\n\n\n // Detaches process\n\n async fn detach_process(&self, process: Process) {\n\n process.task().detach()\n\n }\n\n}\n", "file_path": "src/api/process/api.rs", "rank": 57, "score": 74966.0238260694 }, { "content": " pub processes: HashMapStore<Process>,\n\n profiler: <heap_profiler::HeapProfilerState as HostFunctions>::Wrap,\n\n}\n\n\n\nimpl ProcessState {\n\n pub fn new(\n\n module: LunaticModule,\n\n channel_state: ChannelState,\n\n profiler: <heap_profiler::HeapProfilerState as HostFunctions>::Wrap,\n\n ) -> Self {\n\n Self {\n\n module,\n\n channel_state,\n\n processes: HashMapStore::new(),\n\n profiler,\n\n }\n\n }\n\n}\n\n\n\n#[host_functions(namespace = \"lunatic\")]\n", "file_path": "src/api/process/api.rs", "rank": 58, "score": 74965.05976927471 }, { "content": " .inner\n\n .borrow_mut()\n\n .next_message_host_resources;\n\n let host_resources = replace(host_resources, Vec::new());\n\n let message = Message::new(context.as_ptr(), context.len(), host_resources);\n\n let _ignore = sender.send(message).await;\n\n\n\n let future = Process::create(\n\n Some(ChannelReceiver::from(receiver)),\n\n self.module.clone(),\n\n FunctionLookup::TableIndex(index),\n\n MemoryChoice::New(None),\n\n self.profiler.clone(),\n\n );\n\n Process::spawn(future)\n\n }\n\n\n\n // Wait on child process to finish.\n\n // Returns 0 if process didn't trap, otherwise 1\n\n async fn join(&self, process: Process) -> u32 {\n", "file_path": "src/api/process/api.rs", "rank": 59, "score": 74961.85777003344 }, { "content": "impl 
ProcessState {\n\n // Yield this process allowing other to be scheduled on same thread.\n\n async fn yield_(&self) {\n\n yield_now().await\n\n }\n\n\n\n // Suspend process for `millis`.\n\n async fn sleep_ms(&self, millis: u64) {\n\n let now = Instant::now();\n\n let when = now + Duration::from_millis(millis);\n\n Timer::at(when).await;\n\n }\n\n\n\n // Spawn a new process with a context and call a function from the function table by `index`.\n\n //\n\n // Once the process is created the context will be passed through a Channel::Receiver to it.\n\n async fn spawn_with_context(&self, index: u32, context: &[u8]) -> Process {\n\n let (sender, receiver) = bounded(1);\n\n let host_resources = &mut self\n\n .channel_state\n", "file_path": "src/api/process/api.rs", "rank": 60, "score": 74957.25619922642 }, { "content": "pub trait ToWasm<State> {\n\n type To;\n\n\n\n fn to(state: State, executor: &impl Executor, host_value: Self) -> Result<Self::To, Trap>;\n\n}\n\n\n", "file_path": "uptown_funk/src/types/mod.rs", "rank": 61, "score": 74205.86429543507 }, { "content": "pub trait FromWasm<State> {\n\n type From;\n\n\n\n fn from(state: State, executor: &impl Executor, from: Self::From) -> Result<Self, Trap>\n\n where\n\n Self: Sized;\n\n}\n\n\n", "file_path": "uptown_funk/src/types/mod.rs", "rank": 62, "score": 74205.86429543507 }, { "content": "fn to_clockid(num: u32) -> Clockid {\n\n match num {\n\n 0 => Clockid::Realtime,\n\n 1 => Clockid::Monotonic,\n\n 2 => Clockid::ProcessCpuTimeId,\n\n 3 => Clockid::ThreadCpuTimeId,\n\n _ => Clockid::Unsupported,\n\n }\n\n}\n\n\n\nimpl CReprWasmType for Clockid {}\n\n\n\nimpl<S> FromWasm<S> for Clockid {\n\n type From = u32;\n\n\n\n fn from(_: S, _: &impl Executor, from: u32) -> Result<Self, Trap> {\n\n Ok(to_clockid(from))\n\n }\n\n}\n", "file_path": "src/api/wasi/types/clock.rs", "rank": 63, "score": 72452.19926525051 }, { "content": "fn errno_to_status(err: i32) -> Status {\n\n // TODO: map errno from clock_getres to 
types::Status\n\n match err {\n\n 0 => Status::Success,\n\n _ => Status::Inval,\n\n }\n\n}\n\n\n", "file_path": "src/api/wasi/unix.rs", "rank": 64, "score": 72452.19926525051 }, { "content": "use walrus::*;\n\n\n\n/// Finds memory with the index 0 and turns it into an import.\n\n/// Returns the initial and maximum memory sizes.\n", "file_path": "src/module/normalisation/shared_memory.rs", "rank": 65, "score": 68159.14156143738 }, { "content": "use async_wormhole::AsyncYielder;\n\nuse uptown_funk::memory::Memory;\n\n\n\nuse std::future::Future;\n\nuse std::mem::ManuallyDrop;\n\n\n\nuse crate::module::Runtime;\n\n\n\n/// This structure is captured inside HOST function closures passed to Wasmtime's Linker.\n\n/// It allows us to expose Lunatic runtime functionalities inside host functions, like\n\n/// async yields or Instance memory access.\n\n///\n\n/// ### Safety\n\n///\n\n/// Having a mutable slice of Wasmtime's memory is generally unsafe, but Lunatic always uses\n\n/// static memories and one memory per instance. 
This makes it somewhat safe?\n\npub struct ProcessEnvironment {\n\n memory: Memory,\n\n yielder: usize,\n\n runtime: Runtime,\n", "file_path": "src/api/process/env.rs", "rank": 66, "score": 67014.66602365048 }, { "content": "impl Clone for ProcessEnvironment {\n\n fn clone(&self) -> Self {\n\n match self.runtime {\n\n Runtime::Wasmtime => Self {\n\n memory: unsafe { std::ptr::read(&self.memory as *const Memory) },\n\n yielder: self.yielder,\n\n runtime: self.runtime,\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl ProcessEnvironment {\n\n pub fn new(memory: Memory, yielder: usize, runtime: Runtime) -> Self {\n\n Self {\n\n memory,\n\n runtime,\n\n yielder,\n\n }\n\n }\n\n}\n", "file_path": "src/api/process/env.rs", "rank": 67, "score": 67012.67808976106 }, { "content": "}\n\n\n\nimpl uptown_funk::Executor for ProcessEnvironment {\n\n #[inline(always)]\n\n fn async_<R, F>(&self, f: F) -> R\n\n where\n\n F: Future<Output = R>,\n\n {\n\n // The yielder should not be dropped until this process is done running.\n\n let mut yielder = unsafe {\n\n std::ptr::read(self.yielder as *const ManuallyDrop<AsyncYielder<anyhow::Result<()>>>)\n\n };\n\n yielder.async_suspend(f)\n\n }\n\n\n\n fn memory(&self) -> Memory {\n\n self.memory.clone()\n\n }\n\n}\n\n\n", "file_path": "src/api/process/env.rs", "rank": 68, "score": 67005.56505700904 }, { "content": "// Because of a bug in Wasmtime: https://github.com/bytecodealliance/wasmtime/issues/2583\n\n// we need to duplicate the Memory in the Linker before storing it in ProcessEnvironment,\n\n// to not increase the reference count.\n\n// When we are droping the memory we need to make sure we forget the value to not decrease\n\n// the reference count.\n\n// Safety: The ProcessEnvironment has the same lifetime as Memory, so it should be safe to\n\n// do this.\n\nimpl Drop for ProcessEnvironment {\n\n fn drop(&mut self) {\n\n match self.runtime {\n\n Runtime::Wasmtime => {\n\n let memory = std::mem::replace(&mut self.memory, Memory::Empty);\n\n 
std::mem::forget(memory)\n\n }\n\n }\n\n }\n\n}\n\n\n\n// For the same reason mentioned on the Drop trait we can't increase the reference count\n\n// on the Memory when cloning.\n", "file_path": "src/api/process/env.rs", "rank": 69, "score": 67003.99205079094 }, { "content": "pub mod api;\n\nmod env;\n\nmod err;\n\nmod process;\n\nmod tls;\n\n\n\npub use env::*;\n\npub use process::*;\n", "file_path": "src/api/process/mod.rs", "rank": 70, "score": 66999.15507424778 }, { "content": "pub struct CallThreadStateSaveWasmtime {\n\n saved: Option<wasmtime_runtime::TlsRestore>,\n\n init: bool,\n\n}\n\n\n\nimpl CallThreadStateSaveWasmtime {\n\n pub fn new() -> Self {\n\n Self {\n\n saved: None,\n\n init: false,\n\n }\n\n }\n\n\n\n pub fn swap(&mut self) {\n\n // On first poll there is nothing to preserve yet.\n\n if self.init {\n\n unsafe {\n\n if let Some(tls) = self.saved.take() {\n\n tls.replace()\n\n .expect(\"wasmtime_runtime::sys::lazy_per_thread_init() failed\");\n", "file_path": "src/api/process/tls.rs", "rank": 71, "score": 66994.60692385888 }, { "content": " } else {\n\n self.saved = Some(wasmtime_runtime::TlsRestore::take());\n\n }\n\n }\n\n } else {\n\n self.init = true;\n\n }\n\n }\n\n}\n\n\n\nunsafe impl Send for CallThreadStateSaveWasmtime {}\n", "file_path": "src/api/process/tls.rs", "rank": 72, "score": 66993.1747167311 }, { "content": "pub struct Error<T> {\n\n pub error: anyhow::Error,\n\n pub value: Option<T>,\n\n}\n\n\n\nimpl<T, E: Into<anyhow::Error>> From<E> for Error<T> {\n\n fn from(error: E) -> Self {\n\n Self {\n\n error: error.into(),\n\n value: None,\n\n }\n\n }\n\n}\n", "file_path": "src/api/process/err.rs", "rank": 73, "score": 66989.85909223904 }, { "content": "use uptown_funk::{host_functions, memory::Memory, HostFunctions};\n\nuse wasmtime;\n\n\n\nuse std::fs::read;\n\n\n\nmod common;\n\nuse common::*;\n\n\n\n#[host_functions(namespace = \"env\")]\n\nimpl Empty {\n\n fn count_a(&self, words: &str) -> i32 {\n\n words.matches(\"a\").count() 
as i32\n\n }\n\n\n\n fn add(&self, a: &str, b: &str, c: &mut [u8]) {\n\n c[..a.len()].copy_from_slice(a.as_bytes());\n\n c[a.len()..].copy_from_slice(b.as_bytes());\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "uptown_funk/tests/ref_str_test.rs", "rank": 74, "score": 66235.84877089105 }, { "content": "#[link(wasm_import_module = \"env\")]\n\nextern \"C\" {\n\n fn count_a(str_ptr: *const u8, str_len: usize) -> i32;\n\n fn add(\n\n a_ptr: *const u8,\n\n a_len: usize,\n\n b_ptr: *const u8,\n\n b_len: usize,\n\n r_ptr: *mut u8,\n\n r_len: usize,\n\n );\n\n}\n\n\n\n#[export_name = \"test_count\"]\n\npub extern \"C\" fn test_count() {\n\n let input = \"Hallo warld; aaaa\";\n\n let result = unsafe { count_a(input.as_ptr(), input.len()) };\n\n assert_eq!(result, 6);\n\n}\n\n\n", "file_path": "uptown_funk/tests/wasm/ref_str.rs", "rank": 75, "score": 66230.8945563364 }, { "content": "#[export_name = \"test_add\"]\n\npub extern \"C\" fn test_add() {\n\n let a = \"Hello \";\n\n let b = \"world\";\n\n let mut result: [u8; 11] = [0; 11];\n\n unsafe {\n\n add(\n\n a.as_ptr(),\n\n a.len(),\n\n b.as_ptr(),\n\n b.len(),\n\n result.as_mut_ptr(),\n\n result.len(),\n\n )\n\n };\n\n assert_eq!(result, \"Hello world\".as_bytes());\n\n}\n", "file_path": "uptown_funk/tests/wasm/ref_str.rs", "rank": 76, "score": 66223.47726253296 }, { "content": "fn convert_io_err(e: std::io::Error) -> Status {\n\n match e.kind() {\n\n std::io::ErrorKind::NotFound => Status::NoEnt,\n\n std::io::ErrorKind::PermissionDenied => Status::Acces,\n\n std::io::ErrorKind::ConnectionRefused => Status::ConnRefused,\n\n std::io::ErrorKind::ConnectionReset => Status::ConnReset,\n\n std::io::ErrorKind::ConnectionAborted => Status::ConnAborted,\n\n std::io::ErrorKind::NotConnected => Status::NotConn,\n\n std::io::ErrorKind::AddrInUse => Status::AddrInUse,\n\n std::io::ErrorKind::AddrNotAvailable => Status::AddrNotAvail,\n\n std::io::ErrorKind::BrokenPipe => Status::Pipe,\n\n std::io::ErrorKind::AlreadyExists => 
Status::Exist,\n\n std::io::ErrorKind::WouldBlock => Status::Again, // ??\n\n std::io::ErrorKind::InvalidInput => Status::Inval,\n\n std::io::ErrorKind::InvalidData => Status::Inval, // ??\n\n std::io::ErrorKind::TimedOut => Status::TimedOut,\n\n std::io::ErrorKind::WriteZero => Status::Inval, // ??{}\n\n std::io::ErrorKind::Interrupted => Status::Intr,\n\n std::io::ErrorKind::Other => Status::Inval, // ??\n\n std::io::ErrorKind::UnexpectedEof => Status::Inval, // ??\n", "file_path": "src/api/wasi/types/status.rs", "rank": 77, "score": 65402.90973513476 }, { "content": " pub fn as_slice(&self) -> &[u8] {\n\n self.slice\n\n }\n\n}\n\n\n\nimpl<'a> From<WasiConstIoVec<'a>> for IoSlice<'a> {\n\n fn from(wasi_io_vec: WasiConstIoVec) -> IoSlice {\n\n IoSlice::new(wasi_io_vec.slice)\n\n }\n\n}\n\n\n\n/// A region of memory used as DESTINATION for scatter READS.\n\npub struct WasiIoVec<'a> {\n\n slice: &'a mut [u8],\n\n}\n\n\n\nimpl<'a> WasiIoVec<'a> {\n\n #[inline(always)]\n\n pub fn from(memory: *mut u8, ptr: usize) -> Self {\n\n unsafe {\n", "file_path": "src/api/wasi/types/structs.rs", "rank": 78, "score": 64687.28185108368 }, { "content": " pub buf: u32,\n\n pub buf_len: u32,\n\n}\n\n/// A region of memory used as SOURCE for gather WRITES.\n\npub struct WasiConstIoVec<'a> {\n\n slice: &'a [u8],\n\n}\n\n\n\nimpl<'a> WasiConstIoVec<'a> {\n\n #[inline(always)]\n\n pub fn from(memory: *mut u8, ptr: usize) -> Self {\n\n unsafe {\n\n let wasi_iovec = memory.add(ptr) as *const _wasi_iovec_t;\n\n let slice_ptr = memory.add((*wasi_iovec).buf as usize);\n\n let slice_len = (*wasi_iovec).buf_len as usize;\n\n let slice = slice::from_raw_parts(slice_ptr, slice_len);\n\n Self { slice }\n\n }\n\n }\n\n\n", "file_path": "src/api/wasi/types/structs.rs", "rank": 79, "score": 64686.59733525746 }, { "content": "}\n\n\n\nimpl<'a> From<WasiIoVec<'a>> for IoSliceMut<'a> {\n\n fn from(wasi_io_vec: WasiIoVec) -> IoSliceMut {\n\n IoSliceMut::new(wasi_io_vec.slice)\n\n }\n\n}\n\n\n\n/// Array 
of WasiConstIoVecs, internally represented as IoSlices\n\npub struct WasiConstIoVecArray<'a> {\n\n io_slices: SmallVec<[IoSlice<'a>; 4]>,\n\n}\n\n\n\nimpl<'a> WasiConstIoVecArray<'a> {\n\n #[inline(always)]\n\n pub fn from(memory: *mut u8, ptr: usize, len: usize) -> Self {\n\n let mut io_slices = SmallVec::with_capacity(len);\n\n for i in 0..len {\n\n let ptr = ptr + i * std::mem::size_of::<_wasi_iovec_t>();\n\n let wasi_io_vec = WasiConstIoVec::from(memory, ptr);\n", "file_path": "src/api/wasi/types/structs.rs", "rank": 80, "score": 64684.37189347397 }, { "content": " io_slices.push(wasi_io_vec.into());\n\n }\n\n Self { io_slices }\n\n }\n\n\n\n pub fn get_io_slices(&self) -> &[IoSlice<'a>] {\n\n self.io_slices.as_slice()\n\n }\n\n}\n\n\n\n/// Array of WasiIoVecs, internally represented as IoSliceMuts\n\npub struct WasiIoVecArray<'a> {\n\n io_slices: Vec<IoSliceMut<'a>>,\n\n}\n\n\n\nimpl<'a> WasiIoVecArray<'a> {\n\n #[inline(always)]\n\n pub fn from(memory: *mut u8, ptr: usize, len: usize) -> Self {\n\n let mut io_slices = Vec::with_capacity(len);\n\n for i in 0..len {\n", "file_path": "src/api/wasi/types/structs.rs", "rank": 81, "score": 64680.655199619214 }, { "content": " let wasi_iovec = memory.add(ptr) as *mut _wasi_iovec_t;\n\n let slice_ptr = memory.add((*wasi_iovec).buf as usize);\n\n let slice_len = (*wasi_iovec).buf_len as usize;\n\n let slice = slice::from_raw_parts_mut(slice_ptr, slice_len);\n\n Self { slice }\n\n }\n\n }\n\n\n\n #[inline(always)]\n\n pub fn from_wasi_iovec_t(memory: *mut u8, buf: usize, buf_len: usize) -> Self {\n\n unsafe {\n\n let slice_ptr = memory.add(buf);\n\n let slice = slice::from_raw_parts_mut(slice_ptr, buf_len);\n\n Self { slice }\n\n }\n\n }\n\n\n\n pub fn as_mut_slice(&mut self) -> &mut [u8] {\n\n self.slice\n\n }\n", "file_path": "src/api/wasi/types/structs.rs", "rank": 82, "score": 64680.52841673089 }, { "content": "use core::slice;\n\nuse std::io::{IoSlice, IoSliceMut};\n\n\n\nuse smallvec::SmallVec;\n\nuse 
uptown_funk::types::CReprWasmType;\n\n\n\nuse super::{aliases::*, Clockid, Fdflags, Filetype, Status};\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C)]\n\npub struct Dirent {\n\n /// The offset of the next directory entry stored in this directory.\n\n pub d_next: Dircookie,\n\n /// The serial number of the file referred to by this directory entry.\n\n pub d_ino: Inode,\n\n /// The length of the name of the directory entry.\n\n pub d_namlen: Dirnamlen,\n\n /// The type of the file referred to by this directory entry.\n\n pub d_type: Filetype,\n\n}\n", "file_path": "src/api/wasi/types/structs.rs", "rank": 83, "score": 64675.67181344064 }, { "content": " pub u: SubscriptionU,\n\n}\n\n\n\nimpl CReprWasmType for Subscription {}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C)]\n\npub struct Prestat {\n\n pr_type: u8,\n\n u: PrestatU,\n\n}\n\n\n\nimpl CReprWasmType for Prestat {}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C)]\n\npub struct PrestatDir {\n\n /// The length of the directory name for use with `fd_prestat_dir_name`.\n\n pub pr_name_len: Size,\n\n}\n", "file_path": "src/api/wasi/types/structs.rs", "rank": 84, "score": 64675.36542574085 }, { "content": " let ptr = ptr + i * std::mem::size_of::<_wasi_iovec_t>();\n\n let wasi_io_vec = WasiIoVec::from(memory, ptr);\n\n io_slices.push(wasi_io_vec.into());\n\n }\n\n Self { io_slices }\n\n }\n\n\n\n pub fn get_io_slices_mut(&mut self) -> &mut [IoSliceMut<'a>] {\n\n self.io_slices.as_mut_slice()\n\n }\n\n}\n", "file_path": "src/api/wasi/types/structs.rs", "rank": 85, "score": 64675.158401733126 }, { "content": "impl CReprWasmType for Filestat {}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C)]\n\npub struct EventFdReadwrite {\n\n /// The number of bytes available for reading or writing.\n\n pub nbytes: Filesize,\n\n /// The state of the file descriptor.\n\n pub flags: Eventrwflags,\n\n}\n\n\n\nimpl CReprWasmType for EventFdReadwrite {}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C)]\n\npub struct Event {\n\n /// User-provided value that got 
attached to `subscription::userdata`.\n\n pub userdata: Userdata,\n\n /// If non-zero, an error that occurred while processing the subscription request.\n\n pub error: Status,\n", "file_path": "src/api/wasi/types/structs.rs", "rank": 86, "score": 64674.98703373251 }, { "content": " pub flags: Subclockflags,\n\n}\n\n\n\nimpl CReprWasmType for SubscriptionClock {}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C)]\n\npub struct SubscriptionFdReadwrite {\n\n /// The file descriptor on which to wait for it to become ready for reading or writing.\n\n pub file_descriptor: Fd,\n\n}\n\n\n\nimpl CReprWasmType for SubscriptionFdReadwrite {}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C)]\n\npub struct SubscriptionU {\n\n pub tag: Eventtype,\n\n pub u: SubscriptionUU,\n\n}\n", "file_path": "src/api/wasi/types/structs.rs", "rank": 87, "score": 64673.06658539123 }, { "content": "\n\nimpl CReprWasmType for SubscriptionU {}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C)]\n\npub union SubscriptionUU {\n\n pub clock: SubscriptionClock,\n\n pub fd_read: SubscriptionFdReadwrite,\n\n pub fd_write: SubscriptionFdReadwrite,\n\n}\n\n\n\nimpl CReprWasmType for SubscriptionUU {}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C)]\n\npub struct Subscription {\n\n /// User-provided value that is attached to the subscription in the\n\n /// implementation and returned through `event::userdata`.\n\n pub userdata: Userdata,\n\n /// The type of the event to which to subscribe, and its contents\n", "file_path": "src/api/wasi/types/structs.rs", "rank": 88, "score": 64672.803091553375 }, { "content": "\n\nimpl CReprWasmType for Dirent {}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C)]\n\npub struct Fdstat {\n\n /// File type.\n\n pub fs_filetype: Filetype,\n\n /// File descriptor flags.\n\n pub fs_flags: Fdflags,\n\n /// Rights that apply to this file descriptor.\n\n pub fs_rights_base: Rights,\n\n /// Maximum set of rights that may be installed on new file descriptors that\n\n /// are created through this file descriptor, 
e.g., through `path_open`.\n\n pub fs_rights_inheriting: Rights,\n\n}\n\n\n\nimpl CReprWasmType for Fdstat {}\n\n\n\n#[derive(Copy, Clone, Debug)]\n", "file_path": "src/api/wasi/types/structs.rs", "rank": 89, "score": 64672.611376652865 }, { "content": "\n\n#[derive(Copy, Clone)]\n\n#[repr(C)]\n\npub union PrestatU {\n\n pub dir: PrestatDir,\n\n}\n\n\n\nimpl Prestat {\n\n pub fn directory(len: u32) -> Prestat {\n\n Prestat {\n\n pr_type: 0,\n\n u: PrestatU {\n\n dir: PrestatDir { pr_name_len: len },\n\n },\n\n }\n\n }\n\n}\n\n\n\n#[repr(C)]\n\npub struct _wasi_iovec_t {\n", "file_path": "src/api/wasi/types/structs.rs", "rank": 90, "score": 64672.34484840772 }, { "content": " /// The type of event that occured\n\n pub r#type: Eventtype,\n\n /// The contents of the event, if it is an `eventtype::fd_read` or\n\n /// `eventtype::fd_write`. `eventtype::clock` events ignore this field.\n\n pub fd_readwrite: EventFdReadwrite,\n\n}\n\n\n\nimpl CReprWasmType for Event {}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C)]\n\npub struct SubscriptionClock {\n\n /// The clock against which to compare the timestamp.\n\n pub id: Clockid,\n\n /// The absolute or relative timestamp.\n\n pub timeout: Timestamp,\n\n /// The amount of time that the implementation may wait additionally\n\n /// to coalesce with other events.\n\n pub precision: Timestamp,\n\n /// Flags specifying whether the timeout is absolute or relative\n", "file_path": "src/api/wasi/types/structs.rs", "rank": 91, "score": 64671.73384192223 }, { "content": "#[repr(C)]\n\npub struct Filestat {\n\n /// Device ID of device containing the file.\n\n pub dev: Device,\n\n /// File serial number.\n\n pub ino: Inode,\n\n /// File type.\n\n pub filetype: Filetype,\n\n /// Number of hard links to the file.\n\n pub nlink: Linkcount,\n\n /// For regular files, the file size in bytes. 
For symbolic links, the length in bytes of the pathname contained in the symbolic link.\n\n pub size: Filesize,\n\n /// Last data access timestamp.\n\n pub atim: Timestamp,\n\n /// Last data modification timestamp.\n\n pub mtim: Timestamp,\n\n /// Last file status change timestamp.\n\n pub ctim: Timestamp,\n\n}\n\n\n", "file_path": "src/api/wasi/types/structs.rs", "rank": 92, "score": 64669.89399113628 }, { "content": "#[derive(Clap)]\n\n#[clap(version = crate_version!())]\n\nstruct Opts {\n\n /// .wasm file\n\n input: String,\n\n /// All other arguments are forwarded to the .wasm file\n\n #[clap(min_values(0))]\n\n _args: Vec<String>,\n\n /// Save heap profile to heap.dat\n\n #[clap(short, long)]\n\n profile: bool,\n\n /// Output patched/normalised wasm to normalised.wasm\n\n #[clap(short, long)]\n\n normalised_out: bool,\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 93, "score": 63902.48309138932 }, { "content": "struct Traps {}\n\n\n\nimpl uptown_funk::ToWasm<&mut Empty> for Traps {\n\n type To = u32;\n\n\n\n fn to(_: &mut Empty, _: &impl Executor, _: Self) -> Result<u32, uptown_funk::Trap> {\n\n Err(uptown_funk::Trap::new(\"Execution traped\"))\n\n }\n\n}\n\n\n", "file_path": "uptown_funk/tests/trap_test.rs", "rank": 94, "score": 60125.25787750074 }, { "content": "#[derive(Clone)]\n\nstruct MyNumber {\n\n value: i32,\n\n}\n\n\n\nimpl MyNumber {\n\n fn new(value: i32) -> Self {\n\n Self { value }\n\n }\n\n}\n\n\n\nimpl std::ops::Add<MyNumber> for MyNumber {\n\n type Output = MyNumber;\n\n\n\n fn add(self, rhs: MyNumber) -> Self::Output {\n\n MyNumber {\n\n value: self.value + rhs.value,\n\n }\n\n }\n\n}\n\n\n", "file_path": "uptown_funk/tests/mutable_state_test.rs", "rank": 95, "score": 59034.54691376436 }, { "content": "struct MyNumber {\n\n value: i32,\n\n}\n\n\n\nimpl std::ops::Add<MyNumber> for MyNumber {\n\n type Output = i32;\n\n\n\n fn add(self, rhs: MyNumber) -> Self::Output {\n\n self.value + rhs.value\n\n }\n\n}\n\n\n\nimpl 
uptown_funk::FromWasm<&mut Empty> for MyNumber {\n\n type From = u32;\n\n\n\n fn from(_: &mut Empty, _: &impl Executor, wasm_u32: u32) -> Result<Self, uptown_funk::Trap> {\n\n Ok(MyNumber {\n\n value: wasm_u32 as i32,\n\n })\n\n }\n\n}\n\n\n", "file_path": "uptown_funk/tests/custom_type_test.rs", "rank": 96, "score": 59034.54691376436 }, { "content": "struct MyNumber {\n\n value: i32,\n\n}\n\n\n\nimpl uptown_funk::ToWasm<&mut Empty> for MyNumber {\n\n type To = u32;\n\n\n\n fn to(_: &mut Empty, _: &impl Executor, number: Self) -> Result<u32, uptown_funk::Trap> {\n\n Ok(number.value as u32)\n\n }\n\n}\n\n\n", "file_path": "uptown_funk/tests/custom_type_return_test.rs", "rank": 97, "score": 58013.924979827665 }, { "content": "struct ArrayState {\n\n vec: Vec<MyNumber>,\n\n}\n\n\n\n#[host_functions(namespace = \"env\")]\n\nimpl ArrayState {\n\n fn create(&mut self, number: i32) -> MyNumber {\n\n MyNumber::new(number)\n\n }\n\n\n\n fn value(&self, number: MyNumber) -> i32 {\n\n number.value\n\n }\n\n\n\n fn add(&mut self, a: MyNumber, b: MyNumber) -> MyNumber {\n\n a + b\n\n }\n\n\n\n fn sum(&self) -> i32 {\n\n self.vec.iter().map(|n| n.value).sum()\n\n }\n\n}\n\n\n", "file_path": "uptown_funk/tests/mutable_state_test.rs", "rank": 98, "score": 58013.924979827665 }, { "content": "#[derive(Debug)]\n\nenum Transformation {\n\n None,\n\n CustomType,\n\n RefCustomType,\n\n RefStr,\n\n RefMutSlice,\n\n RefSliceIoSlices,\n\n RefMutSliceIoSlicesMut,\n\n Unsupported,\n\n}\n", "file_path": "uptown_funk/uptown_funk_macro/src/signature/inputs.rs", "rank": 99, "score": 57837.11393901115 } ]
Rust
connectorx/src/s3.rs
Yizhou150/connector-x
3c56c539d29bc252d205a20700edd63efec301ab
use anyhow::Error; use arrow::datatypes::{Schema, SchemaRef}; use arrow::ffi::{FFI_ArrowArray, FFI_ArrowSchema}; use arrow::json::reader::ReaderBuilder; use arrow::record_batch::RecordBatch; use fehler::throws; use flate2::read::GzDecoder; use futures::stream::{FuturesOrdered, StreamExt}; use futures::TryFutureExt; use rusoto_core::Region; use rusoto_s3::{GetObjectOutput, GetObjectRequest, S3Client, S3}; use serde_json::{from_str, Value}; use std::collections::HashMap; use std::io::{Cursor, Read}; use std::sync::Arc; use strum::EnumString; use tokio::io::AsyncReadExt; use tokio::task::spawn_blocking; #[derive(Debug, Clone, Copy, EnumString)] pub enum JsonFormat { JsonL, Array, } #[throws(Error)] pub async fn read_s3<S>( bucket: &str, objects: &[S], schema: &str, json_format: JsonFormat, ) -> HashMap<String, Vec<(*const FFI_ArrowArray, *const FFI_ArrowSchema)>> where S: AsRef<str>, { let client = S3Client::new(Region::UsWest2); let schema = Arc::new(Schema::from(&from_str::<Value>(schema)?)?); let mut futs: FuturesOrdered<_> = objects .iter() .map(|obj| { client .get_object(GetObjectRequest { bucket: bucket.into(), key: obj.as_ref().to_string(), ..Default::default() }) .err_into() .and_then(|resp| read_as_record_batch(resp, schema.clone(), json_format)) }) .collect(); let mut table = HashMap::new(); while let Some(rb) = futs.next().await { if let Some(batches) = rb? { for batch in batches { for (i, f) in batch.schema().fields().iter().enumerate() { use arrow::datatypes::DataType::*; match f.data_type() { Null | Boolean | Int8 | Int16 | Int32 | Int64 | UInt8 | UInt16 | UInt32 | UInt64 | Float16 | Float32 | Float64 | Utf8 | LargeUtf8 | Binary | LargeBinary => {} _ => continue, } table .entry(f.name().clone()) .or_insert_with(Vec::new) .push(batch.column(i).to_raw()?) 
} } } } table } #[throws(Error)] async fn read_as_record_batch( payload: GetObjectOutput, schema: SchemaRef, json_format: JsonFormat, ) -> Option<Vec<RecordBatch>> { if payload.body.as_ref().is_none() { return None; } let mut buf = vec![]; payload .body .unwrap() .into_async_read() .read_to_end(&mut buf) .await?; let batches = spawn_blocking(move || -> Result<_, Error> { let mut rawjson = vec![]; GzDecoder::new(&*buf).read_to_end(&mut rawjson)?; let mut reader = match json_format { JsonFormat::Array => { array_to_jsonl(rawjson.as_mut()); ReaderBuilder::new() .with_schema(schema) .build(Cursor::new(&rawjson[1..rawjson.len() - 1]))? } JsonFormat::JsonL => ReaderBuilder::new() .with_schema(schema) .build(Cursor::new(&rawjson[..]))?, }; let mut batches = vec![]; while let Some(rb) = reader.next()? { batches.push(rb); } Ok(batches) }) .await??; Some(batches) } fn array_to_jsonl(data: &mut [u8]) { let mut indent = 0; let n = data.len(); for i in 0..n { if data[i] == b',' && indent == 0 { data[i] = b'\n'; } else if data[i] == b'{' { indent += 1; } else if data[i] == b'}' { indent -= 1; } else if i < n - 6 && &data[i..i + 6] == b"[\"new\"" { data[i..i + 6].copy_from_slice(b"[10001"); } else if i < n - 9 && &data[i..i + 9] == b"[\"change\"" { data[i..i + 9].copy_from_slice(b"[10000002"); } else if i < n - 9 && &data[i..i + 9] == b"[\"delete\"" { data[i..i + 9].copy_from_slice(b"[10000004"); } } }
use anyhow::Error; use arrow::datatypes::{Schema, SchemaRef}; use arrow::ffi::{FFI_ArrowArray, FFI_ArrowSchema}; use arrow::js
um::EnumString; use tokio::io::AsyncReadExt; use tokio::task::spawn_blocking; #[derive(Debug, Clone, Copy, EnumString)] pub enum JsonFormat { JsonL, Array, } #[throws(Error)] pub async fn read_s3<S>( bucket: &str, objects: &[S], schema: &str, json_format: JsonFormat, ) -> HashMap<String, Vec<(*const FFI_ArrowArray, *const FFI_ArrowSchema)>> where S: AsRef<str>, { let client = S3Client::new(Region::UsWest2); let schema = Arc::new(Schema::from(&from_str::<Value>(schema)?)?); let mut futs: FuturesOrdered<_> = objects .iter() .map(|obj| { client .get_object(GetObjectRequest { bucket: bucket.into(), key: obj.as_ref().to_string(), ..Default::default() }) .err_into() .and_then(|resp| read_as_record_batch(resp, schema.clone(), json_format)) }) .collect(); let mut table = HashMap::new(); while let Some(rb) = futs.next().await { if let Some(batches) = rb? { for batch in batches { for (i, f) in batch.schema().fields().iter().enumerate() { use arrow::datatypes::DataType::*; match f.data_type() { Null | Boolean | Int8 | Int16 | Int32 | Int64 | UInt8 | UInt16 | UInt32 | UInt64 | Float16 | Float32 | Float64 | Utf8 | LargeUtf8 | Binary | LargeBinary => {} _ => continue, } table .entry(f.name().clone()) .or_insert_with(Vec::new) .push(batch.column(i).to_raw()?) } } } } table } #[throws(Error)] async fn read_as_record_batch( payload: GetObjectOutput, schema: SchemaRef, json_format: JsonFormat, ) -> Option<Vec<RecordBatch>> { if payload.body.as_ref().is_none() { return None; } let mut buf = vec![]; payload .body .unwrap() .into_async_read() .read_to_end(&mut buf) .await?; let batches = spawn_blocking(move || -> Result<_, Error> { let mut rawjson = vec![]; GzDecoder::new(&*buf).read_to_end(&mut rawjson)?; let mut reader = match json_format { JsonFormat::Array => { array_to_jsonl(rawjson.as_mut()); ReaderBuilder::new() .with_schema(schema) .build(Cursor::new(&rawjson[1..rawjson.len() - 1]))? 
} JsonFormat::JsonL => ReaderBuilder::new() .with_schema(schema) .build(Cursor::new(&rawjson[..]))?, }; let mut batches = vec![]; while let Some(rb) = reader.next()? { batches.push(rb); } Ok(batches) }) .await??; Some(batches) } fn array_to_jsonl(data: &mut [u8]) { let mut indent = 0; let n = data.len(); for i in 0..n { if data[i] == b',' && indent == 0 { data[i] = b'\n'; } else if data[i] == b'{' { indent += 1; } else if data[i] == b'}' { indent -= 1; } else if i < n - 6 && &data[i..i + 6] == b"[\"new\"" { data[i..i + 6].copy_from_slice(b"[10001"); } else if i < n - 9 && &data[i..i + 9] == b"[\"change\"" { data[i..i + 9].copy_from_slice(b"[10000002"); } else if i < n - 9 && &data[i..i + 9] == b"[\"delete\"" { data[i..i + 9].copy_from_slice(b"[10000004"); } } }
on::reader::ReaderBuilder; use arrow::record_batch::RecordBatch; use fehler::throws; use flate2::read::GzDecoder; use futures::stream::{FuturesOrdered, StreamExt}; use futures::TryFutureExt; use rusoto_core::Region; use rusoto_s3::{GetObjectOutput, GetObjectRequest, S3Client, S3}; use serde_json::{from_str, Value}; use std::collections::HashMap; use std::io::{Cursor, Read}; use std::sync::Arc; use str
random
[ { "content": "use super::{Consume, Destination, DestinationPartition};\n\nuse crate::data_order::DataOrder;\n\nuse crate::dummy_typesystem::DummyTypeSystem;\n\nuse crate::errors::{ConnectorAgentError, Result};\n\nuse crate::typesystem::{Realize, TypeAssoc, TypeSystem};\n\nuse anyhow::anyhow;\n\nuse arrow::datatypes::Schema;\n\nuse arrow::record_batch::RecordBatch;\n\nuse arrow_assoc::ArrowAssoc;\n\nuse fehler::throws;\n\nuse funcs::{FFinishBuilder, FNewBuilder, FNewField};\n\nuse itertools::Itertools;\n\nuse std::any::Any;\n\nuse std::sync::Arc;\n\n\n\nmod arrow_assoc;\n\nmod funcs;\n\n\n", "file_path": "connectorx/src/destinations/arrow/mod.rs", "rank": 0, "score": 5.1924506224552465 }, { "content": "use anyhow::Error;\n\nuse arrow::csv::reader::ReaderBuilder;\n\nuse arrow::datatypes::{Schema, SchemaRef};\n\nuse arrow::ffi::{FFI_ArrowArray, FFI_ArrowSchema};\n\nuse arrow::record_batch::RecordBatch;\n\nuse fehler::throws;\n\nuse futures::stream::{FuturesOrdered, StreamExt};\n\nuse log::debug;\n\nuse postgres::{Client, NoTls};\n\nuse serde_json::{from_str, Value};\n\nuse std::collections::HashMap;\n\nuse std::io::{Cursor, Read};\n\nuse std::sync::Arc;\n\nuse std::time::Instant;\n\nuse tokio::task::spawn_blocking;\n\n\n\n#[throws(Error)]\n\npub async fn read_pg<S>(\n\n conn: &str,\n\n sqls: &[S],\n", "file_path": "connectorx/src/pg.rs", "rank": 1, "score": 5.191223735098539 }, { "content": "mod typesystem;\n\n\n\nuse crate::data_order::DataOrder;\n\nuse crate::errors::{ConnectorAgentError, Result};\n\nuse crate::sources::{PartitionParser, Produce, Source, SourcePartition};\n\nuse crate::sql::{count_query, get_limit, limit1_query};\n\nuse anyhow::anyhow;\n\nuse chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc};\n\nuse csv::{ReaderBuilder, StringRecord, StringRecordsIntoIter};\n\nuse fehler::throw;\n\nuse hex::decode;\n\nuse log::debug;\n\nuse postgres::{\n\n binary_copy::{BinaryCopyOutIter, BinaryCopyOutRow},\n\n fallible_iterator::FallibleIterator,\n\n 
CopyOutReader,\n\n};\n\nuse r2d2::{Pool, PooledConnection};\n\nuse r2d2_postgres::{postgres::NoTls, PostgresConnectionManager};\n\nuse rust_decimal::Decimal;\n\nuse serde_json::{from_str, Value};\n\nuse sqlparser::dialect::PostgreSqlDialect;\n\nuse std::io::BufRead;\n\nuse std::marker::PhantomData;\n\npub use typesystem::PostgresTypeSystem;\n\nuse uuid::Uuid;\n\n\n", "file_path": "connectorx/src/sources/postgres/mod.rs", "rank": 3, "score": 5.15177612069395 }, { "content": "mod typesystem;\n\n\n\nuse fallible_streaming_iterator::FallibleStreamingIterator;\n\n\n\nuse crate::data_order::DataOrder;\n\nuse crate::errors::{ConnectorAgentError, Result};\n\nuse crate::sources::{PartitionParser, Produce, Source, SourcePartition};\n\nuse crate::sql::{count_query, get_limit, limit1_query};\n\nuse anyhow::anyhow;\n\nuse chrono::{NaiveDate, NaiveDateTime, NaiveTime};\n\nuse derive_more::{Deref, DerefMut};\n\nuse fehler::throw;\n\nuse log::debug;\n\nuse owning_ref::OwningHandle;\n\nuse r2d2::{Pool, PooledConnection};\n\nuse r2d2_sqlite::SqliteConnectionManager;\n\nuse rusqlite::{Row, Rows, Statement};\n\nuse sqlparser::dialect::SQLiteDialect;\n\npub use typesystem::SqliteTypeSystem;\n\n\n\n#[derive(Deref, DerefMut)]\n", "file_path": "connectorx/src/sources/sqlite/mod.rs", "rank": 4, "score": 5.151067314720462 }, { "content": "mod tpch;\n\n\n\nuse pprof::protos::Message;\n\nuse std::env;\n\nuse std::fs::File;\n\nuse std::io::Write;\n\n\n", "file_path": "connectorx-python/examples/flame_tpch.rs", "rank": 5, "score": 5.04566582324691 }, { "content": "mod any_array;\n\n\n\nuse super::{Consume, Destination, DestinationPartition};\n\nuse crate::data_order::DataOrder;\n\nuse crate::dummy_typesystem::DummyTypeSystem;\n\nuse crate::errors::{ConnectorAgentError, Result};\n\nuse crate::typesystem::{ParameterizedFunc, ParameterizedOn, Realize, TypeAssoc, TypeSystem};\n\nuse any_array::{AnyArray, AnyArrayViewMut};\n\nuse anyhow::anyhow;\n\nuse chrono::{DateTime, NaiveDateTime, Utc};\n\nuse 
fehler::{throw, throws};\n\nuse itertools::Itertools;\n\nuse ndarray::{Array2, ArrayView1, ArrayView2, Axis, Ix2};\n\nuse std::any::type_name;\n\nuse std::collections::HashMap;\n\n/// This `Destination` can support mixed data type.\n\npub struct MemoryDestination {\n\n nrows: usize,\n\n schema: Vec<DummyTypeSystem>,\n\n buffers: Vec<AnyArray<Ix2>>,\n", "file_path": "connectorx/src/destinations/memory/mod.rs", "rank": 6, "score": 5.035063157500936 }, { "content": "use crate::errors::{ConnectorAgentError, Result};\n\nuse crate::sources::postgres::PostgresTypeSystem;\n\nuse crate::sql::{\n\n get_partition_range_query, get_partition_range_query_sep, single_col_partition_query,\n\n};\n\nuse anyhow::anyhow;\n\nuse fehler::{throw, throws};\n\nuse postgres::{Client, NoTls};\n\nuse rusqlite::{types::Type, Connection};\n\nuse sqlparser::dialect::{PostgreSqlDialect, SQLiteDialect};\n\nuse std::convert::TryFrom;\n\nuse url::Url;\n\n\n\npub enum SourceType {\n\n Postgres,\n\n Sqlite,\n\n}\n\n\n\npub struct SourceConn {\n\n pub ty: SourceType,\n", "file_path": "connectorx/src/source_router.rs", "rank": 7, "score": 5.031534797682842 }, { "content": "mod boolean;\n\nmod bytes;\n\nmod datetime;\n\nmod float64;\n\nmod int64;\n\nmod string;\n\n// TODO: use macro for integers\n\n\n\npub use crate::pandas::pandas_columns::bytes::{BytesBlock, BytesColumn};\n\npub use boolean::{BooleanBlock, BooleanColumn};\n\nuse connectorx::Result;\n\npub use datetime::{DateTimeBlock, DateTimeColumn};\n\nuse fehler::throw;\n\npub use float64::{Float64Block, Float64Column};\n\npub use int64::{Int64Block, Int64Column};\n\nuse pyo3::{exceptions::PyRuntimeError, PyAny, PyResult};\n\nuse std::any::TypeId;\n\npub use string::{StringBlock, StringColumn};\n\n\n", "file_path": "connectorx-python/src/pandas/pandas_columns/mod.rs", "rank": 8, "score": 5.017263286469333 }, { "content": "use super::super::pystring::{PyString, StringInfo};\n\nuse super::{check_dtype, HasPandasColumn, PandasColumn, 
PandasColumnObject};\n\nuse anyhow::anyhow;\n\nuse connectorx::ConnectorAgentError;\n\nuse fehler::throws;\n\nuse itertools::Itertools;\n\nuse ndarray::{ArrayViewMut2, Axis, Ix2};\n\nuse numpy::PyArray;\n\nuse pyo3::{FromPyObject, PyAny, PyResult, Python};\n\nuse std::any::TypeId;\n\nuse std::sync::{Arc, Mutex};\n\n\n\npub struct StringBlock<'a> {\n\n data: ArrayViewMut2<'a, PyString>,\n\n mutex: Arc<Mutex<()>>,\n\n buf_size_mb: usize,\n\n}\n\n\n\nimpl<'a> FromPyObject<'a> for StringBlock<'a> {\n\n fn extract(ob: &'a PyAny) -> PyResult<Self> {\n", "file_path": "connectorx-python/src/pandas/pandas_columns/string.rs", "rank": 9, "score": 4.984030276009927 }, { "content": "mod destination;\n\nmod pandas_columns;\n\nmod pystring;\n\nmod transports;\n\nmod types;\n\n\n\npub use self::destination::{PandasDestination, PandasPartitionDestination};\n\npub use self::transports::{PostgresPandasTransport, SqlitePandasTransport};\n\npub use self::types::{PandasDType, PandasTypeSystem};\n\nuse crate::errors::ConnectorAgentPythonError;\n\nuse anyhow::anyhow;\n\nuse connectorx::source_router::{SourceConn, SourceType};\n\nuse connectorx::{\n\n sources::{\n\n postgres::{Binary, PostgresSource, CSV},\n\n sqlite::SqliteSource,\n\n },\n\n Dispatcher,\n\n};\n\nuse fehler::throws;\n\nuse log::debug;\n\nuse pyo3::{PyAny, Python};\n\n\n\n#[throws(ConnectorAgentPythonError)]\n", "file_path": "connectorx-python/src/pandas/mod.rs", "rank": 10, "score": 4.980249319766815 }, { "content": "use super::{PartitionParser, Produce, Source, SourcePartition};\n\nuse crate::data_order::DataOrder;\n\nuse crate::dummy_typesystem::DummyTypeSystem;\n\nuse crate::errors::{ConnectorAgentError, Result};\n\nuse anyhow::anyhow;\n\nuse chrono::{DateTime, Utc};\n\nuse fehler::{throw, throws};\n\nuse regex::{Regex, RegexBuilder};\n\nuse std::collections::HashSet;\n\nuse std::fs::File;\n\n\n\npub struct CSVSource {\n\n schema: Vec<DummyTypeSystem>,\n\n files: Vec<String>,\n\n names: Vec<String>,\n\n}\n\n\n\nimpl 
CSVSource {\n\n pub fn new(schema: &[DummyTypeSystem]) -> Self {\n\n CSVSource {\n", "file_path": "connectorx/src/sources/csv.rs", "rank": 11, "score": 4.980249319766815 }, { "content": "use crate::errors::ConnectorAgentError;\n\nuse anyhow::anyhow;\n\nuse fehler::{throw, throws};\n\nuse log::{debug, trace};\n\nuse sqlparser::ast::{\n\n BinaryOperator, Expr, Function, FunctionArg, Ident, ObjectName, Query, Select, SelectItem,\n\n SetExpr, Statement, TableAlias, TableFactor, TableWithJoins, Value,\n\n};\n\nuse sqlparser::dialect::Dialect;\n\nuse sqlparser::parser::Parser;\n\n\n\n#[throws(ConnectorAgentError)]\n", "file_path": "connectorx/src/sql.rs", "rank": 12, "score": 4.952237043044119 }, { "content": "use super::pandas_columns::{\n\n BooleanBlock, BytesBlock, DateTimeBlock, Float64Block, HasPandasColumn, Int64Block,\n\n PandasColumn, PandasColumnObject, StringBlock,\n\n};\n\nuse super::types::{PandasDType, PandasTypeSystem};\n\nuse anyhow::anyhow;\n\nuse connectorx::{\n\n ConnectorAgentError, Consume, DataOrder, Destination, DestinationPartition, Result, TypeAssoc,\n\n TypeSystem,\n\n};\n\nuse fehler::{throw, throws};\n\nuse itertools::Itertools;\n\nuse log::debug;\n\nuse pyo3::{\n\n types::{PyDict, PyList},\n\n FromPyObject, PyAny, Python,\n\n};\n\nuse std::collections::HashMap;\n\nuse std::mem::transmute;\n\npub struct PandasDestination<'py> {\n", "file_path": "connectorx-python/src/pandas/destination.rs", "rank": 13, "score": 4.951410016197333 }, { "content": "use crate::errors::ConnectorAgentPythonError;\n\nuse connectorx::source_router::SourceConn;\n\nuse dict_derive::FromPyObject;\n\nuse fehler::throw;\n\nuse pyo3::prelude::*;\n\nuse pyo3::{\n\n exceptions::{PyNotImplementedError, PyValueError},\n\n PyResult,\n\n};\n\nuse std::convert::TryFrom;\n\n\n\n#[derive(FromPyObject)]\n\npub struct PartitionQuery {\n\n query: String,\n\n column: String,\n\n min: Option<i64>,\n\n max: Option<i64>,\n\n num: usize,\n\n}\n\n\n", "file_path": 
"connectorx-python/src/read_sql.rs", "rank": 14, "score": 4.911599158371075 }, { "content": "pub mod transports;\n\n\n\npub use crate::data_order::DataOrder;\n\npub use crate::destinations::{Consume, Destination, DestinationPartition};\n\npub use crate::dispatcher::Dispatcher;\n\npub use crate::dummy_typesystem::DummyTypeSystem;\n\npub use crate::errors::{ConnectorAgentError, Result};\n\npub use crate::sources::{PartitionParser, Source, SourcePartition};\n\npub use crate::typesystem::{\n\n ParameterizedFunc, ParameterizedOn, Realize, Transport, TypeAssoc, TypeConversion, TypeSystem,\n\n};\n", "file_path": "connectorx/src/lib.rs", "rank": 15, "score": 4.842064979397371 }, { "content": "use crate::constants::SECONDS_IN_DAY;\n\nuse crate::errors::{ConnectorAgentError, Result};\n\nuse arrow::array::{\n\n ArrayBuilder, BooleanBuilder, Date32Builder, Date64Builder, Float64Builder, Int32Builder,\n\n Int64Builder, LargeStringBuilder,\n\n};\n\nuse arrow::datatypes::Field;\n\nuse arrow::datatypes::{DataType as ArrowDataType, DateUnit};\n\nuse chrono::{Date, DateTime, NaiveDate, NaiveDateTime, Utc};\n\nuse fehler::throws;\n\n\n\n/// Associate arrow builder with native type\n", "file_path": "connectorx/src/destinations/arrow/arrow_assoc.rs", "rank": 16, "score": 4.842064979397371 }, { "content": "use super::arrow_assoc::ArrowAssoc;\n\nuse super::Builder;\n\nuse crate::errors::Result;\n\nuse crate::typesystem::{ParameterizedFunc, ParameterizedOn};\n\nuse anyhow::anyhow;\n\nuse arrow::array::{ArrayBuilder, ArrayRef};\n\nuse arrow::datatypes::Field;\n\n\n\npub struct FNewBuilder;\n\n\n\nimpl ParameterizedFunc for FNewBuilder {\n\n type Function = fn(nrows: usize) -> Builder;\n\n}\n\n\n\nimpl<T> ParameterizedOn<T> for FNewBuilder\n\nwhere\n\n T: ArrowAssoc,\n\n{\n\n fn parameterize() -> Self::Function {\n\n fn imp<T>(nrows: usize) -> Builder\n", "file_path": "connectorx/src/destinations/arrow/funcs.rs", "rank": 17, "score": 4.822558232413276 }, { "content": "use 
connectorx::sources::{csv::CSVSource, Produce, Source, SourcePartition};\n\nuse connectorx::{destinations::memory::MemoryDestination, Destination};\n\nuse connectorx::{transports::CSVMemoryTransport, Dispatcher, DummyTypeSystem};\n\nuse ndarray::array;\n\n\n\n#[test]\n\n#[should_panic]\n", "file_path": "connectorx/tests/test_csv.rs", "rank": 18, "score": 4.781164274913445 }, { "content": "use connectorx_python::read_sql::{read_sql, PartitionQuery};\n\nuse pyo3::Python;\n\nuse std::env;\n\n\n\nconst QUERY: &'static str = r#\"\n\nSELECT \n\n *\n\nFROM LINEITEM\"#;\n\n\n", "file_path": "connectorx-python/examples/tpch.rs", "rank": 19, "score": 4.7715434711461135 }, { "content": "use super::{check_dtype, HasPandasColumn, PandasColumn, PandasColumnObject};\n\nuse anyhow::anyhow;\n\nuse connectorx::ConnectorAgentError;\n\nuse fehler::throws;\n\nuse ndarray::{ArrayViewMut2, Axis, Ix2};\n\nuse numpy::PyArray;\n\nuse pyo3::{FromPyObject, PyAny, PyResult};\n\nuse std::any::TypeId;\n\n\n\n// Float\n\npub struct Float64Block<'a> {\n\n data: ArrayViewMut2<'a, f64>,\n\n}\n\n\n\nimpl<'a> FromPyObject<'a> for Float64Block<'a> {\n\n fn extract(ob: &'a PyAny) -> PyResult<Self> {\n\n check_dtype(ob, \"float64\")?;\n\n let array = ob.downcast::<PyArray<f64, Ix2>>()?;\n\n let data = unsafe { array.as_array_mut() };\n\n Ok(Float64Block { data })\n", "file_path": "connectorx-python/src/pandas/pandas_columns/float64.rs", "rank": 20, "score": 4.770584444038515 }, { "content": "mod csv_arrow;\n\nmod csv_memory;\n\nmod dummy_arrow;\n\nmod dummy_memory;\n\nmod postgres_arrow;\n\nmod postgres_memory;\n\n\n\npub use csv_arrow::CSVArrowTransport;\n\npub use csv_memory::CSVMemoryTransport;\n\npub use dummy_arrow::DummyArrowTransport;\n\npub use dummy_memory::DummyMemoryTransport;\n\npub use postgres_arrow::PostgresArrowTransport;\n\npub use postgres_memory::PostgresMemoryTransport;\n", "file_path": "connectorx/src/transports/mod.rs", "rank": 21, "score": 4.746500126629779 }, { "content": "use 
super::{check_dtype, HasPandasColumn, PandasColumn, PandasColumnObject};\n\nuse anyhow::anyhow;\n\nuse chrono::{DateTime, Utc};\n\nuse connectorx::ConnectorAgentError;\n\nuse fehler::throws;\n\nuse ndarray::{ArrayViewMut2, Axis, Ix2};\n\nuse numpy::PyArray;\n\nuse pyo3::{FromPyObject, PyAny, PyResult};\n\nuse std::any::TypeId;\n\n\n\n// datetime64 is represented in int64 in numpy\n\n// https://github.com/numpy/numpy/blob/master/numpy/core/include/numpy/npy_common.h#L1104\n\npub struct DateTimeBlock<'a> {\n\n data: ArrayViewMut2<'a, i64>,\n\n}\n\n\n\nimpl<'a> FromPyObject<'a> for DateTimeBlock<'a> {\n\n fn extract(ob: &'a PyAny) -> PyResult<Self> {\n\n check_dtype(ob, \"datetime64[ns]\")?;\n\n let array = ob.downcast::<PyArray<i64, Ix2>>()?;\n", "file_path": "connectorx-python/src/pandas/pandas_columns/datetime.rs", "rank": 22, "score": 4.745515321166108 }, { "content": "use super::{check_dtype, HasPandasColumn, PandasColumn, PandasColumnObject};\n\nuse anyhow::anyhow;\n\nuse connectorx::ConnectorAgentError;\n\nuse fehler::throws;\n\nuse ndarray::{ArrayViewMut2, Axis, Ix2};\n\nuse numpy::{npyffi::NPY_TYPES, Element, PyArray, PyArrayDescr};\n\nuse pyo3::{FromPyObject, Py, PyAny, PyResult, Python};\n\nuse std::any::TypeId;\n\nuse std::sync::{Arc, Mutex};\n\n\n\n#[derive(Clone)]\n\n#[repr(transparent)]\n\npub struct PyBytes(Py<pyo3::types::PyBytes>);\n\n\n\n// In order to put it into a numpy array\n\nimpl Element for PyBytes {\n\n const DATA_TYPE: numpy::DataType = numpy::DataType::Object;\n\n fn is_same_type(dtype: &PyArrayDescr) -> bool {\n\n unsafe { *dtype.as_dtype_ptr() }.type_num == NPY_TYPES::NPY_OBJECT as i32\n\n }\n", "file_path": "connectorx-python/src/pandas/pandas_columns/bytes.rs", "rank": 23, "score": 4.730928453338136 }, { "content": "#![allow(dead_code)]\n\n\n\nuse ndarray::{Array, ArrayView, ArrayViewMut, Axis, Dimension, Ix};\n\nuse std::any::{Any, TypeId};\n\nuse std::mem::transmute;\n\n\n", "file_path": 
"connectorx/src/destinations/memory/any_array.rs", "rank": 24, "score": 4.72757404807443 }, { "content": "use crate::pandas::destination::PandasDestination;\n\nuse crate::pandas::types::PandasTypeSystem;\n\nuse chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc};\n\nuse connectorx::{\n\n impl_transport,\n\n sources::postgres::{Binary, PostgresSource, PostgresTypeSystem, CSV},\n\n typesystem::TypeConversion,\n\n};\n\nuse rust_decimal::prelude::*;\n\nuse serde_json::{to_string, Value};\n\nuse std::marker::PhantomData;\n\nuse uuid::Uuid;\n\n\n\npub struct PostgresPandasTransport<'py, P>(&'py (), PhantomData<P>);\n\n\n\nimpl_transport!(\n\n name = PostgresPandasTransport<'tp, Binary>,\n\n systems = PostgresTypeSystem => PandasTypeSystem,\n\n route = PostgresSource<Binary> => PandasDestination<'tp>,\n\n mappings = {\n", "file_path": "connectorx-python/src/pandas/transports/postgres.rs", "rank": 25, "score": 4.713063273856552 }, { "content": "#![feature(generic_associated_types)]\n\n#![allow(incomplete_features)]\n\n\n\nmod errors;\n\npub mod pandas;\n\npub mod read_sql;\n\n\n\nuse anyhow::Result;\n\nuse connectorx::pg;\n\nuse pyo3::prelude::*;\n\nuse pyo3::{\n\n exceptions::PyValueError,\n\n types::{IntoPyDict, PyTuple},\n\n wrap_pyfunction, PyResult,\n\n};\n\nuse std::sync::Once;\n\nuse tokio::runtime;\n\n\n\nstatic START: Once = Once::new();\n\n\n\n// https://github.com/PyO3/pyo3-built/issues/21\n\n// #[allow(dead_code)]\n\n// mod build {\n\n// include!(concat!(env!(\"OUT_DIR\"), \"/built.rs\"));\n\n// }\n\n\n\n#[pymodule]\n", "file_path": "connectorx-python/src/lib.rs", "rank": 26, "score": 4.660268227929234 }, { "content": "// Why we need to implement Transmit for TypeSystem? This is because only TypeSystem knows how to dispatch\n\n// functions to it's native type N based on our defined type T. 
Remember, T is value and N is a type.\n\n\n\nuse crate::destinations::{Consume, Destination, DestinationPartition};\n\nuse crate::errors::Result;\n\nuse crate::sources::{PartitionParser, Produce, Source, SourcePartition};\n\n\n\n/// `TypeSystem` describes all the types a source or destination support\n\n/// using enum variants.\n\n/// The variant can be used to type check with a static type `T` through the `check` method.\n", "file_path": "connectorx/src/typesystem.rs", "rank": 27, "score": 4.643962152655281 }, { "content": "use super::{check_dtype, HasPandasColumn, PandasColumn, PandasColumnObject};\n\nuse anyhow::anyhow;\n\nuse connectorx::ConnectorAgentError;\n\nuse fehler::throws;\n\nuse ndarray::{ArrayViewMut1, ArrayViewMut2, Axis, Ix2};\n\nuse numpy::{PyArray, PyArray1};\n\nuse pyo3::{FromPyObject, PyAny, PyResult};\n\nuse std::any::TypeId;\n\n\n\npub enum Int64Block<'a> {\n\n NumPy(ArrayViewMut2<'a, i64>),\n\n Extention(ArrayViewMut1<'a, i64>, ArrayViewMut1<'a, bool>),\n\n}\n\nimpl<'a> FromPyObject<'a> for Int64Block<'a> {\n\n fn extract(ob: &'a PyAny) -> PyResult<Self> {\n\n if let Ok(array) = ob.downcast::<PyArray<i64, Ix2>>() {\n\n check_dtype(ob, \"int64\")?;\n\n let data = unsafe { array.as_array_mut() };\n\n Ok(Int64Block::NumPy(data))\n\n } else {\n", "file_path": "connectorx-python/src/pandas/pandas_columns/int64.rs", "rank": 28, "score": 4.63325565573454 }, { "content": "use super::{check_dtype, HasPandasColumn, PandasColumn, PandasColumnObject};\n\nuse anyhow::anyhow;\n\nuse connectorx::ConnectorAgentError;\n\nuse fehler::throws;\n\nuse ndarray::{ArrayViewMut1, ArrayViewMut2, Axis, Ix2};\n\nuse numpy::{PyArray, PyArray1};\n\nuse pyo3::{FromPyObject, PyAny, PyResult};\n\nuse std::any::TypeId;\n\n\n\n// Boolean\n\npub enum BooleanBlock<'a> {\n\n NumPy(ArrayViewMut2<'a, bool>),\n\n Extention(ArrayViewMut1<'a, bool>, ArrayViewMut1<'a, bool>),\n\n}\n\nimpl<'a> FromPyObject<'a> for BooleanBlock<'a> {\n\n fn extract(ob: &'a PyAny) -> PyResult<Self> {\n\n 
if let Ok(array) = ob.downcast::<PyArray<bool, Ix2>>() {\n\n check_dtype(ob, \"bool\")?;\n\n let data = unsafe { array.as_array_mut() };\n\n Ok(BooleanBlock::NumPy(data))\n", "file_path": "connectorx-python/src/pandas/pandas_columns/boolean.rs", "rank": 29, "score": 4.625423297145193 }, { "content": "use connectorx::{\n\n destinations::memory::MemoryDestination,\n\n sources::{\n\n postgres::{Binary, PostgresSource, CSV},\n\n Produce, Source, SourcePartition,\n\n },\n\n transports::PostgresMemoryTransport,\n\n Dispatcher,\n\n};\n\nuse ndarray::array;\n\nuse std::env;\n\n\n\n#[test]\n", "file_path": "connectorx/tests/test_postgres.rs", "rank": 30, "score": 4.60039708309393 }, { "content": "use super::{PartitionParser, Produce, Source, SourcePartition};\n\nuse crate::data_order::DataOrder;\n\nuse crate::dummy_typesystem::DummyTypeSystem;\n\nuse crate::errors::{ConnectorAgentError, Result};\n\nuse chrono::{offset, Date, DateTime, Utc};\n\nuse fehler::{throw, throws};\n\nuse num_traits::cast::FromPrimitive;\n\n\n\npub struct DummySource {\n\n names: Vec<String>,\n\n schema: Vec<DummyTypeSystem>,\n\n queries: Vec<String>,\n\n}\n\n\n\nimpl DummySource {\n\n pub fn new<S: AsRef<str>>(names: &[S], schema: &[DummyTypeSystem]) -> Self {\n\n assert_eq!(names.len(), schema.len());\n\n DummySource {\n\n names: names.iter().map(|s| s.as_ref().to_string()).collect(),\n\n schema: schema.to_vec(),\n", "file_path": "connectorx/src/sources/dummy.rs", "rank": 31, "score": 4.6001711908074725 }, { "content": "use chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc};\n\nuse postgres::types::Type;\n\nuse rust_decimal::Decimal;\n\nuse serde_json::Value;\n\nuse uuid::Uuid;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum PostgresTypeSystem {\n\n Bool(bool),\n\n Float4(bool),\n\n Float8(bool),\n\n Numeric(bool),\n\n Int2(bool),\n\n Int4(bool),\n\n Int8(bool),\n\n Date(bool),\n\n Char(bool),\n\n BpChar(bool),\n\n VarChar(bool),\n\n Text(bool),\n", "file_path": 
"connectorx/src/sources/postgres/typesystem.rs", "rank": 32, "score": 4.581751465187202 }, { "content": "mod postgres;\n\nmod sqlite;\n\n\n\npub use postgres::PostgresPandasTransport;\n\npub use sqlite::SqlitePandasTransport;\n", "file_path": "connectorx-python/src/pandas/transports/mod.rs", "rank": 33, "score": 4.502185593381883 }, { "content": "use crate::{\n\n data_order::{coordinate, DataOrder},\n\n destinations::{Destination, DestinationPartition},\n\n errors::Result,\n\n sources::{Source, SourcePartition},\n\n typesystem::{Transport, TypeSystem},\n\n};\n\nuse itertools::Itertools;\n\nuse log::debug;\n\nuse rayon::prelude::*;\n\nuse std::marker::PhantomData;\n\n\n\n/// A dispatcher owns a `SourceBuilder` `SB` and a vector of `queries`\n\n/// `schema` is a temporary input before we implement infer schema or get schema from DB.\n\npub struct Dispatcher<'a, S, W, TP> {\n\n src: S,\n\n dst: &'a mut W,\n\n queries: Vec<String>,\n\n _phantom: PhantomData<TP>,\n\n}\n", "file_path": "connectorx/src/dispatcher.rs", "rank": 34, "score": 4.485608464156666 }, { "content": "use crate::destinations::memory::MemoryDestination;\n\nuse crate::dummy_typesystem::DummyTypeSystem;\n\nuse crate::sources::postgres::{Binary, PostgresSource, PostgresTypeSystem, CSV};\n\nuse crate::typesystem::TypeConversion;\n\nuse chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc};\n\nuse std::marker::PhantomData;\n\nuse uuid::Uuid;\n\n\n\npub struct PostgresMemoryTransport<P>(PhantomData<P>);\n\n\n\nimpl_transport!(\n\n name = PostgresMemoryTransport<CSV>,\n\n systems = PostgresTypeSystem => DummyTypeSystem,\n\n route = PostgresSource<CSV> => MemoryDestination,\n\n mappings = {\n\n { Float4[f32] => F64[f64] | conversion all }\n\n { Float8[f64] => F64[f64] | conversion all }\n\n { Int2[i16] => I64[i64] | conversion all }\n\n { Int4[i32] => I64[i64] | conversion all }\n\n { Int8[i64] => I64[i64] | conversion all }\n", "file_path": "connectorx/src/transports/postgres_memory.rs", "rank": 35, 
"score": 4.479669061144285 }, { "content": "use connectorx::{\n\n destinations::memory::MemoryDestination, sources::dummy::DummySource,\n\n transports::DummyMemoryTransport, DataOrder, Destination, DestinationPartition, Dispatcher,\n\n DummyTypeSystem, Result, Source,\n\n};\n\nuse ndarray::array;\n\nuse rayon::iter::{IntoParallelIterator, ParallelIterator};\n\n\n\n#[test]\n\n#[should_panic]\n", "file_path": "connectorx/tests/test_mix.rs", "rank": 36, "score": 4.4219635266274775 }, { "content": "use arrow::array::{BooleanArray, Float64Array, Int64Array, LargeStringArray};\n\nuse arrow::record_batch::RecordBatch;\n\nuse connectorx::{\n\n destinations::arrow::ArrowDestination, sources::dummy::DummySource,\n\n transports::DummyArrowTransport, Dispatcher, DummyTypeSystem,\n\n};\n\n\n\n#[test]\n", "file_path": "connectorx/tests/test_arrow.rs", "rank": 37, "score": 4.4219635266274775 }, { "content": " PandasTypeSystem::Bool(true) => true,\n\n PandasTypeSystem::Char(_) => false, // we use object instead of string (Extension) for now\n\n PandasTypeSystem::Str(_) => false, // we use object instead of string (Extension) for now\n\n PandasTypeSystem::BoxStr(_) => false, // we use object instead of string (Extension) for now\n\n PandasTypeSystem::String(_) => false, // we use object instead of string (Extension) for now\n\n PandasTypeSystem::Bytes(_) => false, // we use object instead of string (Extension) for now\n\n PandasTypeSystem::DateTime(_) => false,\n\n }\n\n }\n\n\n\n fn block_name(&self) -> &'static str {\n\n match *self {\n\n PandasTypeSystem::I64(false) => \"IntBlock\",\n\n PandasTypeSystem::I64(true) => \"ExtensionBlock\",\n\n PandasTypeSystem::F64(_) => \"FloatBlock\",\n\n PandasTypeSystem::Bool(false) => \"BoolBlock\",\n\n PandasTypeSystem::Bool(true) => \"ExtensionBlock\",\n\n PandasTypeSystem::Char(_) => \"ObjectBlock\", // we use object instead of string (Extension) for now\n\n PandasTypeSystem::Str(_) => \"ObjectBlock\", // we use object instead of string 
(Extension) for now\n\n PandasTypeSystem::BoxStr(_) => \"ObjectBlock\", // we use object instead of string (Extension) for now\n\n PandasTypeSystem::String(_) => \"ObjectBlock\", // we use object instead of string (Extension) for now\n\n PandasTypeSystem::Bytes(_) => \"ObjectBlock\", // we use object instead of string (Extension) for now\n\n PandasTypeSystem::DateTime(_) => \"DatetimeBlock\",\n\n }\n\n }\n\n}\n", "file_path": "connectorx-python/src/pandas/types.rs", "rank": 38, "score": 4.42027921462314 }, { "content": "/// `TypeSystem` describes all the types a source or destination support\n\n/// using enum variants.\n\n/// The variant can be used to type check with a static type `T` through the `check` method.\n\npub trait TypeSystem: Copy + Clone + Send + Sync {\n\n /// Check whether T is the same type as defined by self.\n\n fn check<T: TypeAssoc<Self>>(self) -> Result<()> {\n\n T::check(self)\n\n }\n\n}\n\n\n", "file_path": "connectorx/src/typesystem.rs", "rank": 39, "score": 4.386697845449235 }, { "content": "pub mod arrow;\n\npub mod memory;\n\n\n\nuse crate::data_order::DataOrder;\n\nuse crate::errors::Result;\n\nuse crate::typesystem::{TypeAssoc, TypeSystem};\n\n\n\n/// A `Destination` is associated with a `TypeSystem` and a `PartitionDestination`.\n\n/// `PartitionDestination` allows multiple threads write data into the buffer owned by `Destination`.\n", "file_path": "connectorx/src/destinations/mod.rs", "rank": 40, "score": 4.384175291524713 }, { "content": "use crate::destinations::arrow::ArrowDestination;\n\nuse crate::dummy_typesystem::DummyTypeSystem;\n\nuse crate::sources::postgres::{Binary, PostgresSource, PostgresTypeSystem};\n\nuse crate::typesystem::TypeConversion;\n\nuse chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc};\n\nuse uuid::Uuid;\n\n\n\npub struct PostgresArrowTransport;\n\n\n\nimpl_transport!(\n\n name = PostgresArrowTransport,\n\n systems = PostgresTypeSystem => DummyTypeSystem,\n\n route = PostgresSource<Binary> => 
ArrowDestination,\n\n mappings = {\n\n { Float4[f32] => F64[f64] | conversion all }\n\n { Float8[f64] => F64[f64] | conversion all }\n\n { Int2[i16] => I64[i64] | conversion all }\n\n { Int4[i32] => I64[i64] | conversion all }\n\n { Int8[i64] => I64[i64] | conversion all }\n\n { Bool[bool] => Bool[bool] | conversion all }\n", "file_path": "connectorx/src/transports/postgres_arrow.rs", "rank": 41, "score": 4.372320004896305 }, { "content": "// When implementing a data source, be make sure to implement Queryable and\n\n// Producer for all supported types in crate::types::DataType.\n\n\n\npub mod csv;\n\npub mod dummy;\n\npub mod postgres;\n\npub mod sqlite;\n\n\n\nuse crate::data_order::DataOrder;\n\nuse crate::errors::Result;\n\nuse crate::typesystem::{TypeAssoc, TypeSystem};\n\n\n", "file_path": "connectorx/src/sources/mod.rs", "rank": 42, "score": 4.365522335280436 }, { "content": "// Unfortunately, due to the orphan rule, typesystem implementation should be in this crate.\n\nuse chrono::{DateTime, Utc};\n\nuse connectorx::errors::{ConnectorAgentError, Result};\n\nuse connectorx::impl_typesystem;\n\nuse fehler::throws;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]\n\npub enum PandasTypeSystem {\n\n F64(bool),\n\n I64(bool),\n\n Bool(bool),\n\n Char(bool),\n\n Str(bool),\n\n BoxStr(bool),\n\n String(bool),\n\n Bytes(bool),\n\n DateTime(bool),\n\n}\n\n\n\nimpl_typesystem! 
{\n", "file_path": "connectorx-python/src/pandas/types.rs", "rank": 43, "score": 4.327460074101177 }, { "content": "use crate::data_order::DataOrder;\n\nuse std::any::type_name;\n\nuse std::fmt;\n\nuse thiserror::Error;\n\n\n\npub type Result<T> = std::result::Result<T, ConnectorAgentError>;\n\n\n\n/// Errors that can be raised from this library.\n\n#[derive(Error, Debug)]\n\npub enum ConnectorAgentError {\n\n /// The required type does not same as the schema defined.\n\n #[error(\"Data type unexpected: {0:?} expected, {1} found.\")]\n\n TypeCheckFailed(String, &'static str),\n\n\n\n #[error(\"Index operation out of bound.\")]\n\n OutOfBound,\n\n\n\n #[error(\"Data order not supported {0:?}.\")]\n\n UnsupportedDataOrder(DataOrder),\n\n\n", "file_path": "connectorx/src/errors.rs", "rank": 44, "score": 4.286781058232852 }, { "content": "use crate::destinations::memory::MemoryDestination;\n\nuse crate::dummy_typesystem::DummyTypeSystem;\n\nuse crate::sources::dummy::DummySource;\n\nuse crate::typesystem::TypeConversion;\n\nuse chrono::{DateTime, NaiveDate, NaiveDateTime, Utc};\n\n\n\npub struct DummyMemoryTransport;\n\n\n\nimpl_transport!(\n\n name = DummyMemoryTransport,\n\n systems = DummyTypeSystem => DummyTypeSystem,\n\n route = DummySource => MemoryDestination,\n\n mappings = {\n\n { F64[f64] => F64[f64] | conversion all}\n\n { I64[i64] => I64[i64] | conversion all}\n\n { Bool[bool] => Bool[bool] | conversion all}\n\n { String[String] => String[String] | conversion all}\n\n { DateTime[DateTime<Utc>] => DateTime[DateTime<Utc>] | conversion all}\n\n }\n\n);\n", "file_path": "connectorx/src/transports/dummy_memory.rs", "rank": 45, "score": 4.262023792970766 }, { "content": "use crate::destinations::arrow::ArrowDestination;\n\nuse crate::dummy_typesystem::DummyTypeSystem;\n\nuse crate::sources::dummy::DummySource;\n\nuse crate::typesystem::TypeConversion;\n\nuse chrono::{DateTime, NaiveDate, NaiveDateTime, Utc};\n\n\n\npub struct 
DummyArrowTransport;\n\n\n\nimpl_transport!(\n\n name = DummyArrowTransport,\n\n systems = DummyTypeSystem => DummyTypeSystem,\n\n route = DummySource => ArrowDestination,\n\n mappings = {\n\n { F64[f64] => F64[f64] | conversion all}\n\n { I64[i64] => I64[i64] | conversion all}\n\n { Bool[bool] => Bool[bool] | conversion all}\n\n { String[String] => String[String] | conversion all}\n\n { DateTime[DateTime<Utc>] => DateTime[DateTime<Utc>] | conversion all}\n\n }\n\n);\n", "file_path": "connectorx/src/transports/dummy_arrow.rs", "rank": 46, "score": 4.262023792970766 }, { "content": "use crate::destinations::memory::MemoryDestination;\n\nuse crate::dummy_typesystem::DummyTypeSystem;\n\nuse crate::sources::csv::CSVSource;\n\nuse crate::typesystem::TypeConversion;\n\nuse chrono::{DateTime, NaiveDate, NaiveDateTime, Utc};\n\n\n\npub struct CSVMemoryTransport;\n\n\n\nimpl_transport!(\n\n name = CSVMemoryTransport,\n\n systems = DummyTypeSystem => DummyTypeSystem,\n\n route = CSVSource => MemoryDestination,\n\n mappings = {\n\n { F64[f64] => F64[f64] | conversion all}\n\n { I64[i64] => I64[i64] | conversion all}\n\n { Bool[bool] => Bool[bool] | conversion all}\n\n { String[String] => String[String] | conversion all}\n\n { DateTime[DateTime<Utc>] => DateTime[DateTime<Utc>] | conversion all}\n\n }\n\n);\n", "file_path": "connectorx/src/transports/csv_memory.rs", "rank": 47, "score": 4.262023792970766 }, { "content": "use crate::destinations::arrow::ArrowDestination;\n\nuse crate::dummy_typesystem::DummyTypeSystem;\n\nuse crate::sources::csv::CSVSource;\n\nuse crate::typesystem::TypeConversion;\n\nuse chrono::{DateTime, NaiveDate, NaiveDateTime, Utc};\n\n\n\npub struct CSVArrowTransport;\n\n\n\nimpl_transport!(\n\n name = CSVArrowTransport,\n\n systems = DummyTypeSystem => DummyTypeSystem,\n\n route = CSVSource => ArrowDestination,\n\n mappings = {\n\n { F64[f64] => F64[f64] | conversion all}\n\n { I64[i64] => I64[i64] | conversion all}\n\n { Bool[bool] => Bool[bool] | 
conversion all}\n\n { String[String] => String[String] | conversion all}\n\n { DateTime[DateTime<Utc>] => DateTime[DateTime<Utc>] | conversion all}\n\n }\n\n);\n", "file_path": "connectorx/src/transports/csv_arrow.rs", "rank": 48, "score": 4.262023792970766 }, { "content": "\"\"\"\n\nUsage:\n\n tpch-cx.py [--protocol=<protocol>] [--conn=<conn>] <num>\n\n\n\nOptions:\n\n --protocol=<protocol> The protocol to use [default: binary].\n\n --conn=<conn> The connection url to use [default: POSTGRES_URL].\n\n -h --help Show this screen.\n\n --version Show version.\n\n\"\"\"\n\nimport os\n\n\n\nimport connectorx as cx\n\nfrom contexttimer import Timer\n\nfrom docopt import docopt\n\n\n\nif __name__ == \"__main__\":\n\n\n\n args = docopt(__doc__, version=\"Naval Fate 2.0\")\n\n conn = os.environ[args[\"--conn\"]]\n\n table = os.environ[\"TPCH_TABLE\"]\n\n\n\n with Timer() as timer:\n\n df = cx.read_sql(\n\n conn,\n\n f\"\"\"SELECT * FROM {table}\"\"\",\n\n partition_on=\"L_ORDERKEY\",\n\n partition_num=int(args[\"<num>\"]),\n\n protocol=args[\"--protocol\"],\n\n )\n\n print(\"time in total:\", timer.elapsed)\n\n\n\n print(df.head())\n\n print(df.tail())\n\n print(len(df))\n", "file_path": "benchmarks/tpch-cx.py", "rank": 49, "score": 4.122337636406324 }, { "content": "use bitfield::bitfield;\n\nuse numpy::{npyffi::NPY_TYPES, Element, PyArrayDescr};\n\nuse pyo3::{ffi, Py, Python};\n\nuse std::str::from_utf8_unchecked;\n\n#[derive(Clone)]\n\n#[repr(transparent)]\n\npub struct PyString(Py<pyo3::types::PyString>);\n\n\n\n// In order to put it into a numpy array\n\nimpl Element for PyString {\n\n const DATA_TYPE: numpy::DataType = numpy::DataType::Object;\n\n fn is_same_type(dtype: &PyArrayDescr) -> bool {\n\n unsafe { *dtype.as_dtype_ptr() }.type_num == NPY_TYPES::NPY_OBJECT as i32\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub enum StringInfo {\n\n ASCII(usize), // len of the string, not byte length\n\n UCS1(usize),\n", "file_path": "connectorx-python/src/pandas/pystring.rs", 
"rank": 50, "score": 4.094342733914117 }, { "content": "use crate::pandas::destination::PandasDestination;\n\nuse crate::pandas::types::PandasTypeSystem;\n\nuse chrono::{DateTime, NaiveDate, NaiveDateTime, NaiveTime, Utc};\n\nuse connectorx::{\n\n impl_transport,\n\n sources::sqlite::{SqliteSource, SqliteTypeSystem},\n\n typesystem::TypeConversion,\n\n};\n\n\n\npub struct SqlitePandasTransport<'py>(&'py ());\n\n\n\nimpl_transport!(\n\n name = SqlitePandasTransport<'tp>,\n\n systems = SqliteTypeSystem => PandasTypeSystem,\n\n route = SqliteSource => PandasDestination<'tp>,\n\n mappings = {\n\n { Bool[bool] => Bool[bool] | conversion all }\n\n { Int8[i64] => I64[i64] | conversion all }\n\n { Int4[i32] => I64[i64] | conversion all }\n\n { Int2[i16] => I64[i64] | conversion all }\n", "file_path": "connectorx-python/src/pandas/transports/sqlite.rs", "rank": 51, "score": 4.033979473029046 }, { "content": "use pyo3::exceptions::PyRuntimeError;\n\nuse pyo3::PyErr;\n\nuse thiserror::Error;\n\n\n\n#[allow(unused)]\n\npub type Result<T> = std::result::Result<T, ConnectorAgentPythonError>;\n\n\n\n/// Errors that can be raised from this library.\n\n#[derive(Error, Debug)]\n\npub enum ConnectorAgentPythonError {\n\n /// The required type does not same as the schema defined.\n\n #[error(\"Unknown pandas data type: {0}.\")]\n\n UnknownPandasType(String),\n\n\n\n #[error(\"Python: {0}.\")]\n\n PythonError(String),\n\n\n\n #[error(transparent)]\n\n ConnectorAgentError(#[from] connectorx::ConnectorAgentError),\n\n\n", "file_path": "connectorx-python/src/errors.rs", "rank": 52, "score": 3.9610563334979627 }, { "content": "use crate::errors::ConnectorAgentError;\n\nuse fehler::{throw, throws};\n\n#[derive(Copy, Clone, Eq, PartialEq, Debug)]\n\npub enum DataOrder {\n\n RowMajor,\n\n ColumnMajor,\n\n}\n\n\n\n/// Given the supported data order from source and destination, decide the optimal data order\n\n/// for producing and writing.\n\n#[throws(ConnectorAgentError)]\n", "file_path": 
"connectorx/src/data_order.rs", "rank": 53, "score": 3.9554764937616467 }, { "content": "use chrono::{NaiveDate, NaiveDateTime, NaiveTime};\n\nuse rusqlite::types::Type;\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum SqliteTypeSystem {\n\n Bool(bool),\n\n Int8(bool),\n\n Int4(bool),\n\n Int2(bool),\n\n Real(bool),\n\n Text(bool),\n\n Date(bool),\n\n Time(bool),\n\n Timestamp(bool),\n\n Blob(bool),\n\n}\n\n\n\nimpl_typesystem! {\n\n system = SqliteTypeSystem,\n\n mappings = {\n", "file_path": "connectorx/src/sources/sqlite/typesystem.rs", "rank": 54, "score": 3.7392550783403493 }, { "content": "\"\"\"\n\nUsage:\n\n tpch-pandas.py [--conn=<conn>]\n\n\n\nOptions:\n\n --conn=<conn> The connection url to use [default: POSTGRES_URL].\n\n -h --help Show this screen.\n\n --version Show version.\n\n\"\"\"\n\n\n\nimport os\n\n\n\nfrom contexttimer import Timer\n\nfrom sqlalchemy import create_engine\n\nfrom docopt import docopt\n\nimport pandas as pd\n\nimport sqlite3\n\n\n\nif __name__ == \"__main__\":\n\n args = docopt(__doc__, version=\"1.0\")\n\n conn = os.environ[args[\"--conn\"]]\n\n table = os.environ[\"TPCH_TABLE\"]\n\n\n\n if conn.startswith(\"sqlite://\"):\n\n conn = sqlite3.connect(conn[9:])\n\n with Timer() as timer:\n\n df = pd.read_sql(\n\n f\"SELECT * FROM {table}\",\n\n conn,\n\n parse_dates=[\n\n \"l_shipdate\",\n\n \"l_commitdate\",\n\n \"l_receiptdate\",\n\n ],\n\n )\n\n print(f\"[Total] {timer.elapsed:.2f}s\")\n\n conn.close()\n\n\n\n else:\n\n engine = create_engine(conn)\n\n conn = engine.connect()\n\n with Timer() as timer:\n\n df = pd.read_sql(\n\n f\"SELECT * FROM {table}\",\n\n conn,\n\n parse_dates=[\n\n \"l_shipdate\",\n\n \"l_commitdate\",\n\n \"l_receiptdate\",\n\n ],\n\n )\n\n print(f\"[Total] {timer.elapsed:.2f}s\")\n\n conn.close()\n\n\n\n print(df.head())\n\n print(df.tail())\n\n print(len(df))\n", "file_path": "benchmarks/tpch-pandas.py", "rank": 55, "score": 3.495965309862383 }, { "content": "def read_sql(\n\n conn: str,\n\n query: 
Union[List[str], str],\n\n *,\n\n return_type: str = \"pandas\",\n\n protocol: str = \"binary\",\n\n partition_on: Optional[str] = None,\n\n partition_range: Optional[Tuple[int, int]] = None,\n\n partition_num: Optional[int] = None,\n\n) -> pd.DataFrame:\n\n \"\"\"\n\n Run the SQL query, download the data from database into a Pandas dataframe.\n\n\n\n Parameters\n\n ==========\n\n conn\n\n the connection string.\n\n query\n\n a SQL query or a list of SQL query.\n\n return_type\n\n the return type of this function. Currently only \"pandas\" is supported.\n\n partition_on\n\n the column to partition the result.\n\n partition_range\n\n the value range of the partition column.\n\n partition_num\n\n how many partition to generate.\n\n\n\n Examples\n\n ========\n\n Read a DataFrame from a SQL using a single thread:\n\n\n\n >>> postgres_url = \"postgresql://username:password@server:port/database\"\n\n >>> query = \"SELECT * FROM lineitem\"\n\n >>> read_sql(postgres_url, query)\n\n\n\n Read a DataFrame parallelly using 10 threads by automatically partitioning the provided SQL on the partition column:\n\n\n\n >>> postgres_url = \"postgresql://username:password@server:port/database\"\n\n >>> query = \"SELECT * FROM lineitem\"\n\n >>> read_sql(postgres_url, query, partition_on=\"partition_col\", partition_num=10)\n\n\n\n Read a DataFrame parallelly using 2 threads by manually providing two partition SQLs:\n\n\n\n >>> postgres_url = \"postgresql://username:password@server:port/database\"\n\n >>> queries = [\"SELECT * FROM lineitem WHERE partition_col <= 10\", \"SELECT * FROM lineitem WHERE partition_col > 10\"]\n\n >>> read_sql(postgres_url, queries)\n\n\n\n \"\"\"\n\n\n\n if isinstance(query, list) and len(query) == 1:\n\n query = query[0]\n\n\n\n if isinstance(query, str):\n\n if partition_on is None:\n\n queries = [query]\n\n partition_query = None\n\n else:\n\n partition_query = {\n\n \"query\": query,\n\n \"column\": partition_on,\n\n \"min\": partition_range[0] if 
partition_range else None,\n\n \"max\": partition_range[1] if partition_range else None,\n\n \"num\": partition_num,\n\n }\n\n queries = None\n\n elif isinstance(query, list):\n\n queries = query\n\n partition_query = None\n\n\n\n if partition_on is not None:\n\n raise ValueError(\"Partition on multiple queries is not supported.\")\n\n else:\n\n raise ValueError(\"query must be either str or a list of str\")\n\n\n\n return _read_sql(\n\n conn,\n\n return_type,\n\n queries=queries,\n\n protocol=protocol,\n\n partition_query=partition_query,\n", "file_path": "connectorx-python/connectorx/__init__.py", "rank": 56, "score": 3.137449077452092 }, { "content": "## Examples\n\n- Read a DataFrame from a SQL using a single thread\n\n\n\n ```python\n\n import connectorx as cx\n\n\n\n postgres_url = \"postgresql://username:password@server:port/database\"\n\n query = \"SELECT * FROM lineitem\"\n\n\n\n cx.read_sql(postgres_url, query)\n\n ```\n\n\n\n- Read a DataFrame parallelly using 10 threads by automatically partitioning the provided SQL on the partition column (`partition_range` will be automatically queried if not given)\n\n\n\n ```python\n\n import connectorx as cx\n\n\n\n postgres_url = \"postgresql://username:password@server:port/database\"\n\n query = \"SELECT * FROM lineitem\"\n\n\n\n cx.read_sql(postgres_url, query, partition_on=\"l_orderkey\", partition_num=10)\n\n ```\n\n\n\n- Read a DataFrame parallelly using 2 threads by manually providing two partition SQLs (the schemas of all the query results should be same)\n\n\n\n ```python\n\n import connectorx as cx\n\n\n\n postgres_url = \"postgresql://username:password@server:port/database\"\n\n queries = [\"SELECT * FROM lineitem WHERE l_orderkey <= 30000000\", \"SELECT * FROM lineitem WHERE l_orderkey > 30000000\"]\n\n\n\n cx.read_sql(postgres_url, queries)\n\n\n\n ```\n\n \n\n- Read a DataFrame parallelly using 4 threads from a more complex query\n\n\n\n ```python\n\n import connectorx as cx\n\n\n\n postgres_url = 
\"postgresql://username:password@server:port/database\"\n\n query = f\"\"\"\n\n SELECT l_orderkey,\n\n SUM(l_extendedprice * ( 1 - l_discount )) AS revenue,\n\n o_orderdate,\n\n o_shippriority\n\n FROM customer,\n\n orders,\n\n lineitem\n\n WHERE c_mktsegment = 'BUILDING'\n\n AND c_custkey = o_custkey\n\n AND l_orderkey = o_orderkey\n\n AND o_orderdate < DATE '1995-03-15'\n\n AND l_shipdate > DATE '1995-03-15'\n\n GROUP BY l_orderkey,\n\n o_orderdate,\n\n o_shippriority \n\n \"\"\"\n\n\n\n cx.read_sql(postgres_url, query, partition_on=\"l_orderkey\", partition_num=4)\n\n\n\n ```\n\n\n\n# Next Plan\n\n\n\nCheckout our [discussions](https://github.com/sfu-db/connector-x/discussions) to participate in deciding our next plan!\n", "file_path": "README.md", "rank": 57, "score": 3.1296279382906804 }, { "content": "// Each variant in DataType represents a type that connectorx currently\n\n// supports to read from a data source and write into a destination.\n\n// When adding a new supported type T and associate it to the native representation N, please do\n\n// 1. Add a T variant to DataType.\n\n// 2. Add `DataType::T => N` to the macro impl_typesystem!.\n\n// 3. 
Add `DataType::T => N` to the macro impl_transmit!.\n\n//\n\n\n\nuse chrono::{DateTime, Utc};\n\n/// This is a dummy type system used in this library.\n\n/// For all the sources, their output values must be one of the types defined by DummyTypeSystem.\n\n/// For all the destinations, they must support writing any value whose type is defined by DummyTypeSystem.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]\n\npub enum DummyTypeSystem {\n\n F64(bool),\n\n I64(bool),\n\n Bool(bool),\n\n String(bool),\n\n DateTime(bool),\n\n}\n", "file_path": "connectorx/src/dummy_typesystem.rs", "rank": 58, "score": 2.94928201841463 }, { "content": " { Bool => bool }\n\n { Int8 => i64 }\n\n { Int4 => i32 }\n\n { Int2 => i16 }\n\n { Real => f64 }\n\n { Text => Box<str> }\n\n { Date => NaiveDate}\n\n { Time => NaiveTime}\n\n { Timestamp => NaiveDateTime}\n\n { Blob => Vec<u8>}\n\n }\n\n}\n\n\n\nimpl From<Type> for SqliteTypeSystem {\n\n fn from(ty: Type) -> SqliteTypeSystem {\n\n use SqliteTypeSystem::*;\n\n match ty {\n\n Type::Integer => Int8(true),\n\n Type::Real => Real(true),\n\n Type::Text => Text(true),\n", "file_path": "connectorx/src/sources/sqlite/typesystem.rs", "rank": 60, "score": 2.6141823013090018 }, { "content": "/// PostgresSource => PandasDestination<'py>,\n\n/// ([PostgresDTypes::Float4], [PandasTypes::F64]) => (f32, f64) conversion all\n\n/// }\n\n/// ```\n\n/// This implements `Transport` to `PostgresPandasTransport<'py>`.\n\n/// The lifetime used must be declare in the first argument in the bracket.\n\n#[macro_export]\n\nmacro_rules! 
impl_transport {\n\n (\n\n name = $TP:ty,\n\n systems = $TSS:tt => $TSD:tt,\n\n route = $S:ty => $D:ty,\n\n mappings = {\n\n $(\n\n { $($TOKENS:tt)+ }\n\n )*\n\n }\n\n ) => {\n\n $(\n\n impl_transport!(@cvt $TP, $($TOKENS)+);\n", "file_path": "connectorx/src/macros.rs", "rank": 61, "score": 2.5943528945342322 }, { "content": " zero_tuple = false;\n\n break;\n\n }\n\n Ok(None) => {}\n\n Err(e) => {\n\n debug!(\"cannot get metadata for '{}', try next query: {}\", query, e);\n\n error = Some(e);\n\n zero_tuple = false;\n\n }\n\n }\n\n }\n\n\n\n if !success {\n\n if zero_tuple {\n\n // try to use COPY command get the column headers\n\n let copy_query = format!(\"COPY ({}) TO STDOUT WITH CSV HEADER\", self.queries[0]);\n\n let mut reader = conn.copy_out(&*copy_query)?;\n\n let mut buf = String::new();\n\n reader.read_line(&mut buf)?;\n\n self.names = buf[0..buf.len() - 1] // remove last '\\n'\n", "file_path": "connectorx/src/sources/postgres/mod.rs", "rank": 62, "score": 2.5943528945342322 }, { "content": "#![feature(generic_associated_types)]\n\n#![feature(log_syntax)]\n\n#![allow(incomplete_features)]\n\n\n\n#[doc(hidden)]\n\npub mod pg;\n\n#[doc(hidden)]\n\npub mod s3;\n\npub mod typesystem;\n\n#[macro_use]\n\npub mod macros;\n\npub(crate) mod constants;\n\npub mod data_order;\n\npub mod destinations;\n\npub mod dispatcher;\n\npub mod dummy_typesystem;\n\npub mod errors;\n\npub mod source_router;\n\npub mod sources;\n\npub mod sql;\n", "file_path": "connectorx/src/lib.rs", "rank": 63, "score": 2.555583068733442 }, { "content": " py: Python<'py>,\n\n nrows: Option<usize>,\n\n schema: Option<Vec<PandasTypeSystem>>,\n\n buffers: Option<&'py PyList>,\n\n buffer_column_index: Option<Vec<Vec<usize>>>,\n\n dataframe: Option<&'py PyAny>, // Using this field other than the return purpose should be careful: this refers to the same data as buffers\n\n}\n\n\n\nimpl<'a> PandasDestination<'a> {\n\n pub fn new(py: Python<'a>) -> Self {\n\n PandasDestination {\n\n py,\n\n nrows: 
None,\n\n schema: None,\n\n buffers: None,\n\n buffer_column_index: None,\n\n dataframe: None,\n\n }\n\n }\n\n\n", "file_path": "connectorx-python/src/pandas/destination.rs", "rank": 64, "score": 2.5366294632661264 }, { "content": " _ => unimplemented!(\"{}\", ty),\n\n }\n\n }\n\n}\n\n\n\nimpl From<(Option<&str>, Type)> for SqliteTypeSystem {\n\n fn from(types: (Option<&str>, Type)) -> SqliteTypeSystem {\n\n use SqliteTypeSystem::*;\n\n match types {\n\n // derive from column's declare type, some rules refer to:\n\n // https://www.sqlite.org/datatype3.html#affname\n\n (Some(decl_type), ty) => {\n\n let s = decl_type.to_lowercase();\n\n match s.as_str() {\n\n \"int4\" => Int4(true),\n\n \"int2\" => Int2(true),\n\n \"boolean\" | \"bool\" => Bool(true),\n\n \"date\" => Date(true),\n\n \"time\" => Time(true),\n\n \"datetime\" | \"timestamp\" => Timestamp(true),\n", "file_path": "connectorx/src/sources/sqlite/typesystem.rs", "rank": 65, "score": 2.499553346620979 }, { "content": " { Char => i8 }\n\n { Text | BpChar | VarChar | Enum => &'r str }\n\n { ByteA => Vec<u8> }\n\n { Time => NaiveTime }\n\n { Timestamp => NaiveDateTime }\n\n { TimestampTz => DateTime<Utc> }\n\n { Date => NaiveDate }\n\n { UUID => Uuid }\n\n { JSON | JSONB => Value }\n\n }\n\n}\n\n\n\nimpl<'a> From<&'a Type> for PostgresTypeSystem {\n\n fn from(ty: &'a Type) -> PostgresTypeSystem {\n\n use PostgresTypeSystem::*;\n\n match ty.name() {\n\n \"int2\" => Int2(true),\n\n \"int4\" => Int4(true),\n\n \"int8\" => Int8(true),\n\n \"float4\" => Float4(true),\n", "file_path": "connectorx/src/sources/postgres/typesystem.rs", "rank": 66, "score": 2.4814187760581725 }, { "content": " }\n\n}\n\n\n\n// Link PostgresDTypes back to the one defiend by the postgres crate.\n\nimpl<'a> From<PostgresTypeSystem> for Type {\n\n fn from(ty: PostgresTypeSystem) -> Type {\n\n use PostgresTypeSystem::*;\n\n match ty {\n\n Int2(_) => Type::INT2,\n\n Int4(_) => Type::INT4,\n\n Int8(_) => Type::INT8,\n\n Float4(_) => 
Type::FLOAT4,\n\n Float8(_) => Type::FLOAT8,\n\n Numeric(_) => Type::NUMERIC,\n\n Bool(_) => Type::BOOL,\n\n Text(_) => Type::TEXT,\n\n BpChar(_) => Type::BPCHAR,\n\n VarChar(_) => Type::VARCHAR,\n\n Char(_) => Type::CHAR,\n\n ByteA(_) => Type::BYTEA,\n", "file_path": "connectorx/src/sources/postgres/typesystem.rs", "rank": 67, "score": 2.445927755604063 }, { "content": " /// type conversion using `convert_type` to get value with type T2, which is associated to\n\n /// TSD. Finally, it will write the value with type T2 to the destination.\n\n fn process<'s, 'd, 'r>(\n\n ts1: Self::TSS,\n\n ts2: Self::TSD,\n\n src: &'r mut <<Self::S as Source>::Partition as SourcePartition>::Parser<'s>,\n\n dst: &'r mut <Self::D as Destination>::Partition<'d>,\n\n ) -> Result<()>;\n\n\n\n #[allow(clippy::type_complexity)]\n\n fn processor<'s, 'd>(\n\n ts1: Self::TSS,\n\n ts2: Self::TSD,\n\n ) -> Result<\n\n fn(\n\n src: &mut <<Self::S as Source>::Partition as SourcePartition>::Parser<'s>,\n\n dst: &mut <Self::D as Destination>::Partition<'d>,\n\n ) -> Result<()>,\n\n >;\n\n}\n\n\n", "file_path": "connectorx/src/typesystem.rs", "rank": 68, "score": 2.411437655035617 }, { "content": " std::any::type_name::<DateTime<Utc>>()\n\n }\n\n}\n\n\n\nimpl<'a> PandasColumn<DateTime<Utc>> for DateTimeColumn<'a> {\n\n #[throws(ConnectorAgentError)]\n\n fn write(&mut self, val: DateTime<Utc>) {\n\n unsafe { *self.data.get_unchecked_mut(self.i) = val.timestamp_nanos() };\n\n self.i += 1;\n\n }\n\n}\n\n\n\nimpl<'a> PandasColumn<Option<DateTime<Utc>>> for DateTimeColumn<'a> {\n\n #[throws(ConnectorAgentError)]\n\n fn write(&mut self, val: Option<DateTime<Utc>>) {\n\n // numpy use i64::MIN as NaT\n\n unsafe {\n\n *self.data.get_unchecked_mut(self.i) =\n\n val.map(|t| t.timestamp_nanos()).unwrap_or(i64::MIN);\n\n };\n", "file_path": "connectorx-python/src/pandas/pandas_columns/datetime.rs", "rank": 69, "score": 2.3452954910540003 }, { "content": " }\n\n}\n\n\n\nimpl PyString {\n\n // the val should be 
same as the val used for new\n\n pub unsafe fn write(&mut self, data: &[u8], info: StringInfo) {\n\n match info {\n\n StringInfo::ASCII(len) => {\n\n let pyobj = PyASCIIObject::from_owned(self.0.clone());\n\n let buf = std::slice::from_raw_parts_mut(\n\n (pyobj as *mut PyASCIIObject).offset(1) as *mut u8,\n\n len as usize,\n\n );\n\n\n\n buf.copy_from_slice(data);\n\n }\n\n StringInfo::UCS1(len) => {\n\n let pyobj = PyCompactUnicodeObject::from_owned(self.0.clone());\n\n let buf = std::slice::from_raw_parts_mut(\n\n (pyobj as *mut PyCompactUnicodeObject).offset(1) as *mut u8,\n", "file_path": "connectorx-python/src/pandas/pystring.rs", "rank": 70, "score": 2.267551032316407 }, { "content": "# Benchmark Setup\n\n\n\n## Postgres (Docker)\n\n\n\n1. Download PostgreSQL from docker\n\n```\n\ndocker pull postgres\n\n```\n\n\n\n2. Create a directory for mount point (Optional)\n\n```\n\nmkdir -p $YOUR_DOCKER_DIR/docker/volumes/postgres\n\n```\n\n\n\n3. Run PostgreSQL:\n\n```\n\n# With local mount point\n\ndocker run --rm --name pg-connector -e POSTGRES_USER=postgres -e POSTGRES_DB=tpch -e POSTGRES_PASSWORD=postgres -d -p 5432:5432 -v $YOUR_DOCKER_DIR/docker/volumes/postgres:/var/lib/postgresql/data postgres -c shared_buffers=1024MB\n\n\n\n# Without local mount point\n\ndocker run --rm --name pg-connector -e POSTGRES_USER=postgres -e POSTGRES_DB=tpch -e POSTGRES_PASSWORD=postgres -d -p 5432:5432 -c shared_buffers=1024MB\n\n```\n\n\n\n## TPC-H\n\n\n\n1. Download TPC-H toolkit and compile:\n\n```\n\ngit clone https://github.com/gregrahn/tpch-kit.git\n\ncd tpch-kit/dbgen && make MACHINE=LINUX DATABASE=POSTGRESQL\n\n```\n\n\n\n2. Generate `LINEITEM` table with scale factor 10\n\n```\n\n# Generate all tables\n\n./dbgen -s 10\n\n\n\n# Alternatively you can only generate LINEITEM table using -T option\n\n./dbgen -s 10 -T L\n\n```\n\n\n\n3. 
Create table and load schema\n\n```\n\ncreatedb -h localhost -U postgres tpch\n\npsql -h localhost -U postgres -d tpch < dss.ddl\n\n```\n\n\n\n4. Load data into PostgreSQL\n\n```\n\npsql -h localhost -U postgres -d tpch -c \"\\copy LINEITEM FROM '$YOUR_TPCH_DIR/tpch-kit/dbgen/lineitem.tbl' DELIMITER '|' ENCODING 'LATIN1';\"\n\n```\n\n\n\n5. Create index for `LINEITEM` on `l_orderkey`\n\n```\n\npsql -h localhost -U postgres -d tpch -c \"CREATE INDEX lineitem_l_orderkey_idx ON LINEITEM USING btree (l_orderkey);\"\n\n```\n", "file_path": "Benchmark.md", "rank": 71, "score": 2.185894147992166 }, { "content": " schema: &str,\n\n) -> HashMap<String, Vec<(*const FFI_ArrowArray, *const FFI_ArrowSchema)>>\n\nwhere\n\n S: AsRef<str>,\n\n{\n\n let schema = Arc::new(Schema::from(&from_str::<Value>(schema)?)?);\n\n let mut futs: FuturesOrdered<_> = sqls\n\n .iter()\n\n .map(|sql| read_sql_as_batch(conn, sql, schema.clone()))\n\n .collect();\n\n let mut table = HashMap::new();\n\n debug!(\"start queries\");\n\n let start = Instant::now();\n\n while let Some(rb) = futs.next().await {\n\n if let Some(batches) = rb? 
{\n\n for batch in batches {\n\n for (i, f) in batch.schema().fields().iter().enumerate() {\n\n use arrow::datatypes::DataType::*;\n\n match f.data_type() {\n\n Null | Boolean | Int8 | Int16 | Int32 | Int64 | UInt8 | UInt16 | UInt32\n", "file_path": "connectorx/src/pg.rs", "rank": 72, "score": 2.153340896743519 }, { "content": "## Time chart, lower is better.\n\n\n\n<p align=\"center\"><img alt=\"time chart\" src=\"https://raw.githubusercontent.com/sfu-db/connector-agent/main/assets/time.jpg\"/></p>\n\n\n\n## Memory consumption chart, lower is better.\n\n\n\n<p align=\"center\"><img alt=\"memory chart\" src=\"https://raw.githubusercontent.com/sfu-db/connector-agent/main/assets/memory.jpg\"/></p>\n\n\n\nIn conclusion, ConnectorX uses up to **3x** less memory and **11x** less time.\n\n\n\n## How does ConnectorX achieve a lightening speed while keeping the memory footprint low?\n\n\n\nWe observe that existing solutions more or less do data copy multiple times when downloading the data.\n\nAdditionally, implementing a data intensive application in Python brings additional cost.\n\n\n\nConnectorX is written in Rust and follows \"zero-copy\" principle.\n\nThis allows it to make full use of the CPU by becoming cache and branch predictor friendly. Moreover, the architecture of ConnectorX ensures the data will be copied exactly once, directly from the source to the destination.\n\n\n\n# Detailed Usage and Examples\n\n\n\n## API\n\n\n\n```python\n\nconnectorx.read_sql(conn: str, query: Union[List[str], str], *, return_type: str = \"pandas\", protocol: str = \"binary\", partition_on: Optional[str] = None, partition_range: Optional[Tuple[int, int]] = None, partition_num: Optional[int] = None)\n\n```\n\n\n\nRun the SQL query, download the data from database into a Pandas dataframe.\n\n\n\n## Parameters\n\n- **conn**(str): Connection string uri. 
Currently only PostgreSQL is supported.\n\n- **query**(string or list of string): SQL query or list of SQL queries for fetching data.\n\n- **return_type**(string, optional(default `\"pandas\"`)): The return type of this function. Currently only \"pandas\" is supported.\n\n- **partition_on**(string, optional(default `None`)): The column to partition the result.\n\n- **partition_range**(tuple of int, optional(default `None`)): The value range of the partition column.\n\n- **partition_num**(int, optional(default `None`)): The number of partitions to generate.\n\n\n", "file_path": "README.md", "rank": 73, "score": 1.921820422186613 }, { "content": "# ConnectorX [![status][ci_badge]][ci_page] [![docs][docs_badge]][docs_page]\n\n\n\n[ci_badge]: https://github.com/sfu-db/connector-agent/workflows/ci/badge.svg\n\n[ci_page]: https://github.com/sfu-db/connector-agent/actions\n\n[docs_badge]: https://github.com/sfu-db/connector-agent/workflows/docs/badge.svg\n\n[docs_page]: https://sfu-db.github.io/connector-agent/connector_agent/\n\n\n\nLoad data from <img src=\"https://raw.githubusercontent.com/sfu-db/connector-agent/main/assets/sources.gif\" width=\"6.5%\" style=\"margin-bottom: -2px\"/> to <img src=\"https://raw.githubusercontent.com/sfu-db/connector-agent/main/assets/destinations.gif\" width=\"7%\" style=\"margin-bottom: -2px\"/>, the fastest way.\n\n\n\n**Currently only support Postgres to Pandas. 
MySQL is in development.**\n\nFor more data sources, please check out our [discussion](https://github.com/sfu-db/connector-x/discussions/61).\n\n\n\nConnectorX enables you to load data from databases into Python in the fastest and most memory efficient way.\n\n\n\nWhat you need is one line of code:\n\n\n\n```python\n\nimport connectorx as cx\n\n\n\ncx.read_sql(\"postgresql://username:password@server:port/database\", \"SELECT * FROM lineitem\")\n\n```\n\n\n\nOptionally, you can accelerate the data loading using parallelism by specifying a partition column.\n\n\n\n```python\n\nimport connectorx as cx\n\n\n\ncx.read_sql(\"postgresql://username:password@server:port/database\", \"SELECT * FROM lineitem\", partition_on=\"l_orderkey\", partition_num=10)\n\n```\n\n\n\nThe function will partition the query by **evenly** splitting the specified column to the amount of partitions.\n\nConnectorX will assign one thread for each partition to load and write data in parallel.\n\nCurrently, we support partitioning on **integer** columns for **SPJA** queries.\n\n\n\nCheck out more detailed usage and examples [here](#detailed-usage-and-examples).\n\n\n\n# Installation\n\n\n\n```bash\n\npip install connectorx\n\n```\n\n\n\n# Performance\n\n\n\nWe compared different solutions in Python that provides the `read_sql` function, by loading a 10x TPC-H lineitem table (8.6GB) from Postgres into a DataFrame, with 4 cores parallelism.\n\n\n", "file_path": "README.md", "rank": 74, "score": 1.0863608538362337 } ]
Rust
rpc/src/streaming.rs
jjs-dev/commons
f53ebfa3bd973aa711f79f4d824fa297ac21b1ad
use futures_util::StreamExt; use std::{convert::Infallible, marker::PhantomData, pin::Pin}; use tokio::io::AsyncBufReadExt; pub struct Streaming<E, F>(Infallible, PhantomData<(E, F)>); impl<E, F> crate::Direction for Streaming<E, F> where E: serde::Serialize + serde::de::DeserializeOwned + Send + Sync + 'static, F: serde::Serialize + serde::de::DeserializeOwned + Send + Sync + 'static, { type Tx = StreamingTx<E, F>; type Rx = StreamingRx<E, F>; } #[derive(serde::Serialize, serde::Deserialize)] enum Item<E, F> { Event(E), Finish(F), } pub struct StreamingTx<E, F> { phantom: PhantomData<(E, F)>, sender: hyper::body::Sender, } impl<E, F> crate::Transmit for StreamingTx<E, F> where E: serde::Serialize + Send + Sync + 'static, F: serde::Serialize + Send + Sync + 'static, { type BatchError = SendError; type BatchData = (futures_util::stream::BoxStream<'static, E>, F); type BatchFuture = futures_util::future::BoxFuture<'static, Result<(), SendError>>; fn from_body_sender(sender: hyper::body::Sender) -> Self { Self { phantom: PhantomData, sender, } } fn send_batch(mut self, mut batch: Self::BatchData) -> Self::BatchFuture { Box::pin(async move { while let Some(event) = batch.0.next().await { self.send_event(event).await?; } self.finish(batch.1).await }) } } #[derive(Debug, thiserror::Error)] pub enum SendError { #[error("serialization failed")] Serialize(serde_json::Error), #[error("data not sent to client")] Network(hyper::Error), } impl<E: serde::Serialize, F: serde::Serialize> StreamingTx<E, F> { async fn priv_send(&mut self, item: Item<E, F>) -> Result<(), SendError> { let mut message = serde_json::to_vec(&item).map_err(SendError::Serialize)?; message.push(b'\n'); self.sender .send_data(message.into()) .await .map_err(SendError::Network) } pub async fn send_event(&mut self, event: E) -> Result<(), SendError> { self.priv_send(Item::Event(event)).await } pub async fn finish(&mut self, finish: F) -> Result<(), SendError> { self.priv_send(Item::Finish(finish)).await } } 
impl<E, F> StreamingTx<E, F> {} pub struct StreamingRx<E, F> { phantom: PhantomData<(E, F)>, body: tokio::io::BufReader<Pin<Box<dyn tokio::io::AsyncRead + Send + Sync + 'static>>>, finish: Option<F>, } impl<E, F> crate::Receive for StreamingRx<E, F> where E: serde::de::DeserializeOwned + Send + 'static, F: serde::de::DeserializeOwned + Send + 'static, { type BatchData = (Vec<E>, F); type BatchError = RecvError; type BatchFuture = futures_util::future::BoxFuture<'static, Result<Self::BatchData, RecvError>>; fn from_body(body: hyper::Body) -> Self { let body = body.map(|chunk| { chunk.map_err(|hyper_err| std::io::Error::new(std::io::ErrorKind::Other, hyper_err)) }); Self { phantom: PhantomData, body: tokio::io::BufReader::new(Box::pin(tokio::io::stream_reader(body))), finish: None, } } fn recv_batch(mut self) -> Self::BatchFuture { Box::pin(async move { let mut events = Vec::new(); loop { let item = self.recv_next_item().await?; match item { Item::Event(ev) => events.push(ev), Item::Finish(fin) => break Ok((events, fin)), } } }) } } #[derive(Debug, thiserror::Error)] pub enum RecvError { #[error("unable to read response")] Network(hyper::Error), #[error("io error")] Io(std::io::Error), #[error("parsing failed")] Deserialize(serde_json::Error), #[error("unexpected data")] Unexpected, } impl<E: serde::de::DeserializeOwned, F: serde::de::DeserializeOwned> StreamingRx<E, F> { async fn recv_next_item(&mut self) -> Result<Item<E, F>, RecvError> { let mut line = String::new(); self.body .read_line(&mut line) .await .map_err(RecvError::Io)?; Ok(serde_json::from_str(line.trim()).map_err(RecvError::Deserialize)?) } pub async fn next_event(&mut self) -> Result<Option<E>, RecvError> { if self.finish.is_some() { return Ok(None); } match self.recv_next_item().await? 
{ Item::Event(ev) => Ok(Some(ev)), Item::Finish(fin) => { self.finish = Some(fin); Ok(None) } } } pub async fn finish(mut self) -> Result<F, RecvError> { if let Some(f) = self.finish { return Ok(f); } match self.recv_next_item().await? { Item::Event(_) => Err(RecvError::Unexpected), Item::Finish(fin) => Ok(fin), } } }
use futures_util::StreamExt; use std::{convert::Infallible, marker::PhantomData, pin::Pin}; use tokio::io::AsyncBufReadExt; pub struct Streaming<E, F>(Infallible, PhantomData<(E, F)>); impl<E, F> crate::Direction for Streaming<E, F> where E: serde::Serialize + serde::de::DeserializeOwned + Send + Sync + 'static, F: serde::Serialize + serde::de::DeserializeOwned + Send + Sync + 'static, { type Tx = StreamingTx<E, F>; type Rx = StreamingRx<E, F>; } #[derive(serde::Serialize, serde::Deserialize)] enum Item<E, F> { Event(E), Finish(F), } pub struct StreamingTx<E, F> { phantom: PhantomData<(E, F)>, sender: hyper::body::Sender, } impl<E, F> crate::Transmit for StreamingTx<E, F> where E: serde::Serialize + Send + Sync + 'static, F: serde::Serialize + Send + Sync + 'static, { type BatchError = SendError; type BatchData = (futures_util::stream::BoxStream<'static, E>, F); type BatchFuture = futures_util::future::BoxFuture<'static, Result<(), SendError>>; fn from_body_sender(sender: hyper::body::Sender) -> Self { Self { phantom: PhantomData, sender, } }
} #[derive(Debug, thiserror::Error)] pub enum SendError { #[error("serialization failed")] Serialize(serde_json::Error), #[error("data not sent to client")] Network(hyper::Error), } impl<E: serde::Serialize, F: serde::Serialize> StreamingTx<E, F> { async fn priv_send(&mut self, item: Item<E, F>) -> Result<(), SendError> { let mut message = serde_json::to_vec(&item).map_err(SendError::Serialize)?; message.push(b'\n'); self.sender .send_data(message.into()) .await .map_err(SendError::Network) } pub async fn send_event(&mut self, event: E) -> Result<(), SendError> { self.priv_send(Item::Event(event)).await } pub async fn finish(&mut self, finish: F) -> Result<(), SendError> { self.priv_send(Item::Finish(finish)).await } } impl<E, F> StreamingTx<E, F> {} pub struct StreamingRx<E, F> { phantom: PhantomData<(E, F)>, body: tokio::io::BufReader<Pin<Box<dyn tokio::io::AsyncRead + Send + Sync + 'static>>>, finish: Option<F>, } impl<E, F> crate::Receive for StreamingRx<E, F> where E: serde::de::DeserializeOwned + Send + 'static, F: serde::de::DeserializeOwned + Send + 'static, { type BatchData = (Vec<E>, F); type BatchError = RecvError; type BatchFuture = futures_util::future::BoxFuture<'static, Result<Self::BatchData, RecvError>>; fn from_body(body: hyper::Body) -> Self { let body = body.map(|chunk| { chunk.map_err(|hyper_err| std::io::Error::new(std::io::ErrorKind::Other, hyper_err)) }); Self { phantom: PhantomData, body: tokio::io::BufReader::new(Box::pin(tokio::io::stream_reader(body))), finish: None, } } fn recv_batch(mut self) -> Self::BatchFuture { Box::pin(async move { let mut events = Vec::new(); loop { let item = self.recv_next_item().await?; match item { Item::Event(ev) => events.push(ev), Item::Finish(fin) => break Ok((events, fin)), } } }) } } #[derive(Debug, thiserror::Error)] pub enum RecvError { #[error("unable to read response")] Network(hyper::Error), #[error("io error")] Io(std::io::Error), #[error("parsing failed")] Deserialize(serde_json::Error), 
#[error("unexpected data")] Unexpected, } impl<E: serde::de::DeserializeOwned, F: serde::de::DeserializeOwned> StreamingRx<E, F> { async fn recv_next_item(&mut self) -> Result<Item<E, F>, RecvError> { let mut line = String::new(); self.body .read_line(&mut line) .await .map_err(RecvError::Io)?; Ok(serde_json::from_str(line.trim()).map_err(RecvError::Deserialize)?) } pub async fn next_event(&mut self) -> Result<Option<E>, RecvError> { if self.finish.is_some() { return Ok(None); } match self.recv_next_item().await? { Item::Event(ev) => Ok(Some(ev)), Item::Finish(fin) => { self.finish = Some(fin); Ok(None) } } } pub async fn finish(mut self) -> Result<F, RecvError> { if let Some(f) = self.finish { return Ok(f); } match self.recv_next_item().await? { Item::Event(_) => Err(RecvError::Unexpected), Item::Finish(fin) => Ok(fin), } } }
fn send_batch(mut self, mut batch: Self::BatchData) -> Self::BatchFuture { Box::pin(async move { while let Some(event) = batch.0.next().await { self.send_event(event).await?; } self.finish(batch.1).await }) }
function_block-function_prefix_line
[ { "content": "/// Creates new channel, returning one sender and one receiver.\n\n/// Other can be created using `Clone::clone`\n\npub fn channel<T: Send + 'static>() -> (Sender<T>, Receiver<T>) {\n\n let inner = Inner {\n\n q: Mutex::new(Queue {\n\n values: VecDeque::new(),\n\n wakers: VecDeque::new(),\n\n }),\n\n senders_dummy: Arc::new(SenderDummyType),\n\n };\n\n\n\n let inner = Arc::new(inner);\n\n let tx = Sender {\n\n inner: inner.clone(),\n\n dummy: Some(inner.senders_dummy.clone()),\n\n };\n\n let rx = Receiver { inner };\n\n (tx, rx)\n\n}\n\n\n\nimpl<T> Sender<T> {\n\n /// Sends new value to channel\n", "file_path": "async-mpmc/src/lib.rs", "rank": 1, "score": 110470.82479252815 }, { "content": "/// Produces one Sender and one Receiver, tied together.\n\n/// Use `Clone` if needed.\n\npub fn multiwake() -> (Sender, Receiver) {\n\n let inner = Inner {\n\n waiters: vec![],\n\n generation: 0,\n\n senders_count: 1,\n\n };\n\n let inner = Arc::new(Mutex::new(inner));\n\n\n\n let tx = Sender {\n\n inner: inner.clone(),\n\n };\n\n let rx = Receiver {\n\n inner,\n\n last_observed_generation: 0,\n\n };\n\n (tx, rx)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "multiwake/src/lib.rs", "rank": 2, "score": 92392.32919988089 }, { "content": "/// Type that can be constructed from the http request.\n\npub trait Receive: Send + 'static {\n\n type BatchData;\n\n type BatchError: std::error::Error + Send + Sync + 'static;\n\n type BatchFuture: std::future::Future<Output = Result<Self::BatchData, Self::BatchError>>;\n\n\n\n fn from_body(req: hyper::Body) -> Self;\n\n\n\n fn recv_batch(self) -> Self::BatchFuture;\n\n}\n\n\n", "file_path": "rpc/src/lib.rs", "rank": 3, "score": 92002.49768480945 }, { "content": "/// Type that can produce http request.\n\npub trait Transmit: Send + 'static {\n\n type BatchError: std::error::Error + Send + Sync + 'static;\n\n type BatchData;\n\n type BatchFuture: std::future::Future<Output = Result<(), Self::BatchError>>;\n\n\n\n fn 
from_body_sender(send: hyper::body::Sender) -> Self;\n\n\n\n fn send_batch(self, uf: Self::BatchData) -> Self::BatchFuture;\n\n}\n\n\n", "file_path": "rpc/src/lib.rs", "rank": 4, "score": 92002.49768480945 }, { "content": "/// Type that can handle requests for specific route.\n\n/// `C` is context type.\n\npub trait Handler<R: crate::Route>: Clone + Send + Sync + 'static {\n\n /// Fatal error that can occur during processing request.\n\n /// this error will be logged, and response will be aborted.\n\n type Error: std::fmt::Display + Send + 'static;\n\n type Fut: Future<Output = Result<(), Self::Error>> + Send + 'static;\n\n fn handle(\n\n self,\n\n request: <R::Request as Direction>::Rx,\n\n response: <R::Response as Direction>::Tx,\n\n ) -> Self::Fut;\n\n}\n\n\n", "file_path": "rpc/src/server.rs", "rank": 5, "score": 90800.32504403355 }, { "content": "#[derive(Debug)]\n\nstruct ErrorWrapper<E>(E);\n\n\n\nimpl<E: std::error::Error + 'static> std::fmt::Display for ErrorWrapper<E> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let mut err = &self.0 as &(dyn std::error::Error + 'static);\n\n loop {\n\n writeln!(f, \"{}\", err)?;\n\n err = match err.source() {\n\n Some(s) => s,\n\n None => break,\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<E: std::error::Error + 'static> std::error::Error for ErrorWrapper<E> {}\n\n\n\n#[tokio::main]\n\nasync fn main() {\n", "file_path": "puller/examples/cli.rs", "rank": 6, "score": 76741.10442040119 }, { "content": "struct SenderDummyType;\n\n\n", "file_path": "async-mpmc/src/lib.rs", "rank": 7, "score": 72587.68804305473 }, { "content": "/// One queue item\n\nstruct Item<T> {\n\n /// Value itself\n\n value: T,\n\n /// Span of request\n\n span: tracing::Span,\n\n}\n\n\n", "file_path": "async-mpmc/src/lib.rs", "rank": 8, "score": 68394.46552658208 }, { "content": "/// Get \"Pi\" number, with growing precision\n\nstruct StreamingPi;\n\n\n\nimpl rpc::Route for StreamingPi {\n\n const ENDPOINT: &'static 
str = \"/pi\";\n\n type Request = rpc::Unary<StreamingPiRequest>;\n\n type Response = rpc::Streaming<StreamingPiResponse, ()>;\n\n}\n\n\n\n// server\n", "file_path": "rpc/tests/calc.rs", "rank": 9, "score": 57198.6467522445 }, { "content": "#[derive(serde::Serialize, serde::Deserialize)]\n\nstruct StreamingPiResponse {\n\n digits_chunk: String,\n\n}\n", "file_path": "rpc/tests/calc.rs", "rank": 10, "score": 54945.739759845776 }, { "content": "#[derive(serde::Serialize, serde::Deserialize)]\n\nstruct StreamingPiRequest {}\n\n\n", "file_path": "rpc/tests/calc.rs", "rank": 11, "score": 54945.739759845776 }, { "content": "#[derive(Clone)]\n\nstruct Handler;\n\n\n\nimpl rpc::Handler<Echo> for Handler {\n\n type Error = Box<dyn std::error::Error + Send + Sync + 'static>;\n\n type Fut = std::pin::Pin<\n\n Box<dyn std::future::Future<Output = Result<(), Self::Error>> + Send + Sync + 'static>,\n\n >;\n\n fn handle(self, rx: rpc::UnaryRx<EchoResponse>, tx: rpc::UnaryTx<EchoResponse>) -> Self::Fut {\n\n Box::pin(async move {\n\n let data = rx.recv().await?;\n\n tx.send(data).await?;\n\n Ok(())\n\n })\n\n }\n\n}\n\n\n\nasync fn server_main() -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {\n\n let mut router = rpc::RouterBuilder::new();\n\n router.add_route(Handler);\n\n let router = router.build().as_make_service();\n", "file_path": "rpc/examples/http.rs", "rank": 12, "score": 37410.19768551695 }, { "content": "struct Echo;\n\n\n\nimpl rpc::Route for Echo {\n\n const ENDPOINT: &'static str = \"/echo\";\n\n type Request = rpc::Unary<EchoRequest>;\n\n type Response = rpc::Unary<EchoResponse>;\n\n}\n\n\n", "file_path": "rpc/examples/http.rs", "rank": 13, "score": 37410.19768551695 }, { "content": "struct Inner {\n\n waiters: Vec<Waker>,\n\n generation: usize,\n\n senders_count: usize,\n\n}\n\n\n\npub struct Sender {\n\n inner: Arc<Mutex<Inner>>,\n\n}\n\n\n\nimpl Clone for Sender {\n\n fn clone(&self) -> Self {\n\n {\n\n let mut inner = 
self.inner.lock().unwrap();\n\n inner.senders_count += 1;\n\n }\n\n Sender {\n\n inner: self.inner.clone(),\n\n }\n\n }\n", "file_path": "multiwake/src/lib.rs", "rank": 14, "score": 37410.19768551695 }, { "content": "#[derive(Clone)]\n\nstruct Server {\n\n counter: Arc<AtomicU64>,\n\n}\n\n\n\nimpl rpc::Handler<AddRpc> for Server {\n\n type Error = Box<dyn std::error::Error + Send + Sync + 'static>;\n\n type Fut = futures_util::future::BoxFuture<'static, Result<(), Self::Error>>;\n\n fn handle(self, rx: rpc::UnaryRx<AddRequest>, tx: rpc::UnaryTx<AddResponse>) -> Self::Fut {\n\n Box::pin(async move {\n\n let req = rx.recv().await?;\n\n\n\n let cnt = self.counter.fetch_add(1, SeqCst);\n\n\n\n let res = AddResponse {\n\n sum: req.a + req.b + cnt,\n\n };\n\n tx.send(res).await?;\n\n Ok(())\n\n })\n\n }\n", "file_path": "rpc/tests/calc.rs", "rank": 15, "score": 37410.19768551695 }, { "content": "#[derive(serde::Serialize, serde::Deserialize)]\n\nstruct AddResponse {\n\n sum: u64,\n\n}\n\n\n", "file_path": "rpc/tests/calc.rs", "rank": 16, "score": 36154.047821553744 }, { "content": "/// Calculate sum of two numbers and internal server counter.\n\nstruct AddRpc;\n\n\n\nimpl rpc::Route for AddRpc {\n\n const ENDPOINT: &'static str = \"/add\";\n\n type Request = rpc::Unary<AddRequest>;\n\n type Response = rpc::Unary<AddResponse>;\n\n}\n\n\n", "file_path": "rpc/tests/calc.rs", "rank": 17, "score": 36154.047821553744 }, { "content": "struct DynRoute {\n\n /// Route endpoint\n\n endpoint: &'static str,\n\n /// Handler for this endpoint\n\n func: Box<dyn Fn(hyper::Body) -> hyper::Body + Send + Sync + 'static>,\n\n}\n\n\n\n/// Builder for Router.\n\npub struct RouterBuilder {\n\n /// Contains DynRoutes, sorted by endpoint.\n\n /// Why do we sort? Because we want efficiently find handlers later,\n\n /// and we use binary search for it. 
We could use stuff like HashMap,\n\n /// but it performs badly on small route count.\n\n routes: Vec<DynRoute>,\n\n}\n\n\n\nimpl RouterBuilder {\n\n /// Creates new builder with empty set of routes.\n\n pub fn new() -> RouterBuilder {\n\n RouterBuilder { routes: vec![] }\n", "file_path": "rpc/src/server.rs", "rank": 18, "score": 36154.047821553744 }, { "content": "#[derive(serde::Serialize, serde::Deserialize, Clone)]\n\nstruct AddRequest {\n\n a: u64,\n\n b: u64,\n\n}\n\n\n", "file_path": "rpc/tests/calc.rs", "rank": 19, "score": 36154.047821553744 }, { "content": "/// One side of RPC communication.\n\npub trait Direction {\n\n /// Type which is used to read from this direction.\n\n type Rx: Receive;\n\n /// Type which is used to write into this direction.\n\n type Tx: Transmit;\n\n}\n\n\n", "file_path": "rpc/src/lib.rs", "rank": 20, "score": 35867.41955769166 }, { "content": "/// Single RPC call offered by the server.\n\npub trait Route {\n\n /// How client should interact with server.\n\n type Request: Direction;\n\n /// How server should respond to client.\n\n type Response: Direction;\n\n /// URL at which this endpoint should be available.\n\n /// `ENDPOINT` must start with `/`.\n\n const ENDPOINT: &'static str;\n\n}\n", "file_path": "rpc/src/lib.rs", "rank": 21, "score": 35867.41955769166 }, { "content": "//! This examples shows how to expose RPC over standard HTTP+TCP.\n\n//! # Running\n\n//! Pass `serve` as argument to run the server.\n\n//! 
Pass `echo <smth>` to get echo from the server.\n\ntype EchoRequest = String;\n\n\n", "file_path": "rpc/examples/http.rs", "rank": 22, "score": 35574.62409935436 }, { "content": "type EchoResponse = String;\n\n\n", "file_path": "rpc/examples/http.rs", "rank": 23, "score": 35574.62409935436 }, { "content": "/// Synchronized state of channel\n\nstruct Queue<T> {\n\n /// Items that were not processed yet\n\n values: VecDeque<Item<T>>,\n\n /// If queue is empty it is possible that some receivers wait for values\n\n /// This queue stores their wakers.\n\n wakers: VecDeque<Waker>,\n\n}\n\n\n", "file_path": "async-mpmc/src/lib.rs", "rank": 24, "score": 32771.92327734458 }, { "content": "/// Shared state for channel\n\nstruct Inner<T> {\n\n q: Mutex<Queue<T>>,\n\n /// Used to track current sender count.\n\n senders_dummy: Arc<SenderDummyType>,\n\n}\n\n\n\n// TODO: describe why this is correct\n\nimpl<T> Inner<T> {\n\n /// Checks if no senders exist\n\n pub(crate) fn senders_closed(&self) -> bool {\n\n Arc::is_unique(&self.senders_dummy)\n\n }\n\n}\n\n\n", "file_path": "async-mpmc/src/lib.rs", "rank": 25, "score": 32771.92327734458 }, { "content": "fn make_server() -> rpc::Router {\n\n let mut builder = rpc::RouterBuilder::new();\n\n let srv = Server {\n\n counter: Arc::new(AtomicU64::new(0)),\n\n };\n\n builder.add_route::<AddRpc, _>(srv.clone());\n\n builder.add_route::<StreamingPi, _>(srv);\n\n builder.build()\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_simple() {\n\n let server = make_server();\n\n let mut client = rpc::Client::new(server, \"\".to_string());\n\n let data = AddRequest { a: 2, b: 3 };\n\n let resp1 = client.call::<AddRpc>(data.clone()).await.unwrap();\n\n assert_eq!(resp1.sum, 5);\n\n let resp2 = client.call::<AddRpc>(data).await.unwrap();\n\n assert_eq!(resp2.sum, 6);\n\n\n", "file_path": "rpc/tests/calc.rs", "rank": 26, "score": 32460.614350848606 }, { "content": "use std::{convert::Infallible, marker::PhantomData};\n\n/// Unary side: only one `Message` 
is passed in this direction\n\npub struct Unary<M>(Infallible, PhantomData<M>);\n\n\n\nimpl<M: serde::Serialize + serde::de::DeserializeOwned + Send + Sync + 'static> crate::Direction\n\n for Unary<M>\n\n{\n\n type Tx = UnaryTx<M>;\n\n type Rx = UnaryRx<M>;\n\n}\n\n\n\n/// Unary transmitter\n\npub struct UnaryTx<M> {\n\n phantom: PhantomData<M>,\n\n sender: hyper::body::Sender,\n\n}\n\n\n\nimpl<M: serde::Serialize + Send + Sync + 'static> crate::Transmit for UnaryTx<M> {\n\n type BatchError = SendError;\n\n\n", "file_path": "rpc/src/unary.rs", "rank": 35, "score": 21.57230096140026 }, { "content": "}\n\n\n\nimpl rpc::Handler<StreamingPi> for Server {\n\n type Error = Box<dyn std::error::Error + Send + Sync + 'static>;\n\n type Fut = futures_util::future::BoxFuture<'static, Result<(), Self::Error>>;\n\n fn handle(\n\n self,\n\n rx: rpc::UnaryRx<StreamingPiRequest>,\n\n mut tx: rpc::StreamingTx<StreamingPiResponse, ()>,\n\n ) -> Self::Fut {\n\n Box::pin(async move {\n\n let _req = rx.recv().await?;\n\n const CHUNKS: &[&str] = &[\"3.\", \"14\", \"159\", \"26\"];\n\n for &chunk in CHUNKS {\n\n tx.send_event(StreamingPiResponse {\n\n digits_chunk: chunk.to_string(),\n\n })\n\n .await?;\n\n }\n\n tx.finish(()).await?;\n\n Ok(())\n\n })\n\n }\n\n}\n\n\n", "file_path": "rpc/tests/calc.rs", "rank": 36, "score": 20.17763812295995 }, { "content": " type BatchData = M;\n\n\n\n type BatchFuture = futures_util::future::BoxFuture<'static, Result<(), SendError>>;\n\n\n\n fn from_body_sender(sender: hyper::body::Sender) -> Self {\n\n Self {\n\n phantom: PhantomData,\n\n sender,\n\n }\n\n }\n\n\n\n fn send_batch(self, uf: Self::BatchData) -> Self::BatchFuture {\n\n Box::pin(self.send(uf))\n\n }\n\n}\n\n\n\n#[derive(Debug, thiserror::Error)]\n\npub enum SendError {\n\n #[error(\"serialization failed\")]\n\n Serialize(serde_json::Error),\n", "file_path": "rpc/src/unary.rs", "rank": 37, "score": 16.16676233918133 }, { "content": " /// Receives all values from the channel and processes 
it with\n\n /// provided future constructor. This future will be spawned onto\n\n /// current Tokio runtime in the same `tracing` span `Sender::send` was\n\n /// called in. Receiving is done in background task. That task stops\n\n /// when all Senders are dropped, and channel is empty.\n\n pub fn process_all<F, C>(self, mut cons: C)\n\n where\n\n C: FnMut(T) -> F + Send + 'static,\n\n F: Future<Output = ()> + Send + 'static,\n\n {\n\n tokio::task::spawn(async move {\n\n loop {\n\n let maybe_item = self.recv().await;\n\n match maybe_item {\n\n None => {\n\n // channel is empty, and no senders are alive. We should stop.\n\n break;\n\n }\n\n Some(item) => {\n\n let fut = cons(item.value);\n", "file_path": "async-mpmc/src/lib.rs", "rank": 38, "score": 15.021218428164703 }, { "content": " #[error(\"data not sent to client\")]\n\n Network(hyper::Error),\n\n}\n\n\n\nimpl<M: serde::Serialize> UnaryTx<M> {\n\n /// Sends message to the server.\n\n /// Consumes transmitter because multiple messages are not allowed\n\n /// by Unary direction.\n\n pub async fn send(mut self, message: M) -> Result<(), SendError> {\n\n let message = serde_json::to_vec(&message).map_err(SendError::Serialize)?;\n\n self.sender\n\n .send_data(message.into())\n\n .await\n\n .map_err(SendError::Network)\n\n }\n\n}\n\n\n\n/// Unary reciever\n\npub struct UnaryRx<M> {\n\n phantom: PhantomData<M>,\n", "file_path": "rpc/src/unary.rs", "rank": 39, "score": 14.724934501376259 }, { "content": "//! Simple HTTP+JSON based RPC\n\n//!\n\n//! `rpc` used client-server model:\n\n//! server exposes routes, and client calls them.\n\n//! Each route is composed from request direction (client -> server)\n\n//! and response direction (server -> client).\n\n//! 
Each direction can be unary or streaming.\n\nmod client;\n\nmod server;\n\nmod streaming;\n\nmod unary;\n\n\n\npub use client::{Client, ReqwestEngine, ReqwestError};\n\npub use server::{Handler, MakeRouter, Router, RouterBuilder};\n\npub use streaming::{\n\n RecvError as StreamingRecvError, SendError as StreamingSendError, Streaming, StreamingRx,\n\n StreamingTx,\n\n};\n\npub use unary::{\n\n RecvError as UnaryRecvError, SendError as UnarySendError, Unary, UnaryRx, UnaryTx,\n\n};\n\n\n\n/// One side of RPC communication.\n", "file_path": "rpc/src/lib.rs", "rank": 40, "score": 14.60999610553836 }, { "content": " body: hyper::Body,\n\n}\n\n\n\nimpl<M: serde::de::DeserializeOwned + Send + 'static> crate::Receive for UnaryRx<M> {\n\n type BatchData = M;\n\n type BatchError = RecvError;\n\n type BatchFuture = futures_util::future::BoxFuture<'static, Result<M, RecvError>>;\n\n\n\n fn from_body(body: hyper::Body) -> Self {\n\n Self {\n\n phantom: PhantomData,\n\n body,\n\n }\n\n }\n\n\n\n fn recv_batch(self) -> Self::BatchFuture {\n\n Box::pin(self.recv())\n\n }\n\n}\n\n\n", "file_path": "rpc/src/unary.rs", "rank": 41, "score": 13.78196133697168 }, { "content": "use once_cell::sync::Lazy;\n\nuse serde::Serialize;\n\n\n\n#[non_exhaustive]\n\n#[derive(Copy, Clone, Debug, Serialize)]\n\npub struct BuildInfo {\n\n pub build_date: Option<&'static str>,\n\n pub git_revision: Option<&'static str>,\n\n pub version: Option<&'static str>,\n\n}\n\n\n\nimpl BuildInfo {\n\n pub fn get() -> Self {\n\n let v = BuildInfo::do_get();\n\n if option_env!(\"JJS_BUILD_INFO_VERIFY_FULL\").is_some() {\n\n assert!(v.build_date.is_some());\n\n assert!(v.git_revision.is_some());\n\n assert!(v.version.is_some());\n\n }\n\n v\n", "file_path": "buildinfo/src/lib.rs", "rank": 42, "score": 12.384477758523794 }, { "content": "//! High-level opinionated multi-producer multi-consumer channel.\n\n//! Features:\n\n//! - Async support\n\n//! 
- `tracing` aware\n\n\n\nuse std::collections::VecDeque;\n\nuse std::future::Future;\n\nuse std::sync::Mutex;\n\nuse std::task::{Context, Poll, Waker};\n\nuse tracing_futures::Instrument;\n\nuse triomphe::Arc;\n\n\n\n/// Used to send objects into channel\n\npub struct Sender<T> {\n\n /// Reference to state\n\n inner: Arc<Inner<T>>,\n\n /// See `Inner.senders_dummy`\n\n // wrapped in Option for Drop::drop\n\n dummy: Option<Arc<SenderDummyType>>,\n\n}\n", "file_path": "async-mpmc/src/lib.rs", "rank": 43, "score": 11.587189596196389 }, { "content": " .body(body)\n\n .expect(\"invalid data\");\n\n\n\n let tx = <R::Request as crate::Direction>::Tx::from_body_sender(body_sender);\n\n let response = (&mut self.engine).oneshot(req).await?;\n\n let rx = <R::Response as crate::Direction>::Rx::from_body(response.into_body());\n\n Ok((tx, rx))\n\n }\n\n}\n\n\n\n/// Engine based on reqwest.\n\n#[derive(Clone)]\n\npub struct ReqwestEngine(reqwest::Client);\n\n\n\nimpl ReqwestEngine {\n\n pub fn wrap_client(cl: reqwest::Client) -> ReqwestEngine {\n\n ReqwestEngine(cl)\n\n }\n\n\n\n pub fn new() -> ReqwestEngine {\n", "file_path": "rpc/src/client.rs", "rank": 44, "score": 11.465269619040654 }, { "content": " tx.send_batch(data).await.map_err(CallError::Send)?;\n\n rx.recv_batch().await.map_err(CallError::Recv)\n\n }\n\n\n\n /// Starts new RPC call.\n\n /// Returns transmitter that can send messages to server,\n\n /// and receiver that can receive messages from server.\n\n pub async fn start<R: crate::Route>(\n\n &mut self,\n\n ) -> Result<\n\n (\n\n <R::Request as crate::Direction>::Tx,\n\n <R::Response as crate::Direction>::Rx,\n\n ),\n\n <E as hyper::service::Service<hyper::Request<hyper::Body>>>::Error,\n\n > {\n\n let (body_sender, body) = hyper::Body::channel();\n\n let req = hyper::Request::builder()\n\n .method(hyper::Method::POST)\n\n .uri(format!(\"{}{}\", self.base, R::ENDPOINT))\n", "file_path": "rpc/src/client.rs", "rank": 45, "score": 11.27044909706893 }, { "content": 
"use crate::{Receive, Transmit};\n\nuse std::convert::TryInto;\n\nuse tower_util::ServiceExt;\n\n\n\n/// RPC Client. `Engine` is something that can actually send requests to the\n\n/// RPC server, e.g. hyper::Client or reqwest::Client.\n\n#[derive(Clone)]\n\npub struct Client<Engine = ReqwestEngine> {\n\n engine: Engine,\n\n base: String,\n\n}\n\n\n\nimpl<E> Client<E> {\n\n /// Constructs new client from the given engine and base url.\n\n /// Base usually should not end with '/'.\n\n /// All requests will be sent to \"{self.base}{R::ENDPOINT}\".\n\n pub fn new(engine: E, base: String) -> Self {\n\n Client { engine, base }\n\n }\n\n}\n", "file_path": "rpc/src/client.rs", "rank": 46, "score": 11.244999938530217 }, { "content": " Self {\n\n inner: self.inner.clone(),\n\n dummy: self.dummy.clone(),\n\n }\n\n }\n\n}\n\n\n\n/// Used to receive objects from channel\n\npub struct Receiver<T> {\n\n /// Reference to state\n\n inner: Arc<Inner<T>>,\n\n}\n\n\n\nimpl<T> Clone for Receiver<T> {\n\n fn clone(&self) -> Self {\n\n Self {\n\n inner: self.inner.clone(),\n\n }\n\n }\n\n}\n\n\n\n/// One queue item\n", "file_path": "async-mpmc/src/lib.rs", "rank": 47, "score": 10.8505794125929 }, { "content": " return Poll::Ready(WaitResult::Ok);\n\n }\n\n if inner.senders_count == 0 {\n\n return Poll::Ready(WaitResult::Closed);\n\n }\n\n inner.waiters.push(cx.waker().clone());\n\n Poll::Pending\n\n }\n\n\n\n /// Waits for a new generation\n\n pub fn wait(&mut self) -> Wait<'_> {\n\n Wait(self)\n\n }\n\n}\n\n\n\n/// Future resolving when new generation is observed or all senders are dropped.\n\npub struct Wait<'a>(&'a mut Receiver);\n\n\n\nimpl Future for Wait<'_> {\n\n type Output = WaitResult;\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n self.0.poll_wait(cx)\n\n }\n\n}\n\n\n", "file_path": "multiwake/src/lib.rs", "rank": 48, "score": 10.513385141783196 }, { "content": " use super::*;\n\n\n\n fn make_cx() -> Context<'static> {\n\n 
Context::from_waker(futures_util::task::noop_waker_ref())\n\n }\n\n\n\n #[test]\n\n fn simple() {\n\n let (tx, mut rx) = multiwake();\n\n // at start, rx would block\n\n assert_eq!(rx.poll_wait(&mut make_cx()), Poll::Pending);\n\n tx.wake();\n\n // now rx should observe new generation\n\n assert_eq!(rx.poll_wait(&mut make_cx()), Poll::Ready(WaitResult::Ok));\n\n // but next poll should block again\n\n assert_eq!(rx.poll_wait(&mut make_cx()), Poll::Pending);\n\n }\n\n\n\n #[test]\n\n fn multiple_notifications_are_observed_at_once() {\n", "file_path": "multiwake/src/lib.rs", "rank": 49, "score": 9.623848310206967 }, { "content": "//! This library implements simple syncronization primitive MultiWake.\n\n//! It consists of Senders and Receivers. Receiver can wait until event happens (i.e. generation increases),\n\n//! blocking current task. Each Sender can produce an event (i.e. increase generation), waking all waiting\n\n//! Receivers.\n\n//! # Performance\n\n//! Currently very poor.\n\n\n\nuse std::future::Future;\n\nuse std::pin::Pin;\n\nuse std::sync::{Arc, Mutex};\n\nuse std::task::{Context, Poll, Waker};\n\n\n", "file_path": "multiwake/src/lib.rs", "rank": 50, "score": 9.50511381325874 }, { "content": "\n\n#[derive(Debug)]\n\npub enum CallError<TransportError, RecvBatchError, SendBatchError> {\n\n Transport(TransportError),\n\n Recv(RecvBatchError),\n\n Send(SendBatchError),\n\n}\n\n\n\nimpl<TE, RE, SE> CallError<TE, RE, SE> {\n\n pub fn description(&self) -> &'static str {\n\n match self {\n\n CallError::Transport(_) => \"transport error\",\n\n CallError::Send(_) => \"failed to send batch\",\n\n CallError::Recv(_) => \"failed to receive batch\",\n\n }\n\n }\n\n}\n\n\n\nimpl<TE, RE, SE> std::fmt::Display for CallError<TE, RE, SE> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n", "file_path": "rpc/src/client.rs", "rank": 51, "score": 9.35962308951574 }, { "content": " self.description().fmt(f)\n\n }\n\n}\n\n\n\nimpl<\n\n TE: 
std::error::Error + 'static,\n\n RE: std::error::Error + 'static,\n\n SE: std::error::Error + 'static,\n\n > std::error::Error for CallError<TE, RE, SE>\n\n{\n\n fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n\n match self {\n\n CallError::Transport(inner) => Some(inner),\n\n CallError::Recv(inner) => Some(inner),\n\n CallError::Send(inner) => Some(inner),\n\n }\n\n }\n\n}\n\n\n\nimpl<E> Client<E>\n", "file_path": "rpc/src/client.rs", "rank": 52, "score": 9.312817245971246 }, { "content": " pub fn send(&self, value: T) {\n\n tracing::debug!(\"sent to channel\");\n\n let item = Item {\n\n value,\n\n span: tracing::Span::current(),\n\n };\n\n let mut q = self.inner.q.lock().unwrap();\n\n q.values.push_back(item);\n\n if let Some(waker) = q.wakers.pop_front() {\n\n waker.wake();\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Send + 'static> Receiver<T> {\n\n /// Polls the channel for a value.\n\n /// It is not exposed because it would be footgun: since some task started\n\n /// polling, it must not interrupt this poll. Otherwise, internal waker\n\n /// storage will contain \"dead\" wakers and some wakeups will be lost,\n\n /// which can lead to deadlock.\n", "file_path": "async-mpmc/src/lib.rs", "rank": 53, "score": 9.157951451472899 }, { "content": " }\n\n /// Adds a route to the router. 
Takes route handler - function that will\n\n /// handle requests to this route.\n\n /// # Panics\n\n /// Panics if other route with the same endpoint was added earlier.\n\n pub fn add_route<R: crate::Route, H: Handler<R>>(&mut self, handler: H) {\n\n let func = move |req: hyper::Body| {\n\n // create handler for this request.\n\n let handler = handler.clone();\n\n let (resp_body_sender, response_body) = hyper::Body::channel();\n\n let tx = <R::Response as Direction>::Tx::from_body_sender(resp_body_sender);\n\n let rx = <R::Request as Direction>::Rx::from_body(req);\n\n // start background task that will make response stream.\n\n tokio::task::spawn(async {\n\n let handler_fut = handler.handle(rx, tx);\n\n if let Err(err) = handler_fut.await {\n\n tracing::warn!(error=%err, \"request failed\");\n\n }\n\n });\n\n response_body\n", "file_path": "rpc/src/server.rs", "rank": 54, "score": 9.039229887628661 }, { "content": "where\n\n E: hyper::service::Service<\n\n hyper::Request<hyper::Body>,\n\n Response = hyper::Response<hyper::Body>,\n\n >,\n\n{\n\n /// Executes RPC call, writing all Request-sided messages upfront and\n\n /// returning batch of server responses.\n\n pub async fn call<R: crate::Route>(\n\n &mut self,\n\n data: <<R::Request as crate::Direction>::Tx as crate::Transmit>::BatchData,\n\n ) -> Result<\n\n <<R::Response as crate::Direction>::Rx as crate::Receive>::BatchData,\n\n CallError<\n\n <E as hyper::service::Service<hyper::Request<hyper::Body>>>::Error,\n\n <<R::Response as crate::Direction>::Rx as crate::Receive>::BatchError,\n\n <<R::Request as crate::Direction>::Tx as crate::Transmit>::BatchError,\n\n >,\n\n > {\n\n let (tx, rx) = self.start::<R>().await.map_err(CallError::Transport)?;\n", "file_path": "rpc/src/client.rs", "rank": 55, "score": 8.669715610111636 }, { "content": "\n\n pub fn add_detail<T: Serialize>(\n\n &mut self,\n\n key: &str,\n\n value: &T,\n\n ) -> Result<(), serde_json::Error> {\n\n self.details\n\n .insert(key.to_string(), 
serde_json::to_value(value)?);\n\n Ok(())\n\n }\n\n\n\n pub fn reply(&self) -> impl warp::Reply {\n\n warp::reply::with_status(warp::reply::json(&self), self.kind.http_status())\n\n }\n\n}\n\n\n\n/// Use this as a `.recover` on routes.\n\npub async fn recover(rej: warp::Rejection) -> Result<impl warp::Reply, warp::Rejection> {\n\n if rej.is_not_found() {\n\n return Err(rej);\n", "file_path": "api-util/src/lib.rs", "rank": 56, "score": 8.466089186980753 }, { "content": " let (tx, mut rx) = multiwake();\n\n tx.wake();\n\n tx.wake();\n\n tx.wake();\n\n // first poll should succeed\n\n assert_eq!(rx.poll_wait(&mut make_cx()), Poll::Ready(WaitResult::Ok));\n\n // but second poll should be pending\n\n assert_eq!(rx.poll_wait(&mut make_cx()), Poll::Pending);\n\n }\n\n}\n", "file_path": "multiwake/src/lib.rs", "rank": 57, "score": 8.290576008497585 }, { "content": "use crate::{Direction, Receive, Transmit};\n\nuse std::future::Future;\n\nuse std::sync::Arc;\n\n\n\n/// Type that can handle requests for specific route.\n\n/// `C` is context type.\n", "file_path": "rpc/src/server.rs", "rank": 58, "score": 8.156021823620677 }, { "content": " pub code: String,\n\n pub details: serde_json::Map<String, serde_json::Value>,\n\n}\n\n\n\nimpl std::fmt::Display for ApiError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n \"api error\".fmt(f)\n\n }\n\n}\n\n\n\nimpl std::error::Error for ApiError {}\n\n\n\nimpl ApiError {\n\n pub fn new(kind: ErrorKind, code: &str) -> Self {\n\n ApiError {\n\n kind,\n\n code: code.to_string(),\n\n details: serde_json::Map::new(),\n\n }\n\n }\n", "file_path": "api-util/src/lib.rs", "rank": 59, "score": 8.123445678978387 }, { "content": " }\n\n}\n\n\n\nimpl Default for RouterBuilder {\n\n fn default() -> Self {\n\n RouterBuilder::new()\n\n }\n\n}\n\n\n\n/// Tower Service which can be used to serve requests.\n\n#[derive(Clone)]\n\npub struct Router {\n\n routes: Arc<[DynRoute]>,\n\n}\n\n\n\n/// Wrapper around Router, 
implementing MakeService.\n\npub struct MakeRouter(Router);\n\n\n\nimpl Router {\n\n fn find_route(&self, req: &hyper::Request<hyper::Body>) -> Option<&DynRoute> {\n", "file_path": "rpc/src/server.rs", "rank": 60, "score": 8.117338457961813 }, { "content": "#[derive(Clone)]\n\npub struct Receiver {\n\n inner: Arc<Mutex<Inner>>,\n\n last_observed_generation: usize,\n\n}\n\n\n\n#[derive(Copy, Clone, Eq, PartialEq, Debug)]\n\npub enum WaitResult {\n\n /// Generation was incremented by a Sender\n\n Ok,\n\n /// All Senders are dropped, so no generations can be observed in future.\n\n Closed,\n\n}\n\n\n\nimpl Receiver {\n\n /// Tries to wait for a new generation\n\n pub fn poll_wait(&mut self, cx: &mut Context<'_>) -> Poll<WaitResult> {\n\n let mut inner = self.inner.lock().unwrap();\n\n if self.last_observed_generation < inner.generation {\n\n self.last_observed_generation = inner.generation;\n", "file_path": "multiwake/src/lib.rs", "rank": 61, "score": 8.06293251717095 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\npub struct AnyhowRejection(pub anyhow::Error);\n\n\n\nimpl std::fmt::Debug for AnyhowRejection {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n self.0.fmt(f)\n\n }\n\n}\n\n\n\nimpl warp::reject::Reject for AnyhowRejection {}\n\n\n\n#[derive(Clone, Copy, Serialize, Deserialize, Debug)]\n\n#[serde(rename_all = \"SCREAMING_SNAKE_CASE\")]\n\npub enum ErrorKind {\n\n /// Some parameters were invalid\n\n InvalidInput,\n\n /// Internal or unknown error\n\n Internal,\n\n /// Resource not found\n", "file_path": "api-util/src/lib.rs", "rank": 62, "score": 7.807318487808002 }, { "content": " type Response = hyper::Response<hyper::Body>;\n\n type Error = ReqwestError;\n\n type Future = futures_util::future::BoxFuture<'static, Result<Self::Response, Self::Error>>;\n\n\n\n fn poll_ready(\n\n &mut self,\n\n _cx: &mut std::task::Context<'_>,\n\n ) -> std::task::Poll<Result<(), Self::Error>> {\n\n std::task::Poll::Ready(Ok(()))\n\n 
}\n\n\n\n fn call(&mut self, hyper_req: hyper::Request<hyper::Body>) -> Self::Future {\n\n let client = self.0.clone();\n\n Box::pin(async move {\n\n let reqwest_req: reqwest::Request =\n\n hyper_req.map(reqwest::Body::wrap_stream).try_into()?;\n\n let response = client.execute(reqwest_req).await?;\n\n let mut builder = hyper::Response::builder().status(response.status());\n\n for (k, v) in response.headers() {\n\n builder = builder.header(k, v);\n\n }\n\n builder\n\n .body(hyper::Body::wrap_stream(response.bytes_stream()))\n\n .map_err(Into::into)\n\n })\n\n }\n\n}\n", "file_path": "rpc/src/client.rs", "rank": 63, "score": 7.623627087048631 }, { "content": "impl Default for Tls {\n\n fn default() -> Self {\n\n Tls::Enable\n\n }\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub struct PullSettings {\n\n /// Tls mode\n\n pub tls: Tls,\n\n /// Skip downloading layers\n\n pub skip_layers: bool,\n\n}\n\n\n\nimpl Puller {\n\n /// Creates new Puller.\n\n pub async fn new() -> Puller {\n\n Puller {\n\n secrets: Vec::new(),\n\n }\n", "file_path": "puller/src/lib.rs", "rank": 64, "score": 7.442637263148745 }, { "content": "}\n\n\n\nimpl Drop for Sender {\n\n fn drop(&mut self) {\n\n let mut inner = self.inner.lock().unwrap();\n\n inner.senders_count -= 1;\n\n }\n\n}\n\n\n\nimpl Sender {\n\n /// Increases current generation, notifying all waiting Receivers.\n\n pub fn wake(&self) {\n\n let mut inner = self.inner.lock().unwrap();\n\n inner.generation += 1;\n\n for waker in inner.waiters.drain(..) {\n\n waker.wake();\n\n }\n\n }\n\n}\n\n\n", "file_path": "multiwake/src/lib.rs", "rank": 65, "score": 7.440823416706101 }, { "content": "\n\nimpl<T> Drop for Sender<T> {\n\n fn drop(&mut self) {\n\n self.dummy\n\n .take()\n\n .expect(\"`dummy` field should be Some(_) for all sender lifecycle\");\n\n // maybe, we were last sender. Let's check it.\n\n if self.inner.senders_closed() {\n\n // yes, we were last. 
let's wake all waiting receivers.\n\n // they will see that the channel is closed and will handle it.\n\n let mut q = self.inner.q.lock().unwrap();\n\n for waker in q.wakers.drain(..) {\n\n waker.wake()\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<T> Clone for Sender<T> {\n\n fn clone(&self) -> Self {\n", "file_path": "async-mpmc/src/lib.rs", "rank": 66, "score": 7.290468439997493 }, { "content": " ReqwestEngine(reqwest::Client::new())\n\n }\n\n}\n\n\n\nimpl Default for ReqwestEngine {\n\n fn default() -> Self {\n\n ReqwestEngine::new()\n\n }\n\n}\n\n\n\n/// Possible errors when using Reqwest-based Engine\n\n#[derive(Debug, thiserror::Error)]\n\npub enum ReqwestError {\n\n #[error(\"transport error\")]\n\n Reqwest(#[from] reqwest::Error),\n\n #[error(\"error from http crate\")]\n\n Http(#[from] hyper::http::Error),\n\n}\n\n\n\nimpl hyper::service::Service<hyper::Request<hyper::Body>> for ReqwestEngine {\n", "file_path": "rpc/src/client.rs", "rank": 67, "score": 7.004366839182668 }, { "content": " ///\n\n /// Returns Some(T) on success and None if channel is empty and no\n\n /// senders exist\n\n fn poll_recv(&self, cx: &mut Context<'_>) -> Poll<Option<Item<T>>> {\n\n if self.inner.senders_closed() {\n\n return Poll::Ready(None);\n\n }\n\n let mut q = self.inner.q.lock().unwrap();\n\n if let Some(item) = q.values.pop_front() {\n\n return Poll::Ready(Some(item));\n\n }\n\n q.wakers.push_back(cx.waker().clone());\n\n Poll::Pending\n\n }\n\n\n\n /// See `poll_recv` for caveats\n\n async fn recv(&self) -> Option<Item<T>> {\n\n tokio::future::poll_fn(|cx| self.poll_recv(cx)).await\n\n }\n\n\n", "file_path": "async-mpmc/src/lib.rs", "rank": 68, "score": 6.783311087397411 }, { "content": " hyper::Server::bind(&([127u8, 0, 0, 1], 8000).into())\n\n .serve(router)\n\n .await?;\n\n Ok(())\n\n}\n\n\n\nasync fn client_main(\n\n greeting: EchoRequest,\n\n) -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {\n\n let engine = rpc::ReqwestEngine::new();\n\n let base = 
\"http://127.0.0.1:8000\".to_string();\n\n let mut client = rpc::Client::new(engine, base);\n\n let response = client.call::<Echo>(greeting).await;\n\n println!(\"RPC result: {:?}\", response);\n\n Ok(())\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n let res = match std::env::args().nth(1).as_deref() {\n", "file_path": "rpc/examples/http.rs", "rank": 69, "score": 6.644454389852144 }, { "content": "//! This library implements high-level image puller on top of\n\n//! `dkregistry` crate. See [`Puller`](Puller) for more.\n\nuse std::path::{Path, PathBuf};\n\nuse tokio_util::sync::CancellationToken;\n\nuse tracing::{error, instrument, trace};\n\n/// Main type of library, which supports loading and unpacking\n\n/// images.\n\npub struct Puller {\n\n /// These secrets will be used to authenticate in registry.\n\n secrets: Vec<ImagePullSecret>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Tls {\n\n /// Require TLS\n\n Enable,\n\n /// Disable TLS (insecure).\n\n Disable,\n\n}\n\n\n", "file_path": "puller/src/lib.rs", "rank": 70, "score": 6.564927510717847 }, { "content": " fn poll_ready(\n\n &mut self,\n\n _cx: &mut std::task::Context<'_>,\n\n ) -> std::task::Poll<Result<(), Self::Error>> {\n\n std::task::Poll::Ready(Ok(()))\n\n }\n\n fn call(&mut self, req: hyper::Request<hyper::Body>) -> Self::Future {\n\n futures_util::future::ready(Ok(self.call_inner(req)))\n\n }\n\n}\n\n\n\nimpl<T> hyper::service::Service<T> for MakeRouter {\n\n type Response = Router;\n\n type Error = std::convert::Infallible;\n\n type Future = futures_util::future::Ready<Result<Self::Response, Self::Error>>;\n\n fn poll_ready(\n\n &mut self,\n\n _cx: &mut std::task::Context<'_>,\n\n ) -> std::task::Poll<Result<(), Self::Error>> {\n\n std::task::Poll::Ready(Ok(()))\n\n }\n\n\n\n fn call(&mut self, _req: T) -> Self::Future {\n\n futures_util::future::ready(Ok(self.0.clone()))\n\n }\n\n}\n", "file_path": "rpc/src/server.rs", "rank": 71, "score": 6.285329637183477 }, { "content": " };\n\n\n\n let item 
= DynRoute {\n\n endpoint: R::ENDPOINT,\n\n func: Box::new(func),\n\n };\n\n match self\n\n .routes\n\n .binary_search_by_key(&R::ENDPOINT, |dr| dr.endpoint)\n\n {\n\n Ok(_) => panic!(\"duplicate endpoint {}\", R::ENDPOINT),\n\n Err(pos) => self.routes.insert(pos, item),\n\n }\n\n }\n\n\n\n /// Converts this builder into service, which can be further used with hyper.\n\n pub fn build(self) -> Router {\n\n Router {\n\n routes: self.routes.into(),\n\n }\n", "file_path": "rpc/src/server.rs", "rank": 72, "score": 5.9963452655344724 }, { "content": "#[derive(Debug, thiserror::Error)]\n\npub enum RecvError {\n\n #[error(\"unable to read response\")]\n\n Network(hyper::Error),\n\n #[error(\"parsing failed\")]\n\n Deserialize(serde_json::Error),\n\n}\n\n\n\nimpl<M: serde::de::DeserializeOwned> UnaryRx<M> {\n\n /// Receives value, passed by client.\n\n /// Consumes receiver because only one message could be sent.\n\n pub async fn recv(self) -> Result<M, RecvError> {\n\n let data = hyper::body::to_bytes(self.body)\n\n .await\n\n .map_err(RecvError::Network)?;\n\n let message = serde_json::from_slice(&*data).map_err(RecvError::Deserialize)?;\n\n Ok(message)\n\n }\n\n}\n", "file_path": "rpc/src/unary.rs", "rank": 73, "score": 5.979245867541947 }, { "content": " let pi_batch = client\n\n .call::<StreamingPi>(StreamingPiRequest {})\n\n .await\n\n .unwrap();\n\n let mut pi = String::new();\n\n for item in pi_batch.0 {\n\n pi.push_str(&item.digits_chunk);\n\n }\n\n assert_eq!(pi, \"3.1415926\")\n\n}\n", "file_path": "rpc/tests/calc.rs", "rank": 74, "score": 5.893242754690554 }, { "content": "impl std::fmt::Display for ImagePullSecret {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let username = match &self.username {\n\n Some(u) => u.as_str(),\n\n None => \"<missing>\",\n\n };\n\n let password = match &self.password {\n\n Some(_) => \"<provided>\",\n\n None => \"<missing>\",\n\n };\n\n write!(\n\n f,\n\n \"registry: {}; username: {}, password: 
{}\",\n\n &self.registry, username, password\n\n )\n\n }\n\n}\n\n\n\n#[derive(thiserror::Error, Debug)]\n\npub enum Error {\n", "file_path": "puller/src/lib.rs", "rank": 75, "score": 5.231770388303624 }, { "content": "use std::sync::{\n\n atomic::{AtomicU64, Ordering::SeqCst},\n\n Arc,\n\n};\n\n\n\n// protocol\n\n#[derive(serde::Serialize, serde::Deserialize, Clone)]\n", "file_path": "rpc/tests/calc.rs", "rank": 76, "score": 4.976158630101079 }, { "content": " }\n\n let dyn_route = match self.find_route(&req) {\n\n Some(dr) => dr,\n\n None => {\n\n tracing::error!(\"unknown endpoint\");\n\n return hyper::Response::builder()\n\n .status(hyper::StatusCode::NOT_FOUND)\n\n .body(\"Unknown endpoint\".into())\n\n .unwrap();\n\n }\n\n };\n\n let response_body = (dyn_route.func)(req.into_body());\n\n hyper::Response::new(response_body)\n\n }\n\n}\n\n\n\nimpl hyper::service::Service<hyper::Request<hyper::Body>> for Router {\n\n type Response = hyper::Response<hyper::Body>;\n\n type Error = std::convert::Infallible;\n\n type Future = futures_util::future::Ready<Result<Self::Response, Self::Error>>;\n", "file_path": "rpc/src/server.rs", "rank": 77, "score": 4.919621176167443 }, { "content": " {\n\n let _enter = item.span.enter();\n\n tracing::debug!(\"received from channel\");\n\n }\n\n tokio::task::spawn(fut.instrument(item.span));\n\n }\n\n }\n\n }\n\n });\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[tokio::test]\n\n async fn simple() {\n\n let (tx1, rx1) = channel::<u8>();\n\n let tx2 = tx1.clone();\n\n let rx2 = rx1.clone();\n", "file_path": "async-mpmc/src/lib.rs", "rank": 78, "score": 4.642898561129752 }, { "content": " pub username: Option<String>,\n\n pub password: Option<String>,\n\n}\n\n\n\nimpl ImagePullSecret {\n\n /// Tries to find `ImagePullSecret`s in parsed docker config.\n\n /// ```no_run\n\n /// # fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n /// let data = std::fs::read(\"~/.docker/config.json\")?;\n\n /// let data = 
serde_json::from_slice(&data)?;\n\n /// let secrets = puller::ImagePullSecret::parse_docker_config(&data);\n\n /// for sec in secrets.unwrap_or_default() {\n\n /// println!(\"{}\", sec);\n\n /// } \n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn parse_docker_config(docker_config: &serde_json::Value) -> Option<Vec<ImagePullSecret>> {\n\n let docker_config = docker_config.as_object()?;\n\n let auths = docker_config.get(\"auths\")?.as_object()?;\n", "file_path": "puller/src/lib.rs", "rank": 79, "score": 4.612232313322658 }, { "content": " }\n\n\n\n pub fn set_secrets(&mut self, secrets: Vec<ImagePullSecret>) -> &mut Self {\n\n self.secrets = secrets;\n\n self\n\n }\n\n\n\n /// Starts pulling an image.\n\n ///\n\n /// `image` is image reference in usual format, e.g. `alpine`,\n\n /// `foo/bar`, `cr.com/repo1/repo2/repo3/superimage`.\n\n ///\n\n /// `destination` is filesystem path image should be unpacked to.\n\n ///\n\n /// `cancel` is CancellationToken that can be used to cancel\n\n /// pending operation on best effort basis.\n\n ///\n\n /// On success returns channel that will receive final outcome when operation\n\n /// completed.\n\n #[instrument(skip(self, cancel))]\n", "file_path": "puller/src/lib.rs", "rank": 80, "score": 4.442330276196686 }, { "content": " }\n\n\n\n fn do_get() -> Self {\n\n BuildInfo {\n\n build_date: option_env!(\"JJS_BUILD_INFO_DATE\").filter(|s| !s.is_empty()),\n\n git_revision: option_env!(\"JJS_BUILD_INFO_COMMIT\").filter(|s| !s.is_empty()),\n\n version: option_env!(\"JJS_BUILD_INFO_VERSION\").filter(|s| !s.is_empty()),\n\n }\n\n }\n\n\n\n pub fn wrap_clap<'a>(app: clap::App<'a>) -> clap::App<'a> {\n\n static STRING_LONG_VERSION: Lazy<String> = Lazy::new(|| format!(\"{:#?}\", BuildInfo::get()));\n\n static STRING_SHORT_VERSION: Lazy<String> =\n\n Lazy::new(|| BuildInfo::get().version.unwrap_or(\"dev\").to_string());\n\n app.version(STRING_SHORT_VERSION.as_str())\n\n .long_version(STRING_LONG_VERSION.as_str())\n\n }\n\n}\n", 
"file_path": "buildinfo/src/lib.rs", "rank": 81, "score": 4.363425229081768 }, { "content": " NotFound,\n\n /// Request rejected by authorizer\n\n Unauthorized,\n\n}\n\n\n\nimpl ErrorKind {\n\n pub fn http_status(self) -> http::StatusCode {\n\n match self {\n\n ErrorKind::InvalidInput => http::StatusCode::BAD_REQUEST,\n\n ErrorKind::Internal => http::StatusCode::INTERNAL_SERVER_ERROR,\n\n ErrorKind::NotFound => http::StatusCode::NOT_FOUND,\n\n ErrorKind::Unauthorized => http::StatusCode::UNAUTHORIZED,\n\n }\n\n }\n\n}\n\n\n\n/// Put this struct into anyhow error context to return user-facing API error\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct ApiError {\n\n pub kind: ErrorKind,\n", "file_path": "api-util/src/lib.rs", "rank": 82, "score": 3.746157565512243 }, { "content": "use std::io::Write;\n\nuse tokio::io::AsyncBufReadExt;\n\nuse tokio_util::sync::CancellationToken;\n\n\n\n#[tracing::instrument]\n\nasync fn read_image_pull_secrets() -> Vec<puller::ImagePullSecret> {\n\n let docker_config_path = home::home_dir().unwrap().join(\".docker/config.json\");\n\n let docker_config = tokio::fs::read(&docker_config_path)\n\n .await\n\n .expect(\"docker config not found\");\n\n let docker_config = serde_json::from_slice(&docker_config).unwrap();\n\n match puller::ImagePullSecret::parse_docker_config(&docker_config) {\n\n Some(secs) => {\n\n for sec in &secs {\n\n println!(\"found secret: {}\", sec);\n\n }\n\n secs\n\n }\n\n None => {\n\n eprintln!(\"Warning: parsing ~/.docker/config.json failed\");\n\n Vec::new()\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "puller/examples/cli.rs", "rank": 83, "score": 3.4067978007746045 }, { "content": " let mut secrets = Vec::new();\n\n for (registry, auth_data) in auths {\n\n if let Some(sec) = Self::parse_from_docker_config_auth_entry(registry, auth_data) {\n\n secrets.push(sec);\n\n }\n\n }\n\n Some(secrets)\n\n }\n\n\n\n /// Tries to find `ImagePullSecret`s in Kubernetes Secret.\n\n #[cfg(feature = 
\"k8s\")]\n\n pub fn parse_kubernetes_secret(\n\n secret: &k8s_openapi::api::core::v1::Secret,\n\n ) -> Option<Vec<ImagePullSecret>> {\n\n let data = secret.string_data.as_ref()?;\n\n let dockerconfig = data.get(\".dockerconfigjson\")?;\n\n let dockerconfig = base64::decode(dockerconfig).ok()?;\n\n let dockerconfig = serde_json::from_slice(&dockerconfig).ok()?;\n\n Self::parse_docker_config(&dockerconfig)\n\n }\n", "file_path": "puller/src/lib.rs", "rank": 84, "score": 3.252050334599276 }, { "content": " .pull(\n\n &image_name,\n\n tempdir.path(),\n\n Default::default(),\n\n tokio_util::sync::CancellationToken::new(),\n\n )\n\n .await\n\n {\n\n eprintln!(\"Pull failed\");\n\n let mut err: &(dyn std::error::Error + 'static) = &err;\n\n loop {\n\n eprintln!(\"{}\", err);\n\n err = match err.source() {\n\n Some(e) => e,\n\n None => break,\n\n }\n\n }\n\n std::process::exit(1);\n\n }\n\n println!(\"OK\");\n\n print!(\"(press any key to continue)\");\n\n std::io::stdout().flush().unwrap();\n\n let mut line = String::new();\n\n let mut rdr = tokio::io::BufReader::new(tokio::io::stdin());\n\n rdr.read_line(&mut line).await.unwrap();\n\n}\n", "file_path": "puller/examples/batch.rs", "rank": 85, "score": 2.7729918957313386 }, { "content": " Ok(manifest)\n\n }\n\n\n\n /// This function is called by [`Puller::pull`](Puller::pull) when\n\n /// client is successfully created.\n\n #[instrument(skip(client, cancel, destination, manifest))]\n\n async fn fetch_layers(\n\n client: dkregistry::v2::Client,\n\n image_ref: dkregistry::reference::Reference,\n\n cancel: CancellationToken,\n\n destination: PathBuf,\n\n manifest: &dkregistry::v2::manifest::Manifest,\n\n ) -> Result<(), Error> {\n\n let digests = manifest.layers_digests(Some(IMAGE_ARCHITECTURE))?;\n\n let digests_count = digests.len();\n\n trace!(\"will fetch {} layers\", digests_count);\n\n for layer_digest in digests.into_iter() {\n\n Self::fetch_layer(\n\n client.clone(),\n\n image_ref.repository(),\n", "file_path": 
"puller/src/lib.rs", "rank": 86, "score": 2.6676236039713532 }, { "content": " tx1.send(1);\n\n tx2.send(2);\n\n tx1.send(57);\n\n\n\n // check that order is FIFO\n\n assert_eq!(rx2.recv().await.unwrap().value, 1);\n\n assert_eq!(rx1.recv().await.unwrap().value, 2);\n\n\n\n {\n\n let mut first_call = true;\n\n rx1.process_all(move |message| {\n\n assert_eq!(message, 57);\n\n assert!(first_call);\n\n first_call = false;\n\n async {}\n\n })\n\n }\n\n std::mem::drop((tx1, tx2));\n\n #[allow(unreachable_code)]\n\n rx2.process_all(|_| {\n\n unreachable!();\n\n // type inference fails otherwise\n\n // should be unneeded when fallback type is never,\n\n // but currently it's unstable feature.\n\n async move {}\n\n })\n\n }\n\n}\n", "file_path": "async-mpmc/src/lib.rs", "rank": 87, "score": 2.6146409527860284 }, { "content": " pub async fn pull(\n\n &self,\n\n image: &str,\n\n destination: &Path,\n\n pull_settings: PullSettings,\n\n cancel: CancellationToken,\n\n ) -> Result<dkregistry::v2::manifest::Manifest, Error> {\n\n let image_ref: dkregistry::reference::Reference = image.parse()?;\n\n trace!(\n\n registry = image_ref.registry().as_str(),\n\n repository = image_ref.repository().as_str(),\n\n image = image_ref.version().as_str()\n\n );\n\n let mut config = dkregistry::v2::Config::default();\n\n let creds = self.find_credentials(&image_ref.registry());\n\n config = config\n\n .username(creds.0)\n\n .password(creds.1)\n\n .registry(&image_ref.registry());\n\n if let Tls::Disable = pull_settings.tls {\n", "file_path": "puller/src/lib.rs", "rank": 88, "score": 2.5773933251060392 }, { "content": " /// Tries to lookup credentials for `registry`\n\n #[instrument(skip(self))]\n\n fn find_credentials(&self, registry: &str) -> (Option<String>, Option<String>) {\n\n for cred in &self.secrets {\n\n if cred.registry == registry {\n\n trace!(credentials=%cred, \"found credentials\");\n\n return (cred.username.clone(), cred.password.clone());\n\n }\n\n }\n\n trace!(\"no credentials 
found\");\n\n (None, None)\n\n }\n\n}\n\n\n\n// TODO support other architectures\n\nconst IMAGE_ARCHITECTURE: &str = \"amd64\";\n\n\n\n// debug missing because it can accidentally reveal sensitive data\n\npub struct ImagePullSecret {\n\n pub registry: String,\n", "file_path": "puller/src/lib.rs", "rank": 89, "score": 2.118930651255365 }, { "content": "use std::io::Write;\n\nuse tokio::io::AsyncBufReadExt;\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n tracing_subscriber::FmtSubscriber::builder()\n\n .with_env_filter(\"info,puller=trace,cli=trace\")\n\n .init();\n\n\n\n let tempdir = tempfile::tempdir().expect(\"failed to get a tempdir\");\n\n let image_name = match std::env::args().nth(1) {\n\n Some(name) => name,\n\n None => {\n\n eprintln!(\"Usage: batch <image_name>\");\n\n std::process::exit(1);\n\n }\n\n };\n\n println!(\"Pulling {} to {}\", &image_name, tempdir.path().display());\n\n let puller = puller::Puller::new().await;\n\n if let Err(err) = puller\n", "file_path": "puller/examples/batch.rs", "rank": 90, "score": 2.071866880271717 }, { "content": " layer_digest,\n\n cancel.clone(),\n\n destination.clone(),\n\n )\n\n .await?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n /// Tries to download and unpack one layer\n\n #[instrument(skip(client, repo, cancel, destination))]\n\n async fn fetch_layer(\n\n client: dkregistry::v2::Client,\n\n repo: String,\n\n layer_digest: String,\n\n cancel: CancellationToken,\n\n destination: PathBuf,\n\n ) -> Result<(), Error> {\n\n trace!(layer_digest = layer_digest.as_str(), \"download started\");\n", "file_path": "puller/src/lib.rs", "rank": 91, "score": 1.639619815229592 }, { "content": " config = config.insecure_registry(true);\n\n }\n\n\n\n let mut client = config.build()?;\n\n if !client.is_auth().await? {\n\n let token_scope = format!(\"repository:{}:pull\", image_ref.repository());\n\n\n\n client = client.authenticate(&[&token_scope]).await?;\n\n if !client.is_auth().await? 
{\n\n return Err(Error::LoginFailed);\n\n }\n\n }\n\n // this check is used to report missing image nicier\n\n {\n\n let types = client\n\n .has_manifest(&image_ref.repository(), &image_ref.version(), None)\n\n .await?;\n\n if types.is_none() {\n\n return Err(Error::ImageNotExists(image.to_string()));\n\n }\n", "file_path": "puller/src/lib.rs", "rank": 92, "score": 1.5202526058009802 }, { "content": " }\n\n let destination = destination.to_path_buf();\n\n tokio::fs::create_dir_all(&destination)\n\n .await\n\n .map_err(Error::CreateDest)?;\n\n\n\n trace!(\n\n image = image_ref.to_raw_string().as_str(),\n\n destination = %destination.display(),\n\n \"fetching manifest for {}\",\n\n IMAGE_ARCHITECTURE\n\n );\n\n let manifest = client\n\n .get_manifest(&image_ref.repository(), &image_ref.version())\n\n .await?;\n\n\n\n trace!(manifest = ?manifest, \"fetched manifest\");\n\n if !pull_settings.skip_layers {\n\n Self::fetch_layers(client, image_ref, cancel, destination, &manifest).await?;\n\n }\n", "file_path": "puller/src/lib.rs", "rank": 93, "score": 1.490440296192297 }, { "content": " let requested_endpoint = req.uri().path();\n\n match self\n\n .routes\n\n .binary_search_by_key(&requested_endpoint, |dr| dr.endpoint)\n\n {\n\n Ok(pos) => Some(&self.routes[pos]),\n\n Err(_) => None,\n\n }\n\n }\n\n\n\n pub fn as_make_service(&self) -> MakeRouter {\n\n MakeRouter(self.clone())\n\n }\n\n\n\n fn call_inner(&self, req: hyper::Request<hyper::Body>) -> hyper::Response<hyper::Body> {\n\n if req.method() != hyper::Method::POST {\n\n return hyper::Response::builder()\n\n .status(hyper::StatusCode::METHOD_NOT_ALLOWED)\n\n .body(\"only POST is allowed\".into())\n\n .unwrap();\n", "file_path": "rpc/src/server.rs", "rank": 94, "score": 1.4040925727737212 } ]
Rust
tests/parse_test.rs
msanou/crystallake
f72eada9f9ea3dbb6846a8549095394f90959108
extern crate crystalrake; use crystalrake::json::*; #[test] fn true_value() { let t = "true".parse::<JsonValue>(); if let Ok(JsonValue::Boolean(b)) = t { assert!(b, "expect true, but {}", b); } else { panic!("unexpect value : {:?}", t); } } #[test] fn true_string() { let t = "\"true\"".parse::<JsonValue>().unwrap(); assert_eq!(t, JsonValue::String("true".to_string())); } #[test] fn false_value() { let f = "false".parse::<JsonValue>(); if let Ok(JsonValue::Boolean(b)) = f { assert!(!b, "expect false, but {}", b); } else { panic!("unexpect value : {:?}", f); } } #[test] fn false_string() { let t = "\"false\"".parse::<JsonValue>().unwrap(); assert_eq!(t, JsonValue::String("false".to_string())); } #[test] fn number_value() { let json_value = "1234567890.0987654321".parse::<JsonValue>(); if let Ok(JsonValue::Number(number)) = json_value { let integer = number as u64; assert_eq!(integer, 1234567890); assert_eq!(number.fract(),0.0987654321f64); } else { panic!("unexpect value : {:?}", json_value); } } #[test] fn number_string() { let t = "\"1234567890.0987654321\"".parse::<JsonValue>().unwrap(); assert_eq!(t, JsonValue::String("1234567890.0987654321".to_string())); } #[test] fn empty_object() { let json_value = "{}".parse::<JsonValue>().unwrap(); let expect_result = JsonValue::Objects(Vec::new()); assert_eq!(json_value, expect_result); } #[test] fn array_value() { let json_value = r#" [ 12345, true, false, null, "Hello, world", { "object" : {} } ]"#.parse::<JsonValue>().unwrap(); let object : JsonObject = JsonObject::new("object", JsonValue::Objects(Vec::new())); let values = Vec::from([ JsonValue::Number(12345f64), JsonValue::Boolean(true), JsonValue::Boolean(false), JsonValue::Null, JsonValue::String("Hello, world".to_string()), object.into()]); assert_eq!(JsonValue::Array(values), json_value); } #[test] fn one_object() { let json_value = r#"{"name" : 12345}"#.parse::<JsonValue>().unwrap(); let expect_result = JsonValue::Objects(Vec::from([JsonObject::new("name", 
12345f64)])); assert_eq!(json_value, expect_result); } #[test] fn nested_empty_object() { let json_value = r#"{ "empty" : {} }"#.parse::<JsonValue>().unwrap(); let expect_result = JsonValue::Objects(Vec::from([JsonObject::new("empty", JsonValue::Objects(Vec::new()))])); assert_eq!(json_value, expect_result); } #[test] fn nested_empty_array() { let json_value = r#"[ {"empty" : []} ]"#.parse::<JsonValue>().unwrap(); let expect_result = JsonValue::Array(vec![JsonValue::Objects(vec![JsonObject::new("empty", JsonValue::Array(Vec::new()))])]); assert_eq!(json_value, expect_result); } #[test] fn null_value() { let json_value = "null".parse::<JsonValue>(); if let Ok(v) = json_value { assert!(v.is_null()); } else { panic!("unexpect value : {:?}", json_value); } } #[test] fn null_string() { let json_value = "\"null\"".parse::<JsonValue>().unwrap(); assert_eq!(json_value, JsonValue::String("null".to_string())); } #[test] fn contain_utf16() { let json_value = r#""\u3042\u3044\u3046abc""#.parse::<JsonValue>(); if let Ok(JsonValue::String(v)) = json_value { assert_eq!(v, "あいうabc".to_string()); } else { panic!("unexpect value : {:?}", json_value); } } #[test] fn contain_emoji() { let json_value = r#""\uD83D\uDE04\uD83D\uDE07\uD83D\uDC7A""#.parse::<JsonValue>(); if let Ok(JsonValue::String(v)) = json_value { assert_eq!(v, r#"😄😇👺"#.to_string()); } else { panic!("unexpect value : {:?}", json_value); } }
extern crate crystalrake; use crystalrake::json::*; #[test] fn true_value() { let t = "true".parse::<JsonValue>(); if let Ok(JsonValue::Boolean(b)) = t { assert!(b, "expect true, but {}", b); } else { panic!("unexpect value : {:?}", t); } } #[test] fn true_string() { let t = "\"true\"".parse::<JsonValue>().unwrap(); assert_eq!(t, JsonValue::String("true".to_string())); } #[test] fn false_value() { let f = "false".parse::<JsonValue>(); if let Ok(JsonValue::Boolean(b)) = f { assert!(!b, "expect false, but {}", b); } else { panic!("unexpect value : {:?}", f); } } #[test] fn false_string() { let t = "\"false\"".parse::<JsonValue>().unwrap(); assert_eq!(t, JsonValue::String("false".to_string())); } #[test] fn number_value() { let json_value = "1234567890.0987654321".parse::<JsonValue>(); if let Ok(JsonValue::Number(number)) = json_value { let integer = number as u64; assert_eq!(integer, 1234567890); assert_eq!(number.fract(),0.0987654321f64); } else { panic!("unexpect value : {:?}", json_value); } } #[test] fn number_string() { let t = "\"1234567890.0987654321\"".parse::<JsonValue>().unwrap(); assert_eq!(t, JsonValue::String("1234567890.0987654321".to_string())); } #[test] fn empty_object() { let json_value = "{}".parse::<JsonValue>().unwrap(); let expect_result = JsonValue::Objects(Vec::new()); assert_eq!(json_value, expect_result); } #[test] fn array_value() { let json_value = r#" [ 12345, true, false, null, "Hello, world", { "object" : {} } ]"#.parse::<JsonValue>().unwrap(); let object : JsonObject = JsonObject::new("object", JsonValue::Objects(Vec::new())); let values = Vec::from([ JsonValue::Number(12345f64), JsonValue::Boolean(true), JsonValue::Boolean(false), JsonValue::Null, JsonValue::String("Hello, world".to_string()), object.into()]); assert_eq!(JsonValue::Array(values), json_value); } #[test] fn one_object() { let json_value = r#"{"name" : 12345}"#.parse::<JsonValue>().unwrap(); let expect_result = JsonValue::Objects(Vec::from([JsonObject::new("name", 
12345f64)])); assert_eq!(json_value, expect_result); } #[test] fn nested_empty_object() { let json_value = r#"{ "empty" : {} }"#.parse::<JsonValue>().unwrap(); let expect_result = JsonValue::Objects(Vec::from([JsonObject::new("empty", JsonValue::Objects(Vec::new()))])); assert_eq!(json_value, expect_result); } #[test] fn nested_empty_array() { let json_value = r#"[ {"empty" : []} ]"#.parse::<JsonValue>().unwrap(); let expect_result = JsonValue::Array(vec![JsonValue::Objects(vec![JsonObject::new("empty", JsonValue::Array(Vec::new()))])]); assert_eq!(json_value, expect_result); } #[test] fn null_value() { let json_value = "null".parse::<JsonValue>(); if let Ok(v) = json_value { assert!(v.is_null()); } else { panic!("unexpect value : {:?}", json_value); } } #[test] fn null_string() { let json_value = "\"null\"".parse::<JsonValue>().unwrap(); assert_eq!(json_value, JsonValue::String("null".to_string())); } #[test] fn contain_utf16() { let json_value = r#""\u3042\u3044\u3046abc""#.parse::<JsonValue>(); if let Ok(JsonValue::String(v)) = json_value { assert_eq!(v, "あいうabc".to_string()); } else { panic!("unexpect value : {:?}", json_value); } } #[test]
fn contain_emoji() { let json_value = r#""\uD83D\uDE04\uD83D\uDE07\uD83D\uDC7A""#.parse::<JsonValue>(); if let Ok(JsonValue::String(v)) = json_value { assert_eq!(v, r#"😄😇👺"#.to_string()); } else { panic!("unexpect value : {:?}", json_value); } }
function_block-function_prefix_line
[ { "content": "#[test]\n\n#[allow(overflowing_literals)]\n\nfn simple_deserialize() {\n\n let a : A = \"1000000000000000\".parse::<JsonValue>().unwrap().deserialize().unwrap();\n\n assert_eq!(a, A::new(1000000000000000));\n\n}", "file_path": "tests/deserialize_test.rs", "rank": 15, "score": 47322.0088987154 }, { "content": "#[test]\n\nfn tokenize_null() {\n\n use lexer::*;\n\n let mut lexer = JsonLexer::new(\"null\");\n\n match lexer.tokenize() {\n\n Ok(tokens) => {\n\n let v = Vec::from([JsonToken::Null]);\n\n assert!(v.iter().eq(tokens.tokens.iter()), \"{:?}\", tokens.tokens);\n\n },\n\n Err(e) => {\n\n panic!(\"{}\", e);\n\n }\n\n }\n\n}", "file_path": "src/lib.rs", "rank": 16, "score": 44181.167803467484 }, { "content": "extern crate crystalrake;\n\n\n\nuse crystalrake::json::*;\n\n\n\n#[derive(Debug, Eq, PartialEq)]\n", "file_path": "tests/deserialize_test.rs", "rank": 18, "score": 25562.711143600263 }, { "content": "#[derive(Debug, Eq, PartialEq)]\n\nstruct A {\n\n number : i32\n\n}\n\n\n\nimpl A {\n\n fn new(number : i32) -> A {\n\n A { number }\n\n }\n\n}\n\n\n", "file_path": "tests/deserialize_test.rs", "rank": 19, "score": 25552.876408898595 }, { "content": "#[derive(Debug)]\n\nstruct JsonDeserializeError {\n\n message: String,\n\n}\n\n\n\nimpl FromJson for A {\n\n type Err = JsonDeserializeError;\n\n fn from_json(json: &JsonValue) -> Result<Self, Self::Err> {\n\n match json {\n\n // This code must change. 
It's panicable.\n\n JsonValue::Number(n) => Ok(A { number: *n as i32}) ,\n\n _ => Err(JsonDeserializeError{ message: \"fail\".to_owned()})\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/deserialize_test.rs", "rank": 20, "score": 22188.54785349502 }, { "content": " pub fn is_number(&self) -> bool {\n\n if let JsonValue::Number(_) = self {\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n pub fn is_string(&self) -> bool {\n\n if let JsonValue::String(_) = self {\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n pub fn is_objects(&self) -> bool {\n\n if let JsonValue::Objects(_) = self {\n\n true\n\n } else {\n", "file_path": "src/json.rs", "rank": 21, "score": 10.167969782685304 }, { "content": " if let JsonValue::Array(_) = self {\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n pub fn deserialize<F: FromJson>(&self) -> Result<F, F::Err> {\n\n FromJson::from_json(self)\n\n }\n\n}\n\n\n\nimpl From<f64> for JsonValue {\n\n fn from(v: f64) -> Self {\n\n Self::Number(v)\n\n }\n\n}\n\n\n\nimpl TryInto<f64> for JsonValue {\n\n type Error = ();\n", "file_path": "src/json.rs", "rank": 22, "score": 8.23455904739302 }, { "content": "#![allow(dead_code)]\n\nuse std::{str::FromStr, fmt::Display};\n\n\n\nuse crate::error::{JsonParseError, ParseErrorKind};\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct JsonNumber {\n\n pub(crate) integer: i128,\n\n pub(crate) frac: f64,\n\n pub(crate) exp: i128\n\n}\n\n\n", "file_path": "src/json.rs", "rank": 23, "score": 7.4441448046081575 }, { "content": " false\n\n }\n\n }\n\n\n\n pub fn is_bool(&self) -> bool {\n\n if let JsonValue::Boolean(_) = self {\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n\n\n pub fn is_null(&self) -> bool {\n\n match self {\n\n JsonValue::Null => true,\n\n _ => false\n\n }\n\n }\n\n\n\n pub fn is_array(&self) -> bool {\n", "file_path": "src/json.rs", "rank": 24, "score": 7.3652993777361475 }, { "content": " },\n\n JsonToken::WhiteSpace(_) => {\n\n //self.next();\n\n return self.next_value();\n\n },\n\n 
JsonToken::Number(number) => {\n\n let number = number.to_string();\n\n match number.parse() {\n\n Ok(v) => {\n\n //self.next();\n\n return Ok(Some(JsonValue::Number(v)));\n\n },\n\n Err(e) => {\n\n return Err(JsonParseError{ kind: ParseErrorKind::ParseFloatError(e)});\n\n }\n\n }\n\n },\n\n JsonToken::True => {\n\n return Ok(Some(JsonValue::Boolean(true)));\n\n },\n", "file_path": "src/parser.rs", "rank": 25, "score": 6.961077319912879 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub enum JsonToken {\n\n BeginArray,\n\n BeginObject,\n\n EndArray,\n\n EndObject,\n\n NameSeparator,\n\n ValueSeparator,\n\n Digit(char),\n\n Number(JsonNumberToken),\n\n DecimalPoint,\n\n WhiteSpace(char),\n\n Exponent,\n\n Minus,\n\n Plus,\n\n True,\n\n False,\n", "file_path": "src/lexer.rs", "rank": 26, "score": 6.64624945815795 }, { "content": " return crate::parser::JsonParser::new(tokens).get_value();\n\n },\n\n Err(e) => {\n\n return Err( JsonParseError { kind: ParseErrorKind::LexError(e)});\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct JsonObject {\n\n pub name: String,\n\n pub value: JsonValue\n\n} \n\n\n\nimpl JsonObject {\n\n pub fn new<T>(name: &str, value: T) -> JsonObject where T: Into<JsonValue> {\n\n JsonObject {name: name.to_string() , value: value.into()}\n\n }\n\n}\n", "file_path": "src/json.rs", "rank": 27, "score": 6.059042102273233 }, { "content": "\n\nimpl Display for JsonValue {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n Self::Number(n) => write!(f, \"{}\", n),\n\n Self::String(s) => write!(f, \"\\\"{}\\\"\", s),\n\n Self::Objects(o) => write!(f, \"{{{}}}\", o.iter().map(|v| v.to_string()).collect::<Vec<_>>().join(\",\")),\n\n Self::Boolean(b) => write!(f, \"{}\", b),\n\n Self::Array(a) => write!(f, \"[{}]\", a.iter().map(|v| v.to_string()).collect::<Vec<_>>().join(\",\")),\n\n Self::Null => write!(f, \"null\"),\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for JsonValue 
{\n\n type Err = crate::error::JsonParseError;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let tokens = crate::lexer::JsonLexer::new(s).tokenize();\n\n match tokens {\n\n Ok(tokens) => {\n", "file_path": "src/json.rs", "rank": 28, "score": 5.9884429889762 }, { "content": "use crate::json::{JsonValue, JsonObject};\n\nuse crate::error::{JsonParseError, ParseErrorKind};\n\nuse crate::lexer::*;\n\n\n\npub struct JsonParser {\n\n tokens : Vec<JsonToken>,\n\n position : usize\n\n}\n\n\n\nimpl JsonParser {\n\n pub fn new(tokens: JsonTokens) -> JsonParser {\n\n JsonParser { tokens : tokens.tokens, position: 0 }\n\n }\n\n\n\n fn peek(&self) -> Option<&JsonToken> {\n\n self.tokens.get(self.position)\n\n }\n\n\n\n fn next(&mut self) -> Option<&JsonToken> {\n\n let current = self.tokens.get(self.position);\n", "file_path": "src/parser.rs", "rank": 29, "score": 5.915538789566682 }, { "content": "use std::fmt::Display;\n\n\n\nuse crate::error::{ JsonLexerError, LexErrorKind };\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub enum JsonCharToken {\n\n UnEscaped(String),\n\n Escape(String),\n\n Unicode(u16)\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub struct JsonNumberToken {\n\n is_minus: bool,\n\n integer: String,\n\n frac: String,\n\n exp: String\n\n}\n\n\n\nimpl Display for JsonNumberToken {\n", "file_path": "src/lexer.rs", "rank": 30, "score": 5.691150412674839 }, { "content": " fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let mut s = String::new();\n\n if self.is_minus { s.push('-'); }\n\n s.push_str(&self.integer);\n\n if !self.frac.is_empty() { s.push('.'); }\n\n s.push_str(&self.frac);\n\n if !self.exp.is_empty() {s.push('E'); }\n\n s.push_str(&self.exp);\n\n write!(f, \"{}\", s)\n\n }\n\n}\n\n\n\nimpl JsonNumberToken {\n\n pub fn new(is_minus: bool) -> JsonNumberToken {\n\n JsonNumberToken { \n\n is_minus, \n\n integer: String::new(), \n\n frac: String::new(), \n\n exp: String::new() \n\n }\n", "file_path": "src/lexer.rs", 
"rank": 31, "score": 5.50647981989857 }, { "content": "}\n\n\n\nimpl From<JsonObject> for JsonValue {\n\n fn from(o: JsonObject) -> Self {\n\n Self::Objects(Vec::from([o]))\n\n }\n\n}\n\n\n\nimpl From<Vec<JsonObject>> for JsonValue {\n\n fn from(o: Vec<JsonObject>) -> Self {\n\n Self::Objects(o)\n\n }\n\n}\n\n\n\nimpl From<bool> for JsonValue {\n\n fn from(b: bool) -> Self {\n\n Self::Boolean(b)\n\n }\n\n}\n\n\n", "file_path": "src/json.rs", "rank": 32, "score": 5.475928835155516 }, { "content": "# crystalrake\n", "file_path": "README.md", "rank": 33, "score": 5.3698707593994595 }, { "content": " JsonToken::False => {\n\n return Ok(Some(JsonValue::Boolean(false)));\n\n },\n\n JsonToken::Null => {\n\n return Ok(Some(JsonValue::Null));\n\n },\n\n JsonToken::String(s) => {\n\n return JsonParser::token_to_string(s); \n\n },\n\n _ => {\n\n return Err(JsonParseError{ kind: ParseErrorKind::InvalidToken}); \n\n }\n\n }\n\n }\n\n Ok(None)\n\n }\n\n\n\n fn get_object(&mut self) -> Result<JsonObject, JsonParseError> {\n\n while let Some(token) = self.peek() {\n\n match token {\n", "file_path": "src/parser.rs", "rank": 34, "score": 5.284651202729665 }, { "content": "impl From<Vec<Self>> for JsonValue {\n\n fn from(v: Vec<Self>) -> Self {\n\n Self::Array(v)\n\n }\n\n}\n\n\n\nimpl<T> From<Option<T>> for JsonValue where T: Into<JsonValue> {\n\n fn from(op: Option<T>) -> Self {\n\n match op {\n\n Some(o) => o.into(),\n\n None => JsonValue::Null\n\n }\n\n }\n\n}\n\n\n\nimpl<T> From<(String, T)> for JsonValue where T: Into<JsonValue> {\n\n fn from(value: (String, T)) -> Self {\n\n Self::Objects(Vec::from([value.into()]))\n\n }\n\n}\n", "file_path": "src/json.rs", "rank": 35, "score": 5.2604360483129184 }, { "content": " },\n\n JsonToken::EndObject => {\n\n self.next();\n\n return Ok(Some(JsonValue::Objects(objects)));\n\n },\n\n _ => {\n\n objects.push(self.get_object()?);\n\n while let Some(token) = self.peek() {\n\n match token {\n\n JsonToken::WhiteSpace(_) => { \n\n 
self.next();\n\n continue; \n\n },\n\n JsonToken::ValueSeparator => {\n\n self.next();\n\n objects.push(self.get_object()?);\n\n },\n\n JsonToken::EndObject => {\n\n self.next();\n\n return Ok(Some(JsonValue::Objects(objects)));\n", "file_path": "src/parser.rs", "rank": 36, "score": 5.155930558336472 }, { "content": "\n\nimpl<T> From<(&str, T)> for JsonObject where T: Into<JsonValue> {\n\n fn from(value: (&str, T)) -> Self {\n\n Self { name: value.0.to_string(), value : value.1.into() }\n\n }\n\n}\n\n\n\nimpl<T> From<(String, T)> for JsonObject where T: Into<JsonValue> {\n\n fn from(value: (String, T)) -> Self {\n\n Self { name: value.0, value : value.1.into() }\n\n }\n\n}\n\n\n\nimpl Display for JsonObject {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"\\\"{}\\\":{}\", self.name, self.value.to_string())\n\n }\n\n}", "file_path": "src/json.rs", "rank": 37, "score": 5.100000050939139 }, { "content": "\n\n fn try_into(self) -> Result<f64, Self::Error> {\n\n if let JsonValue::Number(n) = self {\n\n Ok(n)\n\n } else {\n\n Err(())\n\n }\n\n }\n\n}\n\n\n\nimpl From<&str> for JsonValue {\n\n fn from(s: &str) -> Self {\n\n Self::String(s.to_string())\n\n }\n\n}\n\n\n\nimpl From<String> for JsonValue {\n\n fn from(s: String) -> Self {\n\n Self::String(s)\n\n }\n", "file_path": "src/json.rs", "rank": 38, "score": 4.125233209548064 }, { "content": " JsonLexer { json_chars: json_string.chars().peekable() }\n\n }\n\n \n\n pub fn tokenize(&mut self) -> Result<JsonTokens, JsonLexerError> {\n\n let mut tokens = Vec::new();\n\n \n\n while let Some(token) = self.next_token()? 
{\n\n match token {\n\n JsonToken::Digit(d) => {\n\n let mut num = JsonNumberToken::new(false);\n\n num.integer.push(d);\n\n let next_token = self.number_token(&mut num)?;\n\n tokens.push(JsonToken::Number(num));\n\n if let Some(token) = next_token {\n\n tokens.push(token);\n\n }\n\n },\n\n JsonToken::Minus => {\n\n if let Some(JsonToken::Digit(d)) = self.next_token()? {\n\n let mut num = JsonNumberToken::new(true);\n", "file_path": "src/lexer.rs", "rank": 39, "score": 3.851475462671325 }, { "content": " }\n\n }\n\n if !utf16.is_empty() {\n\n match String::from_utf16(&utf16) {\n\n Ok(utf16_str) => {\n\n buf.push_str(&utf16_str);\n\n },\n\n Err(e) => {\n\n return Err( JsonParseError{ kind: ParseErrorKind::FromUtf16Error(e)});\n\n }\n\n }\n\n }\n\n return Ok(Some(JsonValue::String(buf))); \n\n }\n\n\n\n pub fn get_value(&mut self) -> Result<JsonValue, JsonParseError> {\n\n let mut ret = None;\n\n while let Some(value) = self.next_value()? {\n\n if ret.is_none() {\n\n ret = Some(value);\n", "file_path": "src/parser.rs", "rank": 40, "score": 3.797135003794613 }, { "content": " }\n\n }\n\n Err(JsonLexerError { kind: LexErrorKind::NonQuotationMark })\n\n }\n\n\n\n fn number_token(&mut self, number : &mut JsonNumberToken) -> Result<Option<JsonToken>, JsonLexerError> {\n\n while let Some(token) = self.next_token()? {\n\n match token {\n\n JsonToken::Digit(d) => {\n\n number.integer.push(d);\n\n },\n\n JsonToken::DecimalPoint =>{\n\n if let Some(JsonToken::Digit(d)) = self.next_token()? {\n\n number.frac.push(d);\n\n while let Some(token) = self.next_token()? 
{\n\n match token {\n\n JsonToken::Digit(d) => {\n\n number.frac.push(d);\n\n },\n\n JsonToken::Exponent => {\n", "file_path": "src/lexer.rs", "rank": 41, "score": 3.5728426236194215 }, { "content": " ParseFloatError(ParseFloatError),\n\n LexError(JsonLexerError),\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct JsonParseError {\n\n pub(crate) kind: ParseErrorKind,\n\n}\n\n\n\nimpl JsonParseError {\n\n pub fn kind(&self) -> &ParseErrorKind {\n\n &self.kind\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for JsonParseError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match &self.kind {\n\n ParseErrorKind::InvalidToken => write!(f, \"invalid token found in JSON\"),\n\n ParseErrorKind::NonValue => write!(f, \"expect some value, but cannot find any JSON value\"),\n", "file_path": "src/error.rs", "rank": 42, "score": 3.511235285232972 }, { "content": " },\n\n _ => {\n\n \n\n return Err( JsonParseError{ kind: ParseErrorKind::InvalidToken} );\n\n }\n\n }\n\n }\n\n }\n\n\n\n }\n\n }\n\n return Ok(Some(JsonValue::Objects(objects)));\n\n },\n\n JsonToken::BeginArray => {\n\n let mut values = Vec::new(); \n\n while let Some(token) = self.peek() {\n\n match token {\n\n JsonToken::WhiteSpace(_) => {\n\n self.next();\n\n continue;\n", "file_path": "src/parser.rs", "rank": 43, "score": 3.4171702763934815 }, { "content": " ParseErrorKind::NonEndObject => write!(f, \"expect end-of-object '}}', but cannot find any right curly bracket\"),\n\n ParseErrorKind::NonEndArray => write!(f, \"expect end-of-array ']', but cannot find right square bracket\"),\n\n ParseErrorKind::NoObjectName => write!(f, \"cannot find any object name\"),\n\n ParseErrorKind::FromUtf16Error(e) => e.fmt(f),\n\n ParseErrorKind::ParseFloatError(e) => e.fmt(f),\n\n ParseErrorKind::LexError(e) => e.fmt(f),\n\n }\n\n }\n\n}\n\n\n\nimpl std::error::Error for JsonParseError {\n\n \n\n}", "file_path": "src/error.rs", "rank": 44, "score": 3.2163684487069593 }, { "content": " }\n\n}\n\n\n\nimpl 
std::error::Error for JsonLexerError {}\n\n\n\n/// Enums to store the various types of errors that can cause parsing a JSON to fail.\n\n#[derive(Debug)]\n\npub enum ParseErrorKind {\n\n /// Contains an invalid token in a JSON.\n\n InvalidToken,\n\n /// Could not find any JSON value.\n\n NonValue,\n\n /// Found left curly brancket, but could not find right curly brancket.\n\n NonEndObject,\n\n /// Found left square brancket, but could not find right square brancket.\n\n NonEndArray,\n\n /// Could not find any JSON object name.\n\n NoObjectName,\n\n /// A possible error value when converting a String from a UTF-16 byte slice.\n\n FromUtf16Error(FromUtf16Error),\n", "file_path": "src/error.rs", "rank": 45, "score": 3.102531950785847 }, { "content": " },\n\n JsonToken::EndArray => {\n\n self.next();\n\n return Ok(Some(JsonValue::Array(values)));\n\n },\n\n _ => {\n\n if let Some(value) = self.next_value()? {\n\n values.push(value);\n\n } else {\n\n return Ok(Some(JsonValue::Array(values)));\n\n }\n\n\n\n while let Some(token) = self.peek() {\n\n match token {\n\n JsonToken::WhiteSpace(_) => {\n\n self.next();\n\n continue;\n\n },\n\n JsonToken::EndArray =>{\n\n self.next();\n", "file_path": "src/parser.rs", "rank": 46, "score": 3.0616565648688856 }, { "content": " num.integer.push(d);\n\n\n\n let next_token = self.number_token(&mut num)?;\n\n tokens.push(JsonToken::Number(num));\n\n if let Some(token) = next_token {\n\n tokens.push(token);\n\n }\n\n } else {\n\n return Err(JsonLexerError { kind: LexErrorKind::NotDigit });\n\n }\n\n \n\n },\n\n _ => tokens.push(token),\n\n }\n\n }\n\n\n\n Ok(JsonTokens { tokens })\n\n }\n\n\n\n fn next_return_token(&mut self, token: JsonToken) -> Option<JsonToken> {\n", "file_path": "src/lexer.rs", "rank": 47, "score": 2.958641162327601 }, { "content": " return Ok(Some(JsonValue::Array(values)));\n\n },\n\n JsonToken::ValueSeparator => {\n\n self.next();\n\n if let Some(value) = self.next_value()? 
{\n\n values.push(value);\n\n } else {\n\n return Err( JsonParseError{ kind: ParseErrorKind::NonValue} );\n\n }\n\n },\n\n _ => {\n\n return Err( JsonParseError{ kind: ParseErrorKind::InvalidToken} );\n\n }\n\n }\n\n }\n\n return Err( JsonParseError{ kind: ParseErrorKind::NonEndArray} );\n\n }\n\n }\n\n }\n\n unreachable!()\n", "file_path": "src/parser.rs", "rank": 48, "score": 2.89706775998793 }, { "content": " JsonToken::WhiteSpace(_) => {\n\n self.next();\n\n continue;\n\n },\n\n JsonToken::String(s) => {\n\n if let Some(JsonValue::String(key)) = JsonParser::token_to_string(s)?{ \n\n self.next();\n\n while let Some(token) = self.peek() {\n\n match token {\n\n JsonToken::WhiteSpace(_) => { \n\n self.next();\n\n continue;\n\n },\n\n JsonToken::NameSeparator => {\n\n self.next();\n\n return Ok(JsonObject::new(&key, self.next_value()?));\n\n },\n\n _ => {\n\n return Err(JsonParseError{ kind: ParseErrorKind::InvalidToken}); \n\n },\n", "file_path": "src/parser.rs", "rank": 49, "score": 2.7228556663908448 }, { "content": " } else {\n\n return Err( JsonParseError{ kind: ParseErrorKind::InvalidToken} );\n\n }\n\n }\n\n match ret {\n\n Some(v) => Ok(v),\n\n None => Err( JsonParseError{ kind: ParseErrorKind::InvalidToken} )\n\n }\n\n }\n\n\n\n fn next_value(&mut self) -> Result<Option<JsonValue>, JsonParseError> {\n\n while let Some(token) = self.next() {\n\n match token {\n\n JsonToken::BeginObject => {\n\n let mut objects = Vec::new();\n\n while let Some(token) = self.peek() {\n\n match token {\n\n JsonToken::WhiteSpace(_) => {\n\n self.next();\n\n continue;\n", "file_path": "src/parser.rs", "rank": 50, "score": 2.5780044665670037 }, { "content": " let sign = self.next_token()?;\n\n if let Some(JsonToken::Minus) = sign {\n\n number.exp.push('-');\n\n } else if let Some(JsonToken::Plus) = sign {\n\n number.exp.push('+');\n\n } \n\n while let Some(token) = self.next_token()? 
{\n\n match token {\n\n JsonToken::Digit(d) => number.exp.push(d),\n\n _=> {\n\n return Ok(Some(token));\n\n }\n\n }\n\n }\n\n }\n\n _ => {\n\n return Ok(Some(token));\n\n }\n\n }\n\n }\n", "file_path": "src/lexer.rs", "rank": 51, "score": 2.507979607566227 }, { "content": " break;\n\n } else {\n\n return Err(JsonLexerError { kind: LexErrorKind::NonFracDigit });\n\n }\n\n },\n\n JsonToken::Exponent =>{\n\n let sign = self.next_token()?;\n\n if let Some(JsonToken::Minus) = sign {\n\n number.exp.push('-');\n\n } else if let Some(JsonToken::Plus) = sign {\n\n number.exp.push('+');\n\n } \n\n while let Some(token) = self.next_token()? {\n\n match token {\n\n JsonToken::Digit(d) => number.exp.push(d),\n\n _=> {\n\n return Ok(Some(token));\n\n }\n\n }\n\n }\n", "file_path": "src/lexer.rs", "rank": 52, "score": 2.2236447101968 }, { "content": "pub mod json;\n\nmod error;\n\nmod parser;\n\nmod lexer;\n\n\n\n#[test]\n", "file_path": "src/lib.rs", "rank": 53, "score": 2.2170396783438857 }, { "content": " }\n\n }\n\n }\n\n return Err(JsonParseError{ kind: ParseErrorKind::NoObjectName}); \n\n },\n\n _ => {\n\n return Err(JsonParseError{ kind: ParseErrorKind::InvalidToken}); \n\n }\n\n }\n\n }\n\n unreachable!()\n\n }\n\n}", "file_path": "src/parser.rs", "rank": 54, "score": 1.73040794002931 }, { "content": " self.json_chars.next();\n\n Some(token)\n\n }\n\n \n\n fn next_token(&mut self) -> Result<Option<JsonToken>, JsonLexerError> {\n\n match self.json_chars.peek() {\n\n Some(&c) => match c {\n\n ' ' | '\\n' | '\\t' | '\\r' => {\n\n Ok(self.next_return_token(JsonToken::WhiteSpace(c)))\n\n },\n\n '{' => {\n\n Ok(self.next_return_token(JsonToken::BeginObject))\n\n },\n\n '}' => {\n\n Ok(self.next_return_token(JsonToken::EndObject))\n\n },\n\n '[' => {\n\n Ok(self.next_return_token(JsonToken::BeginArray))\n\n },\n\n ']' => {\n", "file_path": "src/lexer.rs", "rank": 55, "score": 1.6840577273499533 }, { "content": " self.position += 1;\n\n current\n\n }\n\n\n\n // fn 
skip_whitespace(&mut self) {\n\n // while let Some(token) = self.peek() {\n\n // match token {\n\n // JsonToken::WhiteSpace(_) => {\n\n // self.next();\n\n // continue;\n\n // },\n\n // _ => {\n\n // break;\n\n // }\n\n // }\n\n // }\n\n // }\n\n\n\n fn token_to_string(s : &Vec<JsonCharToken>) -> Result<Option<JsonValue>, JsonParseError> {\n\n let mut buf = String::new();\n", "file_path": "src/parser.rs", "rank": 56, "score": 1.3373642301966342 }, { "content": " Null,\n\n //QuotationMark,\n\n String(Vec<JsonCharToken>)\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct JsonTokens {\n\n pub tokens: Vec<JsonToken>\n\n}\n\n\n\nimpl JsonTokens {\n\n // pub fn ignore_whitespace(self) -> JsonTokens {\n\n // let mut tokens = Vec::with_capacity(self.tokens.len());\n\n // for token in self.tokens {\n\n // match token {\n\n // JsonToken::WhiteSpace(_) => {},\n\n // _ => tokens.push(token),\n\n // }\n\n \n\n // }\n", "file_path": "src/lexer.rs", "rank": 57, "score": 1.2467129559842398 }, { "content": " },\n\n 'n' => {\n\n if self.json_chars.by_ref().take(4).eq(['n','u','l','l']) {\n\n Ok(Some(JsonToken::Null))\n\n } else {\n\n Err(JsonLexerError { kind: LexErrorKind::InvalidChar(c) })\n\n }\n\n },\n\n _ => {\n\n Err(JsonLexerError { kind: LexErrorKind::InvalidChar(c) })\n\n }\n\n },\n\n None => {\n\n Ok(None)\n\n }\n\n }\n\n }\n\n\n\n fn string_token(&mut self) -> Result<Option<JsonToken>, JsonLexerError> {\n\n let mut chars = Vec::new();\n", "file_path": "src/lexer.rs", "rank": 58, "score": 1.1957882210196793 }, { "content": " let mut utf16 = Vec::new();\n\n for c in s {\n\n match c {\n\n JsonCharToken::Escape(c) | JsonCharToken::UnEscaped(c) =>{\n\n if !utf16.is_empty() {\n\n match String::from_utf16(&utf16) {\n\n Ok(utf16_str) => {\n\n buf.push_str(&utf16_str);\n\n utf16.clear();\n\n },\n\n Err(e) => {\n\n return Err( JsonParseError{ kind: ParseErrorKind::FromUtf16Error(e)});\n\n }\n\n }\n\n } \n\n buf.push_str(c);\n\n },\n\n JsonCharToken::Unicode(c) => {\n\n utf16.push(*c);\n\n 
}\n", "file_path": "src/parser.rs", "rank": 59, "score": 1.1467665475943618 }, { "content": " },\n\n 'e' | 'E' => {\n\n Ok(self.next_return_token(JsonToken::Exponent))\n\n },\n\n '0'..='9' => {\n\n Ok(self.next_return_token(JsonToken::Digit(c)))\n\n },\n\n 't' => {\n\n if self.json_chars.by_ref().take(4).eq(['t','r','u','e']) {\n\n Ok(Some(JsonToken::True))\n\n } else {\n\n Err(JsonLexerError { kind: LexErrorKind::InvalidChar(c) })\n\n }\n\n },\n\n 'f' => {\n\n if self.json_chars.by_ref().take(5).eq(['f','a','l','s','e']) {\n\n Ok(Some(JsonToken::False))\n\n } else {\n\n Err(JsonLexerError { kind: LexErrorKind::InvalidChar(c) })\n\n }\n", "file_path": "src/lexer.rs", "rank": 60, "score": 1.0875642954564047 }, { "content": " Ok(self.next_return_token(JsonToken::EndArray))\n\n },\n\n ':' => {\n\n Ok(self.next_return_token(JsonToken::NameSeparator))\n\n },\n\n ',' => {\n\n Ok(self.next_return_token(JsonToken::ValueSeparator))\n\n },\n\n '\\\"' => {\n\n self.json_chars.next();\n\n self.string_token()\n\n },\n\n '-' => {\n\n Ok(self.next_return_token(JsonToken::Minus))\n\n },\n\n '+' => {\n\n Ok(self.next_return_token(JsonToken::Plus))\n\n },\n\n '.' 
=> {\n\n Ok(self.next_return_token(JsonToken::DecimalPoint))\n", "file_path": "src/lexer.rs", "rank": 61, "score": 1.0304902850240354 }, { "content": "use std::{num::{ParseIntError, ParseFloatError}, string::FromUtf16Error};\n\n\n\n/// Enum to store the various types of errors that can cause tokenizing a JSON to fail.\n\n#[derive(Debug)]\n\npub enum LexErrorKind {\n\n /// Contains an invalid char in a JSON.\n\n InvalidChar(char),\n\n /// Could not find a pair of quotation marks in string.\n\n NonQuotationMark,\n\n /// Could not find after minus sign.\n\n NotDigit,\n\n /// Fraction part didn't contain any digit.\n\n NonFracDigit,\n\n /// Found an invalid char after reverse solidus.\n\n NotEscapeChar,\n\n /// Found `\"\\uXXXX\"`(X is a hex digit) from JSON, but `XXXX` could not parse to `u16`.\n\n ParseError(ParseIntError)\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/error.rs", "rank": 62, "score": 0.8376559041600395 }, { "content": "pub struct JsonLexerError {\n\n pub(crate) kind: LexErrorKind,\n\n}\n\n\n\nimpl JsonLexerError {\n\n pub fn kind(&self) -> &LexErrorKind {\n\n &self.kind\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for JsonLexerError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self.kind() {\n\n LexErrorKind::InvalidChar(c) => write!(f, \"invalid charactor '{}' found from JSON\", c),\n\n LexErrorKind::NonQuotationMark => write!(f, \"cannot find a pair of quotation-mark from JSON string\"),\n\n LexErrorKind::NonFracDigit => write!(f, \"cannot find any digit after decimal-point\"),\n\n LexErrorKind::NotDigit => write!(f, \"cannot find any digit after minus sign\"),\n\n LexErrorKind::NotEscapeChar => write!(f, \"invalid charactor found after reverse solidus\"),\n\n LexErrorKind::ParseError(_) => write!(f, \"cannot parse hex digit string to u16\"),\n\n }\n", "file_path": "src/error.rs", "rank": 63, "score": 0.670583603379789 } ]
Rust
src/chopper/types.rs
snaar/drat
8c9c1e2e413d8a93f3fd50df852915a27cb37e6f
use std::cmp::Ordering; use std::fmt; use ndarray::ArrayD; use crate::chopper::error::{Error, Result}; use crate::util::timestamp_util; use crate::util::tz::ChopperTz; pub type ChainId = usize; pub type NodeId = usize; pub type Nanos = u64; #[derive(Copy, Clone)] pub struct TimestampRange { pub begin: Option<Nanos>, pub end: Option<Nanos>, } pub static TIMESTAMP_RANGE_ALL: TimestampRange = TimestampRange { begin: None, end: None, }; impl TimestampRange { pub fn new( begin: Option<impl AsRef<str>>, end: Option<impl AsRef<str>>, timezone: &ChopperTz, ) -> Result<Self> { let begin = match begin { Some(t) => Some(timestamp_util::parse_datetime_range_element( t.as_ref(), timezone, )?), None => None, }; let end = match end { Some(t) => Some(timestamp_util::parse_datetime_range_element( t.as_ref(), timezone, )?), None => None, }; Ok(TimestampRange { begin, end }) } } #[derive(Clone, Debug)] pub struct Header { field_names: Vec<String>, field_types: Vec<FieldType>, } impl Header { pub fn generate_default_field_names(field_count: usize) -> Vec<String> { let mut field_names: Vec<String> = Vec::new(); for i in 0..field_count { field_names.push(format!("col_{}", i)); } field_names } } impl PartialEq for Header { fn eq(&self, other: &Header) -> bool { self.field_names().eq(other.field_names()) && self.field_types().eq(other.field_types()) } } impl Header { pub fn new(field_names: Vec<String>, field_types: Vec<FieldType>) -> Self { Header { field_names, field_types, } } pub fn field_names(&self) -> &Vec<String> { &self.field_names } pub fn update_field_names(&mut self, new_names: Vec<String>) { self.field_names = new_names; } pub fn field_types(&self) -> &Vec<FieldType> { &self.field_types } pub fn update_field_types(&mut self, new_types: Vec<FieldType>) { self.field_types = new_types; } pub fn field_names_mut(&mut self) -> &mut Vec<String> { &mut self.field_names } pub fn field_types_mut(&mut self) -> &mut Vec<FieldType> { &mut self.field_types } pub fn field_index(&self, name: 
&str) -> Result<usize> { match self.field_names.iter().position(|s| s == name) { None => Err(Error::ColumnMissing(name.to_string())), Some(i) => Ok(i), } } } #[derive(Clone, Debug, PartialEq)] pub enum FieldValue { Boolean(bool), Byte(u8), ByteBuf(Vec<u8>), Char(u16), Double(f64), Float(f32), Int(i32), Long(i64), Short(i16), String(String), MultiDimDoubleArray(ArrayD<f64>), None, } impl PartialOrd for FieldValue { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { match self { FieldValue::Boolean(v) => { if let FieldValue::Boolean(o) = other { v.partial_cmp(o) } else { None } } FieldValue::Byte(v) => { if let FieldValue::Byte(o) = other { v.partial_cmp(o) } else { None } } FieldValue::ByteBuf(v) => { if let FieldValue::ByteBuf(o) = other { v.partial_cmp(o) } else { None } } FieldValue::Char(v) => { if let FieldValue::Char(o) = other { v.partial_cmp(o) } else { None } } FieldValue::Double(v) => { if let FieldValue::Double(o) = other { v.partial_cmp(o) } else { None } } FieldValue::Float(v) => { if let FieldValue::Float(o) = other { v.partial_cmp(o) } else { None } } FieldValue::Int(v) => { if let FieldValue::Int(o) = other { v.partial_cmp(o) } else { None } } FieldValue::Long(v) => { if let FieldValue::Long(o) = other { v.partial_cmp(o) } else { None } } FieldValue::Short(v) => { if let FieldValue::Short(o) = other { v.partial_cmp(o) } else { None } } FieldValue::String(v) => { if let FieldValue::String(o) = other { v.partial_cmp(o) } else { None } } FieldValue::MultiDimDoubleArray(_) => None, FieldValue::None => { if &FieldValue::None == other { Some(Ordering::Equal) } else { None } } } } } impl fmt::Display for FieldValue { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match &self { FieldValue::Boolean(x) => f.write_str(format!("bool[{}]", x).as_str()), FieldValue::Byte(x) => f.write_str(format!("byte[{}]", x).as_str()), FieldValue::ByteBuf(x) => f.write_str(format!("u8[len={}]", x.len()).as_str()), FieldValue::Char(x) => 
f.write_str(format!("char[{}]", x).as_str()), FieldValue::Double(x) => f.write_str(format!("double[{}]", x).as_str()), FieldValue::Float(x) => f.write_str(format!("float[{}]", x).as_str()), FieldValue::Int(x) => f.write_str(format!("int[{}]", x).as_str()), FieldValue::Long(x) => f.write_str(format!("long[{}]", x).as_str()), FieldValue::Short(x) => f.write_str(format!("short[{}]", x).as_str()), FieldValue::String(x) => f.write_str(format!("string[{}]", x.as_str()).as_str()), FieldValue::MultiDimDoubleArray(x) => { f.write_str("f64[shape=(")?; f.write_str( &x.shape() .iter() .map(|d| d.to_string()) .collect::<Vec<String>>() .join("x"), )?; f.write_str(")]")?; Ok(()) } FieldValue::None => f.write_str("none[]"), } } } impl FieldValue { pub fn string_field_to_float(&self) -> Result<f64> { if let FieldValue::String(string) = self { let float = string.parse::<f64>().unwrap(); Ok(float) } else { Err(Error::from("expected FieldValue::String")) } } pub fn string_field_to_string(&self) -> Result<&str> { if let FieldValue::String(string) = self { Ok(string) } else { Err(Error::from("expected FieldValue::String")) } } pub fn matches_field_type(&self, field_type: FieldType) -> bool { let self_type = match self { FieldValue::Boolean(_) => FieldType::Boolean, FieldValue::Byte(_) => FieldType::Byte, FieldValue::ByteBuf(_) => FieldType::ByteBuf, FieldValue::Char(_) => FieldType::Char, FieldValue::Double(_) => FieldType::Double, FieldValue::Float(_) => FieldType::Float, FieldValue::Int(_) => FieldType::Int, FieldValue::Long(_) => FieldType::Long, FieldValue::Short(_) => FieldType::Short, FieldValue::String(_) => FieldType::String, FieldValue::MultiDimDoubleArray(_) => FieldType::MultiDimDoubleArray, FieldValue::None => return true, }; self_type == field_type } } #[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)] pub enum FieldType { Boolean, Byte, ByteBuf, Char, Double, Float, Int, Long, Short, String, MultiDimDoubleArray, } #[derive(Clone, Debug, PartialEq)] pub struct Row { pub 
timestamp: Nanos, pub field_values: Vec<FieldValue>, } impl Row { pub fn empty() -> Row { Row { timestamp: 0, field_values: vec![], } } }
use std::cmp::Ordering; use std::fmt; use ndarray::ArrayD; use crate::chopper::error::{Error, Result}; use crate::util::timestamp_util; use crate::util::tz::ChopperTz; pub type ChainId = usize; pub type NodeId = usize; pub type Nanos = u64; #[derive(Copy, Clone)] pub struct TimestampRange { pub begin: Option<Nanos>, pub end: Option<Nanos>, } pub static TIMESTAMP_RANGE_ALL: TimestampRange = TimestampRange { begin: None, end: None, }; impl TimestampRange { pub fn new( begin: Option<impl AsRef<str>>, end: Option<impl AsRef<str>>, timezone: &ChopperTz, ) -> Result<Self> { let begin = match begin { Some(t) => Some(timestamp_util::parse_datetime_range_element( t.as_ref(), timezone, )?), None => None, }; let end = match end { Some(t) => Some(timestamp_util::parse_datetime_range_element( t.as_ref(), timezone, )?), None => None, }; Ok(TimestampRange { begin, end }) } } #[derive(Clone, Debug)] pub struct Header { field_names: Vec<String>, field_types: Vec<FieldType>, } impl Header { pub fn generate_default_field_names(field_count: usize) -> Vec<String> { let mut field_names: Vec<String> = Vec::new(); for i in 0..field_count { field_names.push(format!("col_{}", i)); } field_names } } impl PartialEq for Header { fn eq(&self, other: &Header) -> bool { self.field_names().eq(other.field_names()) && self.field_types().eq(other.field_types()) } } impl Header { pub fn new(field_names: Vec<String>, field_types: Vec<FieldType>) -> Self { Header { field_names, field_types, } } pub fn field_names(&self) -> &Vec<String> { &self.field_names } pub fn update_field_names(&mut self, new_names: Vec<String>) { self.field_names = new_names; } pub fn field_types(&self) -> &Vec<FieldType> { &self.field_types } pub fn update_field_types(&mut self, new_types: Vec<FieldType>) { self.field_types = new_types; } pub fn field_names_mut(&mut self) -> &mut Vec<String> { &mut self.field_names } pub fn field_types_mut(&mut self) -> &mut Vec<FieldType> { &mut self.field_types } pub fn field_index(&self, name: 
&str) -> Result<usize> { match self.field_names.iter().position(|s| s == name) { None => Err(Error::ColumnMissing(name.to_string())), Some(i) => Ok(i), } } } #[derive(Clone, Debug, PartialEq)] pub enum FieldValue { Boolean(bool), Byte(u8), ByteBuf(Vec<u8>), Char(u16), Double(f64), Float(f32), Int(i32), Long(i64), Short(i16), String(String), MultiDimDoubleArray(ArrayD<f64>), None, } impl PartialOrd for FieldValue { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { match self { FieldValue::Boolean(v) => { if let FieldValue::Boolean(o) = other { v.partial_cmp(o) } else { None } } FieldValue::Byte(v) => { if let FieldValue::Byte(o) = other { v.partial_cmp(o) } else { None } } FieldValue::ByteBuf(v) => { if let FieldValue::ByteBuf(o) = other { v.partial_cmp(o) } else { None } } FieldValue::Char(v) => { if let FieldValue::Char(o) = other { v.partial_cmp(o) } else { None } } FieldValue::Double(v) => { if let FieldValue::Double(o) = other { v.partial_cmp(o) } else { None } } FieldValue::Float(v) => { if let FieldValue::Float(o) = other { v.partial_cmp(o) } else { None } } FieldValue::Int(v) => { if let FieldValue::Int(o) = other { v.partial_cmp(o) } else { None } } FieldValue::Long(v) => { if let FieldValue::Long(o) = other { v.partial_cmp(o) } else { None } } FieldValue::Short(v) => { if let FieldValue::Short(o) = other { v.partial_cmp(o) } else { None } } FieldValue::String(v) => { if let FieldValue::String(o) = other { v.partial_cmp(o) } else { None } } FieldValue::MultiDimDoubleArray(_) => None, FieldValue::None => { if &FieldValue::None == other { Some(Ordering::Equal) } else { None } } } } } impl fmt::Display for FieldValue { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match &self { FieldValue::Boolean(x) => f.write_str(format!("bool[{}]", x).as_str()), FieldValue::Byte(x) => f.write_str(format!("byte[{}]", x).as_str()), FieldValue::ByteBuf(x) => f.write_str(format!("u8[len={}]", x.len()).as_str()), FieldValue::Char(x) => 
f.write_str(format!("char[{}]", x).as_str()), FieldValue::Double(x) => f.write_str(format!("double[{}]", x).as_str()), FieldValue::Float(x) => f.write_str(format!("float[{}]", x).as_str()), FieldValue::Int(x) => f.write_str(format!("int[{}]", x).as_str()), FieldValue::Long(x) => f.write_str(format!("long[{}]", x).as_str()), FieldValue::Short(x) => f.write_str(format!("short[{}]", x).as_str()), FieldValue::String(x) => f.write_str(format!("string[{}]", x.as_str()).as_str()), FieldValue::MultiDimDoubleArray(x) => { f.write_str("f64[shape=(")?; f.write_str( &x.shape() .iter() .map(|d| d.to_string()) .collect::<Vec<String>>() .join("x"), )?; f.write_str(")]")?; Ok(()) } FieldValue::None => f.write_str("none[]"), } } } impl FieldValue { pub fn string_field_to_float(&self) -> Result<f64> {
} pub fn string_field_to_string(&self) -> Result<&str> { if let FieldValue::String(string) = self { Ok(string) } else { Err(Error::from("expected FieldValue::String")) } } pub fn matches_field_type(&self, field_type: FieldType) -> bool { let self_type = match self { FieldValue::Boolean(_) => FieldType::Boolean, FieldValue::Byte(_) => FieldType::Byte, FieldValue::ByteBuf(_) => FieldType::ByteBuf, FieldValue::Char(_) => FieldType::Char, FieldValue::Double(_) => FieldType::Double, FieldValue::Float(_) => FieldType::Float, FieldValue::Int(_) => FieldType::Int, FieldValue::Long(_) => FieldType::Long, FieldValue::Short(_) => FieldType::Short, FieldValue::String(_) => FieldType::String, FieldValue::MultiDimDoubleArray(_) => FieldType::MultiDimDoubleArray, FieldValue::None => return true, }; self_type == field_type } } #[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)] pub enum FieldType { Boolean, Byte, ByteBuf, Char, Double, Float, Int, Long, Short, String, MultiDimDoubleArray, } #[derive(Clone, Debug, PartialEq)] pub struct Row { pub timestamp: Nanos, pub field_values: Vec<FieldValue>, } impl Row { pub fn empty() -> Row { Row { timestamp: 0, field_values: vec![], } } }
if let FieldValue::String(string) = self { let float = string.parse::<f64>().unwrap(); Ok(float) } else { Err(Error::from("expected FieldValue::String")) }
if_condition
[]
Rust
pallets/worker/src/prover.rs
edgeware-builders/substrate-eth-light-client
acdfa163ccbb1ccfb3b7f4681bd1ec05d706cd07
#![cfg_attr(not(feature = "std"), no_std)] use crate::*; use rlp::Rlp; pub trait Prover { fn extract_nibbles(a: Vec<u8>) -> Vec<u8>; fn concat_nibbles(a: Vec<u8>) -> Vec<u8>; fn assert_ethclient_hash( block_number: u64, expected_block_hash: H256, ) -> bool; fn verify_log_entry( log_index: u64, log_entry_data: Vec<u8>, receipt_index: u64, receipt_data: Vec<u8>, header_data: Vec<u8>, proof: Vec<Vec<u8>>, ) -> bool; fn verify_trie_proof( expected_root: H256, key: Vec<u8>, proof: Vec<Vec<u8>>, expected_value: Vec<u8>, ) -> bool; fn _verify_trie_proof( expected_root: H256, key: Vec<u8>, proof: Vec<Vec<u8>>, key_index: usize, proof_index: usize, expected_value: Vec<u8>, ) -> bool; } impl<T: Config> Prover for Module<T> { fn extract_nibbles(a: Vec<u8>) -> Vec<u8> { a.iter().flat_map(|b| vec![b >> 4, b & 0x0F]).collect() } fn concat_nibbles(a: Vec<u8>) -> Vec<u8> { a.iter() .enumerate() .filter(|(i, _)| i % 2 == 0) .zip(a.iter().enumerate().filter(|(i, _)| i % 2 == 1)) .map(|((_, x), (_, y))| (x << 4) | y) .collect() } fn assert_ethclient_hash( block_number: u64, expected_block_hash: H256, ) -> bool { match Self::block_hash_safe(block_number) { Some(hash) => hash == expected_block_hash, None => false, } } fn verify_log_entry( log_index: u64, log_entry_data: Vec<u8>, receipt_index: u64, receipt_data: Vec<u8>, header_data: Vec<u8>, proof: Vec<Vec<u8>>, ) -> bool { let log_entry: ethereum::Log = rlp::decode(log_entry_data.as_slice()).unwrap(); let receipt: ethereum::Receipt = rlp::decode(receipt_data.as_slice()).unwrap(); let header: ethereum::Header = rlp::decode(header_data.as_slice()).unwrap(); if receipt.logs[log_index as usize] == log_entry { return false; } let verification_result = Self::verify_trie_proof( header.receipts_root, rlp::encode(&receipt_index), proof, receipt_data, ); if !verification_result { return false; } Self::assert_ethclient_hash(header.number.as_u64(), header.hash()) } fn verify_trie_proof( expected_root: H256, key: Vec<u8>, proof: Vec<Vec<u8>>, 
expected_value: Vec<u8>, ) -> bool { let mut actual_key = vec![]; for el in key { if actual_key.len() + 1 == proof.len() { actual_key.push(el); } else { actual_key.push(el / 16); actual_key.push(el % 16); } } Self::_verify_trie_proof( expected_root, actual_key, proof, 0, 0, expected_value, ) } fn _verify_trie_proof( expected_root: H256, key: Vec<u8>, proof: Vec<Vec<u8>>, key_index: usize, proof_index: usize, expected_value: Vec<u8>, ) -> bool { let node = &proof[proof_index]; let dec = Rlp::new(&node.as_slice()); if key_index == 0 { if sp_io::hashing::keccak_256(node) != expected_root.0 { return false; } } else if node.len() < 32 { if dec.as_raw() != expected_root.0 { return false; } } else { if sp_io::hashing::keccak_256(node) != expected_root.0 { return false; } } if dec.iter().count() == 17 { if key_index == key.len() { if dec .at(dec.iter().count() - 1) .unwrap() .as_val::<Vec<u8>>() .unwrap() == expected_value { return true; } } else if key_index < key.len() { let new_expected_root = dec .at(key[key_index] as usize) .unwrap() .as_val::<Vec<u8>>() .unwrap(); let mut trunc_expected_root: [u8; 32] = [0; 32]; for i in 0..new_expected_root.len() { if i == 32 { break; } trunc_expected_root[i] = new_expected_root[i]; } if new_expected_root.len() != 0 { return Self::_verify_trie_proof( trunc_expected_root.into(), key, proof, key_index + 1, proof_index + 1, expected_value, ); } } else { panic!("This should not be reached if the proof has the correct format"); } } else if dec.iter().count() == 2 { let nibbles = Self::extract_nibbles( dec.at(0).unwrap().as_val::<Vec<u8>>().unwrap(), ); let (prefix, nibble) = (nibbles[0], nibbles[1]); if prefix == 2 { let key_end = &nibbles[2..]; if Self::concat_nibbles(key_end.to_vec()) == &key[key_index..] && expected_value == dec.at(1).unwrap().as_val::<Vec<u8>>().unwrap() { return true; } } else if prefix == 3 { let key_end = &nibbles[2..]; if nibble == key[key_index] && Self::concat_nibbles(key_end.to_vec()) == &key[key_index + 1..] 
&& expected_value == dec.at(1).unwrap().as_val::<Vec<u8>>().unwrap() { return true; } } else if prefix == 0 { let shared_nibbles = &nibbles[2..]; let extension_length = shared_nibbles.len(); if Self::concat_nibbles(shared_nibbles.to_vec()) == &key[key_index..key_index + extension_length] { let new_expected_root = dec.at(1).unwrap().as_val::<Vec<u8>>().unwrap(); let mut trunc_expected_root: [u8; 32] = [0; 32]; for i in 0..new_expected_root.len() { if i == 32 { break; } trunc_expected_root[i] = new_expected_root[i]; } return Self::_verify_trie_proof( trunc_expected_root.into(), key, proof, key_index + extension_length, proof_index + 1, expected_value, ); } } else if prefix == 1 { let shared_nibbles = &nibbles[2..]; let extension_length = 1 + shared_nibbles.len(); if nibble == key[key_index] && Self::concat_nibbles(shared_nibbles.to_vec()) == &key[key_index + 1..key_index + extension_length] { let new_expected_root = dec.at(1).unwrap().as_val::<Vec<u8>>().unwrap(); let mut trunc_expected_root: [u8; 32] = [0; 32]; for i in 0..new_expected_root.len() { if i == 32 { break; } trunc_expected_root[i] = new_expected_root[i]; } return Self::_verify_trie_proof( trunc_expected_root.into(), key, proof, key_index + extension_length, proof_index + 1, expected_value, ); } } else { panic!("This should not be reached if the proof has the correct format"); } } else { panic!("This should not be reached if the proof has the correct format"); } expected_value.len() == 0 } }
#![cfg_attr(not(feature = "std"), no_std)] use crate::*; use rlp::Rlp; pub trait Prover { fn extract_nibbles(a: Vec<u8>) -> Vec<u8>; fn concat_nibbles(a: Vec<u8>) -> Vec<u8>; fn assert_ethclient_hash( block_number: u64, expected_block_hash: H256, ) -> bool; fn verify_log_entry( log_index: u64, log_entry_data: Vec<u8>, receipt_index: u64, receipt_data: Vec<u8>, header_data: Vec<u8>, proof: Vec<Vec<u8>>, ) -> bool; fn verify_trie_proof( expected_root: H256, key: Vec<u8>, proof: Vec<Vec<u8>>, expected_value: Vec<u8>, ) -> bool; fn _verify_trie_proof( expected_root: H256, key: Vec<u8>, proof: Vec<Vec<u8>>, key_index: usize, proof_index: usize, expected_value: Vec<u8>, ) -> bool; } impl<T: Config> Prover for Module<T> { fn extract_nibbles(a: Vec<u8>) -> Vec<u8> { a.iter().flat_map(|b| vec![b >> 4, b & 0x0F]).collect() } fn concat_nibbles(a: Vec<u8>) -> Vec<u8> { a.ite
fn assert_ethclient_hash( block_number: u64, expected_block_hash: H256, ) -> bool { match Self::block_hash_safe(block_number) { Some(hash) => hash == expected_block_hash, None => false, } } fn verify_log_entry( log_index: u64, log_entry_data: Vec<u8>, receipt_index: u64, receipt_data: Vec<u8>, header_data: Vec<u8>, proof: Vec<Vec<u8>>, ) -> bool { let log_entry: ethereum::Log = rlp::decode(log_entry_data.as_slice()).unwrap(); let receipt: ethereum::Receipt = rlp::decode(receipt_data.as_slice()).unwrap(); let header: ethereum::Header = rlp::decode(header_data.as_slice()).unwrap(); if receipt.logs[log_index as usize] == log_entry { return false; } let verification_result = Self::verify_trie_proof( header.receipts_root, rlp::encode(&receipt_index), proof, receipt_data, ); if !verification_result { return false; } Self::assert_ethclient_hash(header.number.as_u64(), header.hash()) } fn verify_trie_proof( expected_root: H256, key: Vec<u8>, proof: Vec<Vec<u8>>, expected_value: Vec<u8>, ) -> bool { let mut actual_key = vec![]; for el in key { if actual_key.len() + 1 == proof.len() { actual_key.push(el); } else { actual_key.push(el / 16); actual_key.push(el % 16); } } Self::_verify_trie_proof( expected_root, actual_key, proof, 0, 0, expected_value, ) } fn _verify_trie_proof( expected_root: H256, key: Vec<u8>, proof: Vec<Vec<u8>>, key_index: usize, proof_index: usize, expected_value: Vec<u8>, ) -> bool { let node = &proof[proof_index]; let dec = Rlp::new(&node.as_slice()); if key_index == 0 { if sp_io::hashing::keccak_256(node) != expected_root.0 { return false; } } else if node.len() < 32 { if dec.as_raw() != expected_root.0 { return false; } } else { if sp_io::hashing::keccak_256(node) != expected_root.0 { return false; } } if dec.iter().count() == 17 { if key_index == key.len() { if dec .at(dec.iter().count() - 1) .unwrap() .as_val::<Vec<u8>>() .unwrap() == expected_value { return true; } } else if key_index < key.len() { let new_expected_root = dec .at(key[key_index] as 
usize) .unwrap() .as_val::<Vec<u8>>() .unwrap(); let mut trunc_expected_root: [u8; 32] = [0; 32]; for i in 0..new_expected_root.len() { if i == 32 { break; } trunc_expected_root[i] = new_expected_root[i]; } if new_expected_root.len() != 0 { return Self::_verify_trie_proof( trunc_expected_root.into(), key, proof, key_index + 1, proof_index + 1, expected_value, ); } } else { panic!("This should not be reached if the proof has the correct format"); } } else if dec.iter().count() == 2 { let nibbles = Self::extract_nibbles( dec.at(0).unwrap().as_val::<Vec<u8>>().unwrap(), ); let (prefix, nibble) = (nibbles[0], nibbles[1]); if prefix == 2 { let key_end = &nibbles[2..]; if Self::concat_nibbles(key_end.to_vec()) == &key[key_index..] && expected_value == dec.at(1).unwrap().as_val::<Vec<u8>>().unwrap() { return true; } } else if prefix == 3 { let key_end = &nibbles[2..]; if nibble == key[key_index] && Self::concat_nibbles(key_end.to_vec()) == &key[key_index + 1..] && expected_value == dec.at(1).unwrap().as_val::<Vec<u8>>().unwrap() { return true; } } else if prefix == 0 { let shared_nibbles = &nibbles[2..]; let extension_length = shared_nibbles.len(); if Self::concat_nibbles(shared_nibbles.to_vec()) == &key[key_index..key_index + extension_length] { let new_expected_root = dec.at(1).unwrap().as_val::<Vec<u8>>().unwrap(); let mut trunc_expected_root: [u8; 32] = [0; 32]; for i in 0..new_expected_root.len() { if i == 32 { break; } trunc_expected_root[i] = new_expected_root[i]; } return Self::_verify_trie_proof( trunc_expected_root.into(), key, proof, key_index + extension_length, proof_index + 1, expected_value, ); } } else if prefix == 1 { let shared_nibbles = &nibbles[2..]; let extension_length = 1 + shared_nibbles.len(); if nibble == key[key_index] && Self::concat_nibbles(shared_nibbles.to_vec()) == &key[key_index + 1..key_index + extension_length] { let new_expected_root = dec.at(1).unwrap().as_val::<Vec<u8>>().unwrap(); let mut trunc_expected_root: [u8; 32] = [0; 32]; for 
i in 0..new_expected_root.len() { if i == 32 { break; } trunc_expected_root[i] = new_expected_root[i]; } return Self::_verify_trie_proof( trunc_expected_root.into(), key, proof, key_index + extension_length, proof_index + 1, expected_value, ); } } else { panic!("This should not be reached if the proof has the correct format"); } } else { panic!("This should not be reached if the proof has the correct format"); } expected_value.len() == 0 } }
r() .enumerate() .filter(|(i, _)| i % 2 == 0) .zip(a.iter().enumerate().filter(|(i, _)| i % 2 == 1)) .map(|((_, x), (_, y))| (x << 4) | y) .collect() }
function_block-function_prefixed
[ { "content": "/// This pallet's configuration trait\n\npub trait Config: system::Config + CreateSignedTransaction<Call<Self>> {\n\n /// The identifier type for an offchain worker.\n\n type AuthorityId: AppCrypto<Self::Public, Self::Signature>;\n\n\n\n /// The overarching event type.\n\n type Event: From<Event<Self>> + Into<<Self as frame_system::Config>::Event>;\n\n /// The overarching dispatch call type.\n\n type Call: From<Call<Self>>;\n\n}\n\n\n\n/// Minimal information about a header.\n\n#[derive(Clone, Encode, Decode)]\n\npub struct HeaderInfo {\n\n pub total_difficulty: U256,\n\n pub parent_hash: H256,\n\n pub number: U256,\n\n}\n\n\n\n#[derive(Encode, Decode)]\n\npub struct RpcUrl {\n", "file_path": "pallets/worker/src/lib.rs", "rank": 1, "score": 127943.03720750098 }, { "content": "pub fn development_config() -> Result<ChainSpec, String> {\n\n let wasm_binary = WASM_BINARY\n\n .ok_or(\"Development wasm binary not available\".to_string())?;\n\n\n\n Ok(ChainSpec::from_genesis(\n\n // Name\n\n \"Development\",\n\n // ID\n\n \"dev\",\n\n ChainType::Development,\n\n move || {\n\n testnet_genesis(\n\n wasm_binary,\n\n // Initial PoA authorities\n\n vec![authority_keys_from_seed(\"Alice\")],\n\n // Sudo account\n\n get_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n // Pre-funded accounts\n\n vec![\n\n get_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n", "file_path": "node/src/chain_spec.rs", "rank": 2, "score": 98560.97315508136 }, { "content": "pub fn local_testnet_config() -> Result<ChainSpec, String> {\n\n let wasm_binary = WASM_BINARY\n\n .ok_or(\"Development wasm binary not available\".to_string())?;\n\n\n\n Ok(ChainSpec::from_genesis(\n\n // Name\n\n \"Local Testnet\",\n\n // ID\n\n \"local_testnet\",\n\n ChainType::Local,\n\n move || {\n\n testnet_genesis(\n\n wasm_binary,\n\n // Initial PoA authorities\n\n vec![\n\n authority_keys_from_seed(\"Alice\"),\n\n authority_keys_from_seed(\"Bob\"),\n\n ],\n\n // Sudo account\n\n 
get_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n", "file_path": "node/src/chain_spec.rs", "rank": 3, "score": 96025.5062038051 }, { "content": "fn hex_to_h256(v: String) -> H256 {\n\n let s = &mut v[2..].as_bytes().to_vec();\n\n if s.len() % 2 != 0 {\n\n s.push(b'0');\n\n }\n\n let b = hex::decode(&s).unwrap();\n\n H256::from_slice(&b)\n\n}\n\n\n", "file_path": "pallets/worker/src/types.rs", "rank": 4, "score": 95671.36361787362 }, { "content": "/// Builds a new service for a full client.\n\npub fn new_full(\n\n mut config: Configuration,\n\n) -> Result<TaskManager, ServiceError> {\n\n let sc_service::PartialComponents {\n\n client,\n\n backend,\n\n mut task_manager,\n\n import_queue,\n\n mut keystore_container,\n\n select_chain,\n\n transaction_pool,\n\n inherent_data_providers,\n\n other: (block_import, grandpa_link),\n\n } = new_partial(&config)?;\n\n\n\n if let Some(url) = &config.keystore_remote {\n\n match remote_keystore(url) {\n\n Ok(k) => keystore_container.set_remote_keystore(k),\n\n Err(e) => {\n\n return Err(ServiceError::Other(format!(\n", "file_path": "node/src/service.rs", "rank": 5, "score": 94120.49773286903 }, { "content": "pub fn new_partial(\n\n config: &Configuration,\n\n) -> Result<\n\n sc_service::PartialComponents<\n\n FullClient,\n\n FullBackend,\n\n FullSelectChain,\n\n sp_consensus::DefaultImportQueue<Block, FullClient>,\n\n sc_transaction_pool::FullPool<Block, FullClient>,\n\n (\n\n sc_consensus_aura::AuraBlockImport<\n\n Block,\n\n FullClient,\n\n sc_finality_grandpa::GrandpaBlockImport<\n\n FullBackend,\n\n Block,\n\n FullClient,\n\n FullSelectChain,\n\n >,\n\n AuraPair,\n", "file_path": "node/src/service.rs", "rank": 6, "score": 94120.49773286903 }, { "content": "/// Builds a new service for a light client.\n\npub fn new_light(\n\n mut config: Configuration,\n\n) -> Result<TaskManager, ServiceError> {\n\n let (client, backend, keystore_container, mut task_manager, on_demand) =\n\n sc_service::new_light_parts::<Block, 
RuntimeApi, Executor>(&config)?;\n\n\n\n config\n\n .network\n\n .notifications_protocols\n\n .push(sc_finality_grandpa::GRANDPA_PROTOCOL_NAME.into());\n\n\n\n let select_chain = sc_consensus::LongestChain::new(backend.clone());\n\n\n\n let transaction_pool = Arc::new(sc_transaction_pool::BasicPool::new_light(\n\n config.transaction_pool.clone(),\n\n config.prometheus_registry(),\n\n task_manager.spawn_handle(),\n\n client.clone(),\n\n on_demand.clone(),\n\n ));\n", "file_path": "node/src/service.rs", "rank": 7, "score": 94120.49773286903 }, { "content": "/// Generate an Aura authority key.\n\npub fn authority_keys_from_seed(s: &str) -> (AuraId, GrandpaId) {\n\n (get_from_seed::<AuraId>(s), get_from_seed::<GrandpaId>(s))\n\n}\n\n\n", "file_path": "node/src/chain_spec.rs", "rank": 8, "score": 91775.4921905963 }, { "content": "fn write_file() -> std::result::Result<(), std::io::Error> {\n\n let mut file = File::create(\"elts.txt\")?;\n\n for i in read_roots_collection().dag_merkle_roots.iter() {\n\n // println!(\"{:?}\", i.as_bytes());\n\n write!(file, \"{:?}\", i.as_bytes())?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "pallets/worker/src/tests.rs", "rank": 9, "score": 88814.89618525578 }, { "content": "#[cfg(feature = \"std\")]\n\npub fn native_version() -> NativeVersion {\n\n NativeVersion {\n\n runtime_version: VERSION,\n\n can_author_with: Default::default(),\n\n }\n\n}\n\n\n\nconst NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(75);\n\n\n\nparameter_types! 
{\n\n pub const Version: RuntimeVersion = VERSION;\n\n pub const BlockHashCount: BlockNumber = 2400;\n\n /// We allow for 2 seconds of compute with a 6 second average block time.\n\n pub BlockWeights: frame_system::limits::BlockWeights = frame_system::limits::BlockWeights\n\n ::with_sensible_defaults(2 * WEIGHT_PER_SECOND, NORMAL_DISPATCH_RATIO);\n\n pub BlockLength: frame_system::limits::BlockLength = frame_system::limits::BlockLength\n\n ::max_with_normal_ratio(5 * 1024 * 1024, NORMAL_DISPATCH_RATIO);\n\n}\n\n\n\n// Configure FRAME pallets to include in runtime.\n", "file_path": "runtime/src/lib.rs", "rank": 10, "score": 86221.96746381887 }, { "content": "pub trait BlockFetcher: Decode + Encode {\n\n type Error: fmt::Debug;\n\n\n\n fn fetch_latest(&self) -> Result<BlockHeader, Self::Error>;\n\n fn fetch_one(&self, block_number: u64) -> Result<BlockHeader, Self::Error>;\n\n fn fetch_many(\n\n &self,\n\n block_numbers: ops::RangeInclusive<u64>,\n\n ) -> Result<Vec<BlockHeader>, Self::Error>;\n\n}\n\n\n\n#[derive(Debug, Clone, Encode, Decode)]\n\npub struct BlockQueue<F: BlockFetcher> {\n\n inner: VecDeque<BlockHeader>,\n\n last_seen_block_number: Option<u64>,\n\n block_fetcher: F,\n\n}\n\n\n\nimpl<F: BlockFetcher> BlockQueue<F> {\n\n pub fn with_fetcher(block_fetcher: F) -> Self {\n", "file_path": "pallets/worker/src/blocks_queue.rs", "rank": 11, "score": 85358.39179025886 }, { "content": "// roots from rainbow bridge\n\npub fn roots() -> Vec<H128> {\n\n return vec![\n\n [\n\n 85, 184, 145, 232, 66, 229, 143, 88, 149, 106, 132, 124, 187, 246,\n\n 120, 33,\n\n ]\n\n .into(),\n\n [\n\n 251, 160, 58, 61, 25, 2, 185, 37, 110, 190, 145, 119, 208, 50, 66,\n\n 254,\n\n ]\n\n .into(),\n\n [\n\n 43, 24, 109, 198, 91, 147, 190, 113, 120, 14, 81, 148, 253, 68,\n\n 252, 112,\n\n ]\n\n .into(),\n\n [\n\n 148, 192, 83, 45, 73, 82, 60, 217, 48, 144, 87, 168, 71, 239, 13,\n\n 189,\n", "file_path": "pallets/worker/src/roots.rs", "rank": 12, "score": 83651.03789084006 }, { 
"content": "/// Instantiate all full RPC extensions.\n\npub fn create_full<C, P>(\n\n deps: FullDeps<C, P>,\n\n) -> jsonrpc_core::IoHandler<sc_rpc::Metadata>\n\nwhere\n\n C: ProvideRuntimeApi<Block>,\n\n C: HeaderBackend<Block>\n\n + HeaderMetadata<Block, Error = BlockChainError>\n\n + 'static,\n\n C: Send + Sync + 'static,\n\n C::Api:\n\n substrate_frame_rpc_system::AccountNonceApi<Block, AccountId, Index>,\n\n C::Api: pallet_transaction_payment_rpc::TransactionPaymentRuntimeApi<\n\n Block,\n\n Balance,\n\n >,\n\n C::Api: BlockBuilder<Block>,\n\n P: TransactionPool + 'static,\n\n{\n\n use pallet_transaction_payment_rpc::{\n\n TransactionPayment, TransactionPaymentApi,\n", "file_path": "node/src/rpc.rs", "rank": 13, "score": 83651.03789084006 }, { "content": "/// Parse and run command line arguments\n\npub fn run() -> sc_cli::Result<()> {\n\n let cli = Cli::from_args();\n\n\n\n match &cli.subcommand {\n\n Some(Subcommand::BuildSpec(cmd)) => {\n\n let runner = cli.create_runner(cmd)?;\n\n runner.sync_run(|config| cmd.run(config.chain_spec, config.network))\n\n },\n\n Some(Subcommand::CheckBlock(cmd)) => {\n\n let runner = cli.create_runner(cmd)?;\n\n runner.async_run(|config| {\n\n let PartialComponents {\n\n client,\n\n task_manager,\n\n import_queue,\n\n ..\n\n } = service::new_partial(&config)?;\n\n Ok((cmd.run(client, import_queue), task_manager))\n\n })\n\n },\n", "file_path": "node/src/command.rs", "rank": 14, "score": 83651.03789084006 }, { "content": " trait Store for Module<T: Config> as WorkerModule {\n\n pub ValidateETHash get(fn validate_ethash): bool;\n\n /// The epoch from which the DAG merkle roots start.\n\n pub DAGsStartEpoch get(fn dags_start_epoch): Option<u64>;\n\n /// DAG merkle roots for the next several years.\n\n pub DAGsMerkleRoots get(fn dags_merkle_roots): Vec<H128>;\n\n /// Hash of the header that has the highest cumulative difficulty. 
The current head of the\n\n /// canonical chain.\n\n pub BestHeaderHash get(fn best_header_hash): H256;\n\n /// We store the hashes of the blocks for the past `hashes_gc_threshold` headers.\n\n /// Events that happen past this threshold cannot be verified by the client.\n\n /// It is desirable that this number is larger than 7 days worth of headers, which is roughly\n\n /// 40k Ethereum blocks. So this number should be 40k in production.\n\n pub HashesGCThreshold get(fn hashes_gc_threshold): Option<U256>;\n\n /// We store full information about the headers for the past `finalized_gc_threshold` blocks.\n\n /// This is required to be able to adjust the canonical chain when the fork switch happens.\n\n /// The commonly used number is 500 blocks, so this number should be 500 in production.\n\n pub FinalizedGCThreshold get(fn finalized_gc_threshold): Option<U256>;\n\n /// Number of confirmations that applications can use to consider the transaction safe.\n\n /// For most use cases 25 should be enough, for super safe cases it should be 500.\n", "file_path": "pallets/worker/src/lib.rs", "rank": 15, "score": 79895.68537839266 }, { "content": "/// Generate a crypto pair from seed.\n\npub fn get_from_seed<TPublic: Public>(\n\n seed: &str,\n\n) -> <TPublic::Pair as Pair>::Public {\n\n TPublic::Pair::from_string(&format!(\"//{}\", seed), None)\n\n .expect(\"static values are valid; qed\")\n\n .public()\n\n}\n\n\n", "file_path": "node/src/chain_spec.rs", "rank": 16, "score": 79541.4965848957 }, { "content": "pub fn sha256(data: &[u8]) -> [u8; 32] {\n\n let mut buffer = [0u8; 32];\n\n buffer.copy_from_slice(&sha2_256(data));\n\n buffer\n\n}\n\n\n\n#[derive(RuntimeDebug, Default, Clone, Encode, Decode, PartialEq)]\n\npub struct DoubleNodeWithMerkleProof {\n\n pub dag_nodes: [H512; 2],\n\n pub proof: Vec<H128>,\n\n}\n\n\n\nimpl DoubleNodeWithMerkleProof {\n\n pub fn new() -> Self {\n\n Self {\n\n dag_nodes: [H512::from([0; 64]); 2],\n\n proof: vec![],\n\n }\n\n }\n\n\n", 
"file_path": "pallets/worker/src/types.rs", "rank": 17, "score": 77369.42058850851 }, { "content": "fn generate_proofs(\n\n header: &BlockHeader,\n\n) -> Option<Vec<types::DoubleNodeWithMerkleProof>> {\n\n let rlp_header = rlp::encode(header);\n\n let rlp_hex = hex::encode(rlp_header);\n\n let payload = types::ProofsPayload { rlp: rlp_hex };\n\n let body = serde_json::to_vec(&payload);\n\n let request = match http::Request::post(\n\n \"http://127.0.0.1:3000/proofs\",\n\n body,\n\n )\n\n .send()\n\n {\n\n Ok(handle) => handle,\n\n Err(e) => {\n\n debug::native::error!(\"http request failed: {:?}\", e);\n\n return None;\n\n },\n\n };\n\n let response = match request.wait() {\n", "file_path": "pallets/worker/src/lib.rs", "rank": 18, "score": 74579.25516163948 }, { "content": "/// Generate an account ID from seed.\n\npub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> AccountId\n\nwhere\n\n AccountPublic: From<<TPublic::Pair as Pair>::Public>,\n\n{\n\n AccountPublic::from(get_from_seed::<TPublic>(seed)).into_account()\n\n}\n\n\n", "file_path": "node/src/chain_spec.rs", "rank": 19, "score": 64550.87330247503 }, { "content": "fn read_block(filename: String) -> BlockWithProofs {\n\n read_block_raw(filename).into()\n\n}\n\n\n", "file_path": "pallets/worker/src/tests.rs", "rank": 20, "score": 61557.34048264695 }, { "content": "fn read_block_raw(filename: String) -> BlockWithProofsRaw {\n\n serde_json::from_reader(\n\n std::fs::File::open(std::path::Path::new(&filename)).unwrap(),\n\n )\n\n .unwrap()\n\n}\n\n\n", "file_path": "pallets/worker/src/tests.rs", "rank": 21, "score": 58044.981319313214 }, { "content": "fn main() {\n\n generate_cargo_keys();\n\n\n\n rerun_if_git_head_changed();\n\n}\n", "file_path": "node/build.rs", "rank": 22, "score": 50714.60677777328 }, { "content": "fn main() {\n\n WasmBuilder::new()\n\n .with_current_project()\n\n .export_heap_base()\n\n .import_memory()\n\n .build()\n\n}\n", "file_path": "runtime/build.rs", "rank": 23, "score": 
50714.60677777328 }, { "content": "#[test]\n\nfn should_init() {\n\n let (offchain, _state) = testing::TestOffchainExt::new();\n\n let mut t = sp_io::TestExternalities::default();\n\n t.register_extension(OffchainExt::new(offchain));\n\n t.execute_with(|| {\n\n let (blocks, _) = get_blocks(&WEB3RS, 400_000, 400_001);\n\n let pair = sp_core::sr25519::Pair::from_seed(\n\n b\"12345678901234567890123456789012\",\n\n );\n\n let dmr = read_roots_collection();\n\n assert_ok!(Example::init(\n\n Origin::signed(pair.public()),\n\n 0,\n\n read_roots_collection().dag_merkle_roots,\n\n rlp::decode(&blocks[0]).unwrap(),\n\n U256::from(30),\n\n U256::from(10),\n\n U256::from(10),\n\n None,\n\n ));\n\n\n\n assert_eq!(dmr.dag_merkle_roots[0], Example::dag_merkle_root(0));\n\n assert_eq!(dmr.dag_merkle_roots[10], Example::dag_merkle_root(10));\n\n assert_eq!(dmr.dag_merkle_roots[511], Example::dag_merkle_root(511));\n\n\n\n let result = catch_unwind_silent(|| Example::dag_merkle_root(512));\n\n assert!(result.is_err());\n\n });\n\n}\n\n\n", "file_path": "pallets/worker/src/tests.rs", "rank": 24, "score": 47455.97857460521 }, { "content": "fn get_blocks(\n\n web3rust: &web3::Web3<web3::transports::Http>,\n\n start: usize,\n\n stop: usize,\n\n) -> (Vec<Vec<u8>>, Vec<H256>) {\n\n let futures = (start..stop)\n\n .map(|i| web3rust.eth().block((i as u64).into()))\n\n .collect::<Vec<_>>();\n\n\n\n let block_headers = join_all(futures).wait().unwrap();\n\n\n\n let mut blocks: Vec<Vec<u8>> = vec![];\n\n let mut hashes: Vec<H256> = vec![];\n\n for block_header in block_headers {\n\n let mut stream = RlpStream::new();\n\n rlp_append(&block_header.clone().unwrap(), &mut stream);\n\n blocks.push(stream.out());\n\n hashes.push(H256(block_header.clone().unwrap().hash.unwrap().0));\n\n }\n\n\n\n (blocks, hashes)\n\n}\n\n\n", "file_path": "pallets/worker/src/tests.rs", "rank": 25, "score": 46058.61987199425 }, { "content": "#[test]\n\nfn add_blocks_2_and_3() {\n\n let (offchain, _state) = 
testing::TestOffchainExt::new();\n\n let mut t = sp_io::TestExternalities::default();\n\n t.register_extension(OffchainExt::new(offchain));\n\n t.execute_with(|| {\n\n let pair = sp_core::sr25519::Pair::from_seed(\n\n b\"12345678901234567890123456789012\",\n\n );\n\n // Check on 3 block from here: https://github.com/KyberNetwork/bridge_eos_smart_contracts/blob/master/scripts/jungle/jungle_relay_3.js\n\n let (blocks, hashes) = get_blocks(&WEB3RS, 2, 4);\n\n\n\n // $ ../ethrelay/ethashproof/cmd/relayer/relayer 3\n\n let blocks_with_proofs: Vec<BlockWithProofs> =\n\n [\"./data/2.json\", \"./data/3.json\"]\n\n .iter()\n\n .map(|filename| read_block((&filename).to_string()))\n\n .collect();\n\n\n\n assert_ok!(Example::init(\n\n Origin::signed(pair.public()),\n", "file_path": "pallets/worker/src/tests.rs", "rank": 26, "score": 46058.61987199425 }, { "content": "#[test]\n\nfn add_400000_block_only() {\n\n let (offchain, _state) = testing::TestOffchainExt::new();\n\n let mut t = sp_io::TestExternalities::default();\n\n t.register_extension(OffchainExt::new(offchain));\n\n t.execute_with(|| {\n\n let pair = sp_core::sr25519::Pair::from_seed(\n\n b\"12345678901234567890123456789012\",\n\n );\n\n\n\n // Check on 400000 block from this answer: https://ethereum.stackexchange.com/a/67333/3032\n\n let (blocks, hashes) = get_blocks(&WEB3RS, 400_000, 400_001);\n\n\n\n // $ ../ethrelay/ethashproof/cmd/relayer/relayer 400000\n\n // digest: 0x3fbea7af642a4e20cd93a945a1f5e23bd72fc5261153e09102cf718980aeff38\n\n // ethash result:\n\n // 0x00000000000ca599ebe9913fa00da78a4d1dd2fa154c4fd2aad10ccbca52a2a1\n\n // Proof length: 24\n\n // [400000.json]\n\n\n\n let block_with_proof = read_block(\"./data/400000.json\".to_string());\n", "file_path": "pallets/worker/src/tests.rs", "rank": 27, "score": 46058.61987199425 }, { "content": "/// Configure initial storage state for FRAME modules.\n\nfn testnet_genesis(\n\n wasm_binary: &[u8],\n\n initial_authorities: Vec<(AuraId, GrandpaId)>,\n\n 
root_key: AccountId,\n\n endowed_accounts: Vec<AccountId>,\n\n _enable_println: bool,\n\n) -> GenesisConfig {\n\n GenesisConfig {\n\n frame_system: Some(SystemConfig {\n\n // Add Wasm runtime to storage.\n\n code: wasm_binary.to_vec(),\n\n changes_trie_config: Default::default(),\n\n }),\n\n pallet_balances: Some(BalancesConfig {\n\n // Configure endowed accounts with initial balance of 1 << 60.\n\n balances: endowed_accounts\n\n .iter()\n\n .cloned()\n\n .map(|k| (k, 1 << 60))\n\n .collect(),\n", "file_path": "node/src/chain_spec.rs", "rank": 28, "score": 46058.61987199425 }, { "content": "#[test]\n\nfn add_2_blocks_from_400000() {\n\n let (offchain, _state) = testing::TestOffchainExt::new();\n\n let mut t = sp_io::TestExternalities::default();\n\n t.register_extension(OffchainExt::new(offchain));\n\n t.execute_with(|| {\n\n let pair = sp_core::sr25519::Pair::from_seed(\n\n b\"12345678901234567890123456789012\",\n\n );\n\n\n\n // Check on 400000 block from this answer: https://ethereum.stackexchange.com/a/67333/3032\n\n let (blocks, hashes) = get_blocks(&WEB3RS, 400_000, 400_002);\n\n\n\n // $ ../ethrelay/ethashproof/cmd/relayer/relayer 400001\n\n // digest: 0x3fbea7af642a4e20cd93a945a1f5e23bd72fc5261153e09102cf718980aeff38\n\n // ethash result:\n\n // 0x00000000000ca599ebe9913fa00da78a4d1dd2fa154c4fd2aad10ccbca52a2a1\n\n // Proof length: 24\n\n // [400001.json]\n\n\n\n let blocks_with_proofs: Vec<BlockWithProofs> =\n", "file_path": "pallets/worker/src/tests.rs", "rank": 29, "score": 46058.61987199425 }, { "content": "#[test]\n\nfn add_two_blocks_from_8996776() {\n\n let (offchain, _state) = testing::TestOffchainExt::new();\n\n let mut t = sp_io::TestExternalities::default();\n\n t.register_extension(OffchainExt::new(offchain));\n\n t.execute_with(|| {\n\n let pair = sp_core::sr25519::Pair::from_seed(\n\n b\"12345678901234567890123456789012\",\n\n );\n\n // Check on 8996777 block from this test: 
https://github.com/sorpaas/rust-ethash/blob/ac6e42bcb7f40ad2a3b89f7400a61f7baf3f0926/src/lib.rs#L318-L326\n\n let (blocks, hashes) = get_blocks(&WEB3RS, 8_996_776, 8_996_778);\n\n\n\n // $ ../ethrelay/ethashproof/cmd/relayer/relayer 8996777\n\n let blocks_with_proofs: Vec<BlockWithProofs> =\n\n [\"./data/8996776.json\", \"./data/8996777.json\"]\n\n .iter()\n\n .map(|filename| read_block((&filename).to_string()))\n\n .collect();\n\n\n\n assert_ok!(Example::init(\n\n Origin::signed(pair.public()),\n", "file_path": "pallets/worker/src/tests.rs", "rank": 30, "score": 44787.8749863495 }, { "content": "#[derive(Debug)]\n\nstruct Hex(pub Vec<u8>);\n\n\n\nimpl<'de> Deserialize<'de> for Hex {\n\n fn deserialize<D>(\n\n deserializer: D,\n\n ) -> Result<Self, <D as Deserializer<'de>>::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n let mut s = <String as Deserialize>::deserialize(deserializer)?;\n\n if s.starts_with(\"0x\") {\n\n s = s[2..].to_string();\n\n }\n\n if s.len() % 2 == 1 {\n\n s.insert_str(0, \"0\");\n\n }\n\n Ok(Hex(Vec::from_hex(&s).map_err(|err| {\n\n serde::de::Error::custom(err.to_string())\n\n })?))\n\n }\n\n}\n\n\n", "file_path": "pallets/worker/src/tests.rs", "rank": 31, "score": 44020.82144699544 }, { "content": "#[test]\n\nfn add_block_2_rust_ethash() {\n\n let (offchain, _state) = testing::TestOffchainExt::new();\n\n let mut t = sp_io::TestExternalities::default();\n\n t.register_extension(OffchainExt::new(offchain));\n\n t.execute_with(|| {\n\n let pair = sp_core::sr25519::Pair::from_seed(\n\n b\"12345678901234567890123456789012\",\n\n );\n\n // Check on 3 block from here: https://github.com/KyberNetwork/bridge_eos_smart_contracts/blob/master/scripts/jungle/jungle_relay_3.js\n\n let (blocks, hashes) = get_blocks(&WEB3RS, 2, 4);\n\n\n\n // $ ../ethrelay/ethashproof/cmd/relayer/relayer 3\n\n let blocks_with_proofs: Vec<BlockWithProofs> =\n\n [\"./data/2_rust_ethash.json\", \"./data/3.json\"]\n\n .iter()\n\n .map(|filename| 
read_block((&filename).to_string()))\n\n .collect();\n\n\n\n assert_ok!(Example::init(\n\n Origin::signed(pair.public()),\n", "file_path": "pallets/worker/src/tests.rs", "rank": 32, "score": 43627.2811572777 }, { "content": "fn catch_unwind_silent<F: FnOnce() -> R + panic::UnwindSafe, R>(\n\n f: F,\n\n) -> std::thread::Result<R> {\n\n let prev_hook = panic::take_hook();\n\n panic::set_hook(Box::new(|_| {}));\n\n let result = panic::catch_unwind(f);\n\n panic::set_hook(prev_hook);\n\n result\n\n}\n\n\n", "file_path": "pallets/worker/src/tests.rs", "rank": 33, "score": 41678.994184010226 }, { "content": "fn read_roots_collection() -> RootsCollection {\n\n read_roots_collection_raw().into()\n\n}\n\n\n", "file_path": "pallets/worker/src/tests.rs", "rank": 34, "score": 40999.389199934296 }, { "content": "fn hex_to_h64(v: String) -> H64 {\n\n let s = &mut v[2..].as_bytes().to_vec();\n\n if s.len() % 2 != 0 {\n\n s.push(b'0');\n\n }\n\n let b = hex::decode(&s).unwrap();\n\n H64::from_slice(&b)\n\n}\n\n\n", "file_path": "pallets/worker/src/types.rs", "rank": 35, "score": 39630.21644384461 }, { "content": "fn hex_to_address(v: String) -> Address {\n\n let s = &mut v[2..].as_bytes().to_vec();\n\n if s.len() % 2 != 0 {\n\n s.push(b'0');\n\n }\n\n let b = hex::decode(&s).unwrap();\n\n Address::from_slice(&b)\n\n}\n\n\n\n#[derive(Debug, Clone, Encode, Decode, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct BlockHeader {\n\n pub parent_hash: H256,\n\n pub uncles_hash: H256,\n\n pub author: Address,\n\n pub state_root: H256,\n\n pub transactions_root: H256,\n\n pub receipts_root: H256,\n\n pub log_bloom: Bloom,\n\n pub difficulty: U256,\n\n pub number: U256,\n", "file_path": "pallets/worker/src/types.rs", "rank": 36, "score": 39630.21644384461 }, { "content": "fn hex_to_bloom(v: String) -> Bloom {\n\n let s = &mut v[2..].as_bytes().to_vec();\n\n if s.len() % 2 != 0 {\n\n s.push(b'0');\n\n }\n\n let b = hex::decode(&s).unwrap();\n\n Bloom::from_slice(&b)\n\n}\n\n\n", "file_path": 
"pallets/worker/src/types.rs", "rank": 37, "score": 39630.21644384461 }, { "content": "fn hex_to_u256(v: String) -> U256 {\n\n let s = &mut v[2..].as_bytes().to_vec();\n\n if s.len() % 2 != 0 {\n\n s.insert(0, b'0'); // big endian .. add to the first.\n\n }\n\n let b = hex::decode(&s).unwrap();\n\n U256::from_big_endian(b.as_slice())\n\n}\n\n\n", "file_path": "pallets/worker/src/types.rs", "rank": 38, "score": 39630.21644384461 }, { "content": "fn read_roots_collection_raw() -> RootsCollectionRaw {\n\n serde_json::from_reader(\n\n std::fs::File::open(std::path::Path::new(\n\n \"./data/dag_merkle_roots.json\",\n\n ))\n\n .unwrap(),\n\n )\n\n .unwrap()\n\n}\n\n\n", "file_path": "pallets/worker/src/tests.rs", "rank": 39, "score": 39115.94143605378 }, { "content": "fn main() -> sc_cli::Result<()> { command::run() }\n", "file_path": "node/src/main.rs", "rank": 40, "score": 37600.801170733364 }, { "content": "fn set_block_response(state: &mut testing::OffchainState) {\n\n let body = b\"{\\\"jsonrpc\\\":\\\"2.0\\\",\\\"method\\\":\\\"eth_getBlockByNumber\\\",\\\"params\\\":[\\\"latest\\\", false],\\\"id\\\":1}\";\n\n state.expect_request(testing::PendingRequest {\n\n\t\tmethod: \"POST\".into(),\n\n\t\turi: \"http://localhost:8545\".into(),\n\n\t\tbody: body.to_vec(),\n\n\t\tresponse: Some(br#\"{\n\n\t\t\t\"jsonrpc\":\"2.0\",\n\n\t\t\t\"id\":1,\n\n\t\t\t\"result\":{\n\n\t\t\t\t\"difficulty\": \"0x29d45538\",\n\n\t\t\t\t\"extraData\": \"0xdb830300018c4f70656e457468657265756d86312e34332e31826c69\",\n\n\t\t\t\t\"gasLimit\": \"0x7a121d\",\n\n\t\t\t\t\"gasUsed\": \"0xcb5e\",\n\n\t\t\t\t\"hash\": \"0xa03b310a4fa187d7aafe458323da848fe4e4ed610b0ca970818f8d76ff7acafc\",\n\n\t\t\t\t\"logsBloom\": 
\"0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400400000000000000000000000000000000020000001000008000000000000000000000000000000000000000000000020000000000000000000000000000000000000000000000110000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000100000000000000000000000000000000001000000000000000000000000\",\n\n\t\t\t\t\"miner\": \"0x05fc5a079e0583b8a07526023a16e2022c4c6296\",\n\n\t\t\t\t\"mixHash\": \"0xca855e662d1d628cdb218b1989386aceb1eab53eb4968fdf4220851db7f776a2\",\n\n\t\t\t\t\"nonce\": \"0x83e8ba4b86c92bee\",\n\n\t\t\t\t\"number\": \"0x88555f\",\n", "file_path": "pallets/worker/src/tests.rs", "rank": 41, "score": 34879.984264246596 }, { "content": "fn set_infura_block_response(state: &mut testing::OffchainState) {\n\n let body = b\"{\\\"jsonrpc\\\":\\\"2.0\\\",\\\"method\\\":\\\"eth_getBlockByNumber\\\",\\\"params\\\":[\\\"latest\\\", false],\\\"id\\\":1}\";\n\n state.expect_request(testing::PendingRequest {\n\n\t\tmethod: \"POST\".into(),\n\n\t\turi: \"https://mainnet.infura.io/v3/b5f870422ee5454fb11937e947154cd2\".into(),\n\n\t\tbody: body.to_vec(),\n\n\t\tresponse: Some(br#\"{\n\n\t\t\t\"jsonrpc\": \"2.0\",\n\n\t\t\t\"id\": 1,\n\n\t\t\t\"result\": {\n\n\t\t\t\t\"difficulty\": \"0xc3097478dc9f3\",\n\n\t\t\t\t\"extraData\": \"0x6574682d70726f2d687a6f2d74303035\",\n\n\t\t\t\t\"gasLimit\": \"0xbf0335\",\n\n\t\t\t\t\"gasUsed\": \"0xbed04a\",\n\n\t\t\t\t\"hash\": \"0x09d89a973040f671f1e33824d806634b625228faf87b5faa1f8f56b3d6af28e6\",\n\n\t\t\t\t\"logsBloom\": 
\"0xbabb737509801c83c436040080b59409e1850658c445477ceae58102843219836d42751410540241b40e9321004281d78a69d400ee2360dba21342955d6f0ea044a972061880a36f40cc2c8c72c502f40c04466209649c94ed12c09cc39994429b4a075c266add1af470098403944af04200d80e1e8c7d0c9245db780d10308218e2c08d4418a805a0c51d21da5186bc4808b0011e073029c1c3ab63675c0816077a351710e2999222a018bcb110800712d0c197250a08a1c0aba3c61af688f2f048cc1a60c3d04abc0c0c37130a30040bcc175b5486ae104048d01ab73c7c2e2154ec5db80484ca828259c805ee4c986006329b4920ea74005a1460295b0452\",\n\n\t\t\t\t\"miner\": \"0x5a0b54d5dc17e0aadc383d2db43b0a0d3e029c4c\",\n\n\t\t\t\t\"mixHash\": \"0xfcd66e3d064d1ebbc03eaa86101661566d33c7c534de005c21f6c89db55e7215\",\n\n\t\t\t\t\"nonce\": \"0xcf18e648d3ca4516\",\n\n\t\t\t\t\"number\": \"0xaacc3e\",\n", "file_path": "pallets/worker/src/tests.rs", "rank": 42, "score": 34102.27216316475 }, { "content": "fn remote_keystore(_url: &String) -> Result<Arc<LocalKeystore>, &'static str> {\n\n // FIXME: here would the concrete keystore be built,\n\n // must return a concrete type (NOT `LocalKeystore`) that\n\n // implements `CryptoStore` and `SyncCryptoStore`\n\n Err(\"Remote Keystore not supported.\")\n\n}\n\n\n", "file_path": "node/src/service.rs", "rank": 43, "score": 31867.767435304297 }, { "content": "// Wish to avoid this code and use web3+rlp libraries directly\n\nfn rlp_append<TX>(header: &Block<TX>, stream: &mut RlpStream) {\n\n stream.begin_list(16);\n\n stream.append(&header.parent_hash);\n\n stream.append(&header.uncles_hash);\n\n stream.append(&header.author);\n\n stream.append(&header.state_root);\n\n stream.append(&header.transactions_root);\n\n stream.append(&header.receipts_root);\n\n stream.append(&header.logs_bloom);\n\n stream.append(&header.difficulty);\n\n stream.append(&header.number.unwrap());\n\n stream.append(&header.gas_limit);\n\n stream.append(&header.gas_used);\n\n stream.append(&header.timestamp);\n\n stream.append(&header.extra_data.0);\n\n 
stream.append(&header.mix_hash.unwrap());\n\n stream.append(&header.nonce.unwrap());\n\n stream.append(&header.hash.unwrap());\n\n}\n\n\n", "file_path": "pallets/worker/src/tests.rs", "rank": 58, "score": 31146.97071784171 }, { "content": "fn hex_to_bytes(v: &Vec<char>) -> Result<Vec<u8>, hex::FromHexError> {\n\n let mut vec = v.clone();\n\n\n\n // remove 0x prefix\n\n if vec.len() >= 2 && vec[0] == '0' && vec[1] == 'x' {\n\n vec.drain(0..2);\n\n }\n\n\n\n // add leading 0 if odd length\n\n if vec.len() % 2 != 0 {\n\n vec.insert(0, '0');\n\n }\n\n let vec_u8 = vec.iter().map(|c| *c as u8).collect::<Vec<u8>>();\n\n hex::decode(&vec_u8[..])\n\n}\n\n\n\nimpl<T: Config> Module<T> {\n\n pub fn initialized() -> bool { Self::dags_start_epoch().is_some() }\n\n\n\n pub fn dag_merkle_root(epoch: u64) -> H128 {\n", "file_path": "pallets/worker/src/lib.rs", "rank": 59, "score": 30559.61693182832 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct BlockWithProofsRaw {\n\n pub proof_length: u64,\n\n pub header_rlp: Hex,\n\n pub merkle_root: Hex, // H128\n\n pub elements: Vec<Hex>, // H256\n\n pub merkle_proofs: Vec<Hex>, // H128\n\n}\n\n\n\nimpl From<BlockWithProofsRaw> for BlockWithProofs {\n\n fn from(item: BlockWithProofsRaw) -> Self {\n\n let mut temp_merkle_root: [u8; 16] = [0; 16];\n\n for i in 0..16 {\n\n temp_merkle_root[i] = item.merkle_root.0[i];\n\n }\n\n Self {\n\n proof_length: item.proof_length,\n\n merkle_root: H128::from(temp_merkle_root),\n\n elements: item\n\n .elements\n\n .iter()\n", "file_path": "pallets/worker/src/tests.rs", "rank": 60, "score": 27228.516634078795 }, { "content": "use sp_consensus_aura::sr25519::AuthorityId as AuraId;\n\nuse sp_core::{crypto::KeyTypeId, OpaqueMetadata};\n\nuse sp_runtime::traits::{\n\n self, AccountIdLookup, BlakeTwo256, Block as BlockT, IdentifyAccount,\n\n NumberFor, Verify,\n\n};\n\nuse sp_runtime::{\n\n create_runtime_str, generic, impl_opaque_keys,\n\n transaction_validity::{TransactionSource, 
TransactionValidity},\n\n ApplyExtrinsicResult, MultiSignature,\n\n};\n\nuse sp_std::prelude::*;\n\n#[cfg(feature = \"std\")]\n\nuse sp_version::NativeVersion;\n\nuse sp_version::RuntimeVersion;\n\n\n\n// A few exports that help ease life for downstream crates.\n\npub use frame_support::{\n\n construct_runtime, debug, parameter_types,\n\n traits::{KeyOwnerProofSystem, Randomness},\n", "file_path": "runtime/src/lib.rs", "rank": 61, "score": 14.89723564951516 }, { "content": "}\n\n\n\n#[derive(Debug, serde::Deserialize)]\n\npub struct BlockWithProofsRaw {\n\n pub number: u64,\n\n pub proof_length: u64,\n\n pub merkle_root: String,\n\n pub elements: Vec<String>,\n\n pub merkle_proofs: Vec<String>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct BlockWithProofs {\n\n pub proof_length: u64,\n\n pub merkle_root: H128,\n\n pub elements: Vec<H256>,\n\n pub merkle_proofs: Vec<H128>,\n\n}\n\n\n\nimpl From<BlockWithProofsRaw> for BlockWithProofs {\n", "file_path": "pallets/worker/src/types.rs", "rank": 62, "score": 13.804133059457136 }, { "content": "use crate::types::*;\n\nuse crate::*;\n\nuse codec::{Decode, Encode};\n\nuse frame_support::{\n\n assert_ok, impl_outer_origin, parameter_types, weights::Weight,\n\n};\n\nuse sp_core::{\n\n offchain::{testing, OffchainExt},\n\n sr25519::Signature,\n\n H256,\n\n};\n\nuse std::fs::File;\n\nuse std::io::Write;\n\n\n\nuse sp_runtime::{\n\n testing::{Header, TestXt},\n\n traits::{\n\n BlakeTwo256, Extrinsic as ExtrinsicT, IdentifyAccount, IdentityLookup,\n\n Verify,\n\n },\n", "file_path": "pallets/worker/src/tests.rs", "rank": 63, "score": 13.434643310613541 }, { "content": "use crate::*;\n\nuse ethereum_types::{Address, Bloom, H160, H256, H64, U256};\n\nuse rlp::{Rlp, RlpStream};\n\nuse rlp_derive::{\n\n RlpDecodable as RlpDecodableDerive, RlpEncodable as RlpEncodableDerive,\n\n};\n\n\n\n#[cfg(not(feature = \"std\"))]\n\nextern crate alloc;\n\n#[cfg(not(feature = \"std\"))]\n\nuse alloc::string::String;\n\n\n\nuse 
sp_runtime::RuntimeDebug;\n\n\n\n// TODO(shekohex) clean up the following code\n\n// add a trait for doing this work.\n\n\n", "file_path": "pallets/worker/src/types.rs", "rank": 64, "score": 13.179686810719495 }, { "content": "use ethereum_types::H128;\n\nuse sp_std::prelude::*;\n\n\n\n/// Holds Storage Keys constants.\n\n/// I don't like using a lot of Raw strings around.\n\n/// this helps avoid misspiling keys.\n\npub mod storage_keys {\n\n pub const BLOCKS_QUEUE: &[u8] = b\"light-client-worker::blocks_queue\";\n\n}\n\n\n\n// AUTO GENERATED using `python ./scripts/roots.py <ROOTS_FILE_TXT>`\n\npub const DAG_START_EPOCH: u64 = 389;\n\nlazy_static::lazy_static! {\n\n pub static ref ROOT_HASHES: Vec<H128> = vec![\n\n \"0xbeaa602d3dd5708dca1901b6615c1205\", // 389\n\n \"0x176878a13808017e01639d3c249e4360\", // 390\n\n \"0xe5b70fa91960ba1b6d50179bb8943547\", // 391\n\n \"0x3ba7480530edae8e4d9d8bf1ab12436d\", // 392\n\n \"0xd5778f48d009671ae46f28e9a4b3af53\", // 393\n\n \"0xab7ad138b345b8765dccda4857a13285\", // 394\n", "file_path": "pallets/worker/src/constants.rs", "rank": 65, "score": 12.99638222573728 }, { "content": " stream.begin_list(3usize);\n\n stream.append(&self.address);\n\n stream.append_list::<H256, _>(&self.topics);\n\n stream.append(&self.data);\n\n }\n\n}\n\n\n\n// Receipt Header\n\n\n\n#[derive(\n\n Debug, Clone, PartialEq, Eq, RlpEncodableDerive, RlpDecodableDerive,\n\n)]\n\npub struct Receipt {\n\n pub status: bool,\n\n pub gas_used: U256,\n\n pub log_bloom: Bloom,\n\n pub logs: Vec<LogEntry>,\n\n}\n\n\n", "file_path": "pallets/worker/src/types.rs", "rank": 66, "score": 12.007354601534223 }, { "content": " pub gas_limit: u64,\n\n pub gas_used: u64,\n\n pub timestamp: u64,\n\n pub extra_data: Vec<u8>,\n\n pub mix_hash: H256,\n\n pub nonce: H64,\n\n}\n\n\n\n#[derive(Debug, Clone, serde::Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct InfuraBlockHeader {\n\n pub difficulty: String,\n\n pub extra_data: String,\n\n pub gas_limit: 
String,\n\n pub gas_used: String,\n\n pub hash: String,\n\n pub logs_bloom: String,\n\n pub miner: String,\n\n pub mix_hash: String,\n\n pub nonce: String,\n", "file_path": "pallets/worker/src/types.rs", "rank": 67, "score": 11.703380676508699 }, { "content": "\n\npub const UNSIGNED_TXS_PRIORITY: u64 = 100;\n\n\n\n/// Defines application identifier for crypto keys of this module.\n\n///\n\n/// Every module that deals with signatures needs to declare its unique\n\n/// identifier for its crypto keys.\n\n/// When offchain worker is signing transactions it's going to request keys of\n\n/// type `KeyTypeId` from the keystore and use the ones it finds to sign the\n\n/// transaction. The keys can be inserted manually via RPC (see\n\n/// `author_insertKey`).\n\npub const KEY_TYPE: KeyTypeId = KeyTypeId(*b\"eth!\");\n\n\n\n/// Based on the above `KeyTypeId` we need to generate a pallet-specific crypto\n\n/// type wrappers. We can use from supported crypto kinds (`sr25519`, `ed25519`\n\n/// and `ecdsa`) and augment the types with this pallet-specific identifier.\n\npub mod crypto {\n\n use super::KEY_TYPE;\n\n use sp_core::sr25519::Signature as Sr25519Signature;\n\n use sp_runtime::{\n", "file_path": "pallets/worker/src/lib.rs", "rank": 68, "score": 11.370526149100426 }, { "content": " pub fn to_double_node_with_merkle_proof_vec(\n\n &self,\n\n ) -> Vec<types::DoubleNodeWithMerkleProof> {\n\n let h512s = Self::combine_dag_h256_to_h512(self.elements.clone());\n\n h512s\n\n .iter()\n\n .zip(h512s.iter().skip(1))\n\n .enumerate()\n\n .filter(|(i, _)| i % 2 == 0)\n\n .map(|(i, (a, b))| DoubleNodeWithMerkleProof {\n\n dag_nodes: [*a, *b],\n\n proof: self.merkle_proofs[i / 2 * self.proof_length as usize\n\n ..(i / 2 + 1) * self.proof_length as usize]\n\n .to_vec(),\n\n })\n\n .collect()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "pallets/worker/src/types.rs", "rank": 69, "score": 11.238671445234463 }, { "content": "use core::{cmp, fmt, ops};\n\n\n\n#[cfg(not(feature = 
\"std\"))]\n\nextern crate alloc;\n\n#[cfg(not(feature = \"std\"))]\n\nuse alloc::format;\n\n\n\nuse codec::{Decode, Encode};\n\nuse frame_support::debug;\n\nuse sp_runtime::offchain::http;\n\nuse sp_std::collections::vec_deque::VecDeque;\n\nuse sp_std::prelude::*;\n\n\n\nuse crate::types::{BlockHeader, InfuraBlockHeader};\n\n\n", "file_path": "pallets/worker/src/blocks_queue.rs", "rank": 70, "score": 11.00449385752572 }, { "content": "}\n\n\n\nimpl pallet_aura::Config for Runtime {\n\n type AuthorityId = AuraId;\n\n}\n\n\n\nimpl pallet_grandpa::Config for Runtime {\n\n type Call = Call;\n\n type Event = Event;\n\n type HandleEquivocation = ();\n\n type KeyOwnerIdentification =\n\n <Self::KeyOwnerProofSystem as KeyOwnerProofSystem<(\n\n KeyTypeId,\n\n GrandpaId,\n\n )>>::IdentificationTuple;\n\n type KeyOwnerProof = <Self::KeyOwnerProofSystem as KeyOwnerProofSystem<\n\n (KeyTypeId, GrandpaId),\n\n >>::Proof;\n\n type KeyOwnerProofSystem = ();\n\n type WeightInfo = ();\n", "file_path": "runtime/src/lib.rs", "rank": 71, "score": 10.950294340584918 }, { "content": "}\n\n\n\nimpl<C> frame_system::offchain::SendTransactionTypes<C> for Runtime\n\nwhere\n\n Call: From<C>,\n\n{\n\n type Extrinsic = UncheckedExtrinsic;\n\n type OverarchingCall = Call;\n\n}\n\n\n\npub mod report {\n\n use super::{Signature, Verify};\n\n use frame_system::offchain::AppCrypto;\n\n use sp_core::crypto::{key_types, KeyTypeId};\n\n\n\n /// Key type for the reporting module. 
Used for reporting BABE and GRANDPA\n\n /// equivocations.\n\n pub const KEY_TYPE: KeyTypeId = key_types::REPORTING;\n\n\n\n pub mod app {\n", "file_path": "runtime/src/lib.rs", "rank": 72, "score": 10.613302438367857 }, { "content": " ValidTransaction,\n\n },\n\n};\n\nuse sp_std::prelude::*;\n\n\n\nuse ethereum_types::{H128, H256, H512, H64, U256};\n\nuse sp_io::hashing::{keccak_256, sha2_256};\n\n\n\nmod constants;\n\n\n\nmod types;\n\nuse types::BlockHeader;\n\n\n\nmod prover;\n\n\n\nmod blocks_queue;\n\nuse blocks_queue::{BlockQueue, Infura};\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "pallets/worker/src/lib.rs", "rank": 73, "score": 10.55985894142251 }, { "content": " author: hex_to_address(self.miner),\n\n state_root: hex_to_h256(self.state_root),\n\n transactions_root: hex_to_h256(self.transactions_root),\n\n receipts_root: hex_to_h256(self.receipts_root),\n\n log_bloom: hex_to_bloom(self.logs_bloom),\n\n difficulty: hex_to_u256(self.difficulty),\n\n gas_limit: hex_to_u256(self.gas_limit).as_u64(),\n\n gas_used: hex_to_u256(self.gas_used).as_u64(),\n\n timestamp: hex_to_u256(self.timestamp).as_u64(),\n\n extra_data,\n\n mix_hash: hex_to_h256(self.mix_hash),\n\n nonce: hex_to_h64(self.nonce),\n\n }\n\n }\n\n}\n\n\n\nimpl BlockHeader {\n\n pub fn hash(&self) -> H256 {\n\n let mut stream = RlpStream::new();\n\n self.stream_rlp(&mut stream, false);\n", "file_path": "pallets/worker/src/types.rs", "rank": 74, "score": 10.12269316047313 }, { "content": "use node_template_runtime::{\n\n AccountId, AuraConfig, BalancesConfig, GenesisConfig, GrandpaConfig,\n\n Signature, SudoConfig, SystemConfig, WASM_BINARY,\n\n};\n\nuse sc_service::ChainType;\n\nuse sp_consensus_aura::sr25519::AuthorityId as AuraId;\n\nuse sp_core::{sr25519, Pair, Public};\n\nuse sp_finality_grandpa::AuthorityId as GrandpaId;\n\nuse sp_runtime::traits::{IdentifyAccount, Verify};\n\n\n\n// The URL for the telemetry server.\n\n// const STAGING_TELEMETRY_URL: &str = 
\"wss://telemetry.polkadot.io/submit/\";\n\n\n\n/// Specialized `ChainSpec`. This is a specialization of the general Substrate\n\n/// ChainSpec type.\n\npub type ChainSpec = sc_service::GenericChainSpec<GenesisConfig>;\n\n\n\n/// Generate a crypto pair from seed.\n", "file_path": "node/src/chain_spec.rs", "rank": 75, "score": 10.014986518715016 }, { "content": " match Self::dags_start_epoch() {\n\n Some(ep) => Self::dags_merkle_roots()[(epoch - ep) as usize],\n\n None => H128::zero(),\n\n }\n\n }\n\n\n\n pub fn last_block_number() -> U256 {\n\n match Self::infos(Self::best_header_hash()) {\n\n Some(header) => header.number,\n\n None => U256::zero(),\n\n }\n\n }\n\n\n\n /// Returns the block hash from the canonical chain.\n\n pub fn block_hash(index: u64) -> Option<H256> {\n\n Self::canonical_header_hashes(U256::from(index))\n\n }\n\n\n\n /// Returns all hashes known for that height.\n\n pub fn known_hashes(index: u64) -> Vec<H256> {\n", "file_path": "pallets/worker/src/lib.rs", "rank": 76, "score": 9.980769254379716 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n// `construct_runtime!` does a lot of recursion and requires us to increase the\n\n// limit to 256.\n\n#![recursion_limit = \"256\"]\n\n\n\n// Make the WASM binary available.\n\n#[cfg(feature = \"std\")]\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/wasm_binary.rs\"));\n\nuse codec::{Decode, Encode};\n\n/// Import the worker pallet.\n\npub use pallet_worker;\n\nuse sp_runtime::generic::Era;\n\nuse sp_runtime::traits::StaticLookup;\n\nuse sp_runtime::SaturatedConversion;\n\n\n\nuse pallet_grandpa::fg_primitives;\n\nuse pallet_grandpa::{\n\n AuthorityId as GrandpaId, AuthorityList as GrandpaAuthorityList,\n\n};\n\nuse sp_api::impl_runtime_apis;\n", "file_path": "runtime/src/lib.rs", "rank": 77, "score": 9.780853518575933 }, { "content": " Perbill,\n\n};\n\n\n\nuse futures::future::join_all;\n\nuse hex::FromHex;\n\nuse lazy_static::lazy_static;\n\nuse rlp::RlpStream;\n\nuse serde::{Deserialize, 
Deserializer};\n\nuse sp_core::Pair;\n\nuse std::panic;\n\nuse web3::futures::Future;\n\nuse web3::types::Block;\n\n\n\nimpl_outer_origin! {\n\n pub enum Origin for Test where system = frame_system {}\n\n}\n\n\n\n// For testing the module, we construct most of a mock runtime. This means\n\n// first constructing a configuration type (`Test`) which `impl`s each of the\n\n// configuration traits of modules we want to use.\n", "file_path": "pallets/worker/src/tests.rs", "rank": 78, "score": 9.762529777638402 }, { "content": "/// the chain.\n\npub type Signature = MultiSignature;\n\n\n\n/// Some way of identifying an account on the chain. We intentionally make it\n\n/// equivalent to the public key of our transaction signing scheme.\n\npub type AccountId =\n\n <<Signature as Verify>::Signer as IdentifyAccount>::AccountId;\n\n\n\n/// The type for looking up accounts. We don't expect more than 4 billion of\n\n/// them, but you never know...\n\npub type AccountIndex = u32;\n\n\n\n/// Balance of an account.\n\npub type Balance = u128;\n\n\n\n/// Index of a transaction in the chain.\n\npub type Index = u32;\n\n\n\n/// A hash of some data used by the chain.\n\npub type Hash = sp_core::H256;\n", "file_path": "runtime/src/lib.rs", "rank": 79, "score": 9.495014569647372 }, { "content": "\n\n /// Verify merkle paths to the DAG nodes.\n\n fn hashimoto_merkle(\n\n header_hash: H256,\n\n nonce: H64,\n\n header_number: U256,\n\n nodes: &[types::DoubleNodeWithMerkleProof],\n\n ) -> (H256, H256) {\n\n <VerificationIndex>::set(0);\n\n // Check that we have the expected number of nodes with proofs\n\n // const MIXHASHES: usize = MIX_BYTES / HASH_BYTES;\n\n // if nodes.len() != MIXHASHES * ACCESSES / 2 {\n\n // return Err(Error::UnexpectedNumberOfNodes);\n\n // }\n\n\n\n let epoch = header_number.as_u64() / 30_000;\n\n // Reuse single Merkle root across all the proofs\n\n let merkle_root = Self::dag_merkle_root(epoch);\n\n ethash::hashimoto(\n\n header_hash.0.into(),\n", "file_path": 
"pallets/worker/src/lib.rs", "rank": 80, "score": 9.482220149467853 }, { "content": "}\n\n\n\nparameter_types! {\n\n pub const MinimumPeriod: u64 = SLOT_DURATION / 2;\n\n}\n\n\n\nimpl pallet_timestamp::Config for Runtime {\n\n type MinimumPeriod = MinimumPeriod;\n\n /// A timestamp: milliseconds since the unix epoch.\n\n type Moment = u64;\n\n type OnTimestampSet = Aura;\n\n type WeightInfo = ();\n\n}\n\n\n\nparameter_types! {\n\n pub const ExistentialDeposit: u128 = 500;\n\n pub const MaxLocks: u32 = 50;\n\n}\n\n\n\nimpl pallet_balances::Config for Runtime {\n", "file_path": "runtime/src/lib.rs", "rank": 81, "score": 9.38220631698755 }, { "content": "/// Blocks will be produced at a minimum duration defined by `SLOT_DURATION`.\n\n/// `SLOT_DURATION` is picked up by `pallet_timestamp` which is in turn picked\n\n/// up by `pallet_aura` to implement `fn slot_duration()`.\n\n///\n\n/// Change this to adjust the block time.\n\npub const MILLISECS_PER_BLOCK: u64 = 6000;\n\n\n\npub const SLOT_DURATION: u64 = MILLISECS_PER_BLOCK;\n\n\n\n// Time is measured by number of blocks.\n\npub const MINUTES: BlockNumber = 60_000 / (MILLISECS_PER_BLOCK as BlockNumber);\n\npub const HOURS: BlockNumber = MINUTES * 60;\n\npub const DAYS: BlockNumber = HOURS * 24;\n\n\n\n/// The version information used to identify this runtime when compiled\n\n/// natively.\n\n#[cfg(feature = \"std\")]\n", "file_path": "runtime/src/lib.rs", "rank": 82, "score": 9.291426789308396 }, { "content": "//! A collection of node-specific RPC methods.\n\n//! Substrate provides the `sc-rpc` crate, which defines the core RPC layer\n\n//! used by Substrate nodes. This file extends those RPC definitions with\n\n//! 
capabilities that are specific to this project's runtime configuration.\n\n\n\n#![warn(missing_docs)]\n\n\n\nuse std::sync::Arc;\n\n\n\nuse node_template_runtime::{opaque::Block, AccountId, Balance, Index};\n\npub use sc_rpc_api::DenyUnsafe;\n\nuse sp_api::ProvideRuntimeApi;\n\nuse sp_block_builder::BlockBuilder;\n\nuse sp_blockchain::{Error as BlockChainError, HeaderBackend, HeaderMetadata};\n\nuse sp_transaction_pool::TransactionPool;\n\n\n\n/// Full client dependencies.\n\npub struct FullDeps<C, P> {\n\n /// The client instance to use.\n\n pub client: Arc<C>,\n\n /// Transaction pool instance.\n\n pub pool: Arc<P>,\n\n /// Whether to deny unsafe calls\n\n pub deny_unsafe: DenyUnsafe,\n\n}\n\n\n\n/// Instantiate all full RPC extensions.\n", "file_path": "node/src/rpc.rs", "rank": 83, "score": 9.149180458177778 }, { "content": " weights::{\n\n constants::{\n\n BlockExecutionWeight, ExtrinsicBaseWeight, RocksDbWeight,\n\n WEIGHT_PER_SECOND,\n\n },\n\n IdentityFee, Weight,\n\n },\n\n StorageValue,\n\n};\n\npub use pallet_balances::Call as BalancesCall;\n\npub use pallet_timestamp::Call as TimestampCall;\n\nuse pallet_transaction_payment::CurrencyAdapter;\n\n#[cfg(any(feature = \"std\", test))]\n\npub use sp_runtime::BuildStorage;\n\npub use sp_runtime::{Perbill, Permill};\n\n\n\n/// An index to a block.\n\npub type BlockNumber = u32;\n\n\n\n/// Alias to 512-bit hash when used in the context of a transaction signature on\n", "file_path": "runtime/src/lib.rs", "rank": 84, "score": 9.060960190396305 }, { "content": " pub fn from_values(dag_nodes: [H512; 2], proof: Vec<H128>) -> Self {\n\n Self { dag_nodes, proof }\n\n }\n\n\n\n fn truncate_to_h128(arr: H256) -> H128 {\n\n let mut data = [0u8; 16];\n\n data.copy_from_slice(&(arr.0)[16..]);\n\n H128(data)\n\n }\n\n\n\n fn hash_h128(l: H128, r: H128) -> H128 {\n\n let mut data = [0u8; 64];\n\n data[16..32].copy_from_slice(&(l.0));\n\n data[48..64].copy_from_slice(&(r.0));\n\n 
Self::truncate_to_h128(sha2_256(&data).into())\n\n }\n\n\n\n pub fn apply_merkle_proof(&self, index: u64) -> Result<H128, &'static str> {\n\n let mut data = [0u8; 128];\n\n data[..64].copy_from_slice(&(self.dag_nodes[0].0));\n", "file_path": "pallets/worker/src/types.rs", "rank": 85, "score": 9.001634767826946 }, { "content": "\n\n // 0x823a4ce867a306eca6ecb523198293e46c7e137f4bf29af83590041afa365f11\n\n fn truncate_to_h128(arr: H256) -> H128 {\n\n let mut data = [0u8; 16];\n\n data.copy_from_slice(&(arr.0)[16..]);\n\n H128(data)\n\n }\n\n\n\n fn hash_h128(l: H128, r: H128) -> H128 {\n\n let mut data = [0u8; 64];\n\n data[16..32].copy_from_slice(&(l.0));\n\n data[48..64].copy_from_slice(&(r.0));\n\n\n\n Self::truncate_to_h128(types::sha256(&data).into())\n\n }\n\n\n\n pub fn apply_merkle_proof(\n\n index: u64,\n\n dag_nodes: Vec<H512>,\n\n proof: Vec<H128>,\n", "file_path": "pallets/worker/src/lib.rs", "rank": 86, "score": 8.880880516066396 }, { "content": " }\n\n}\n\n\n\ndecl_event!(\n\n pub enum Event<T>\n\n where\n\n AccountId = <T as frame_system::Config>::AccountId,\n\n {\n\n NewHeader(u32, AccountId),\n\n }\n\n);\n\n\n\ndecl_module! 
{\n\n pub struct Module<T: Config> for enum Call where origin: T::Origin {\n\n // // Errors must be initialized if they are used by the pallet.\n\n // type Error = Error<T>;\n\n\n\n // Events must be initialized if they are used by the pallet.\n\n fn deposit_event() = default;\n\n\n", "file_path": "pallets/worker/src/lib.rs", "rank": 87, "score": 8.842866526890477 }, { "content": " use sp_application_crypto::{app_crypto, sr25519};\n\n app_crypto!(sr25519, super::KEY_TYPE);\n\n }\n\n\n\n /// Identity of the equivocation/misbehavior reporter.\n\n pub type ReporterId = app::Public;\n\n\n\n /// An `AppCrypto` type to allow submitting signed transactions using the\n\n /// reporting application key as signer.\n\n pub struct ReporterAppCrypto;\n\n\n\n impl AppCrypto<<Signature as Verify>::Signer, Signature> for ReporterAppCrypto {\n\n type GenericPublic = sp_core::sr25519::Public;\n\n type GenericSignature = sp_core::sr25519::Signature;\n\n type RuntimeAppPublic = ReporterId;\n\n }\n\n}\n\n\n\nimpl pallet_worker::Config for Runtime {\n\n type AuthorityId = report::ReporterAppCrypto;\n", "file_path": "runtime/src/lib.rs", "rank": 88, "score": 8.607854514272255 }, { "content": " fn create_transaction<\n\n C: frame_system::offchain::AppCrypto<Self::Public, Self::Signature>,\n\n >(\n\n call: Call<Test>,\n\n _public: <Signature as Verify>::Signer,\n\n _account: AccountId,\n\n nonce: u64,\n\n ) -> Option<(Call<Test>, <Extrinsic as ExtrinsicT>::SignaturePayload)> {\n\n Some((call, (nonce, ())))\n\n }\n\n}\n\n\n\nparameter_types! 
{\n\n pub const GracePeriod: u64 = 5;\n\n pub const UnsignedInterval: u64 = 128;\n\n pub const UnsignedPriority: u64 = 1 << 20;\n\n}\n\n\n\nimpl Config for Test {\n\n type AuthorityId = crypto::AuthId;\n\n type Call = Call<Test>;\n\n type Event = ();\n\n}\n\n\n", "file_path": "pallets/worker/src/tests.rs", "rank": 89, "score": 8.38773822664992 }, { "content": "#[derive(Default, Debug, Clone, PartialEq, Eq)]\n\npub struct LogEntry {\n\n pub address: H160,\n\n pub topics: Vec<H256>,\n\n pub data: Vec<u8>,\n\n}\n\n\n\nimpl rlp::Decodable for LogEntry {\n\n fn decode(rlp: &rlp::Rlp) -> Result<Self, rlp::DecoderError> {\n\n let result = LogEntry {\n\n address: rlp.val_at(0usize)?,\n\n topics: rlp.list_at(1usize)?,\n\n data: rlp.val_at(2usize)?,\n\n };\n\n Ok(result)\n\n }\n\n}\n\n\n\nimpl rlp::Encodable for LogEntry {\n\n fn rlp_append(&self, stream: &mut rlp::RlpStream) {\n", "file_path": "pallets/worker/src/types.rs", "rank": 90, "score": 8.271415903997378 }, { "content": "#[derive(Clone, Eq, PartialEq, Encode, Decode)]\n\npub struct Test;\n\n\n\nparameter_types! 
{\n\n pub const BlockHashCount: u64 = 250;\n\n pub BlockWeights: frame_system::limits::BlockWeights =\n\n frame_system::limits::BlockWeights::simple_max(1024);\n\n pub const MinimumPeriod: u64 = 1;\n\n}\n\n\n\nimpl frame_system::Config for Test {\n\n type AccountData = ();\n\n type AccountId = sp_core::sr25519::Public;\n\n type BaseCallFilter = ();\n\n type BlockHashCount = BlockHashCount;\n\n type BlockLength = ();\n\n type BlockNumber = u64;\n\n type BlockWeights = ();\n\n type Call = ();\n\n type DbWeight = ();\n", "file_path": "pallets/worker/src/tests.rs", "rank": 91, "score": 8.04094958588776 }, { "content": " #[derive(Debug, Clone, Encode, Decode)]\n\n struct MockedInfura(u64);\n\n\n\n fn mock_header_with(number: u64) -> BlockHeader {\n\n BlockHeader {\n\n parent_hash: H256::random(),\n\n uncles_hash: H256::random(),\n\n author: Address::random(),\n\n state_root: H256::random(),\n\n transactions_root: H256::random(),\n\n receipts_root: H256::random(),\n\n log_bloom: Bloom::random(),\n\n difficulty: U256::MAX,\n\n number: U256::from(number),\n\n gas_limit: 1000,\n\n gas_used: 1000,\n\n timestamp: 1610749011,\n\n extra_data: b\"Mocked data for tests\".to_vec(),\n\n mix_hash: H256::random(),\n\n nonce: H64::random(),\n", "file_path": "pallets/worker/src/blocks_queue.rs", "rank": 92, "score": 8.015183135129066 }, { "content": "//! Service and ServiceFactory implementation. Specialized wrapper over\n\n//! 
substrate service.\n\n\n\nuse node_template_runtime::{self, opaque::Block, RuntimeApi};\n\nuse sc_client_api::{ExecutorProvider, RemoteBackend};\n\nuse sc_executor::native_executor_instance;\n\npub use sc_executor::NativeExecutor;\n\nuse sc_finality_grandpa::SharedVoterState;\n\nuse sc_keystore::LocalKeystore;\n\nuse sc_service::{error::Error as ServiceError, Configuration, TaskManager};\n\nuse sp_consensus_aura::sr25519::AuthorityPair as AuraPair;\n\nuse sp_inherents::InherentDataProviders;\n\nuse sp_keystore::SyncCryptoStore;\n\nuse std::sync::Arc;\n\nuse std::time::Duration;\n\n// Our native executor instance.\n\nnative_executor_instance!(\n\n pub Executor,\n\n node_template_runtime::api::dispatch,\n\n node_template_runtime::native_version,\n\n frame_benchmarking::benchmarking::HostFunctions,\n\n);\n\n\n", "file_path": "node/src/service.rs", "rank": 93, "score": 7.942167293308554 }, { "content": " app_crypto::{app_crypto, sr25519},\n\n traits::Verify,\n\n };\n\n app_crypto!(sr25519, KEY_TYPE);\n\n\n\n pub struct AuthId;\n\n impl\n\n frame_system::offchain::AppCrypto<\n\n <Sr25519Signature as Verify>::Signer,\n\n Sr25519Signature,\n\n > for AuthId\n\n {\n\n type GenericPublic = sp_core::sr25519::Public;\n\n type GenericSignature = sp_core::sr25519::Signature;\n\n type RuntimeAppPublic = Public;\n\n }\n\n}\n\n\n\n#[derive(Encode, Decode, Clone, PartialEq, Eq)]\n\npub struct Payload<Public> {\n\n number: u64,\n\n public: Public,\n\n}\n\n\n\nimpl<T: SigningTypes> SignedPayload<T> for Payload<T::Public> {\n\n fn public(&self) -> T::Public { self.public.clone() }\n\n}\n\n\n\n/// This pallet's configuration trait\n", "file_path": "pallets/worker/src/lib.rs", "rank": 94, "score": 7.891296394914718 }, { "content": " let data = stream.out();\n\n crate::keccak_256(&data).into()\n\n }\n\n\n\n pub fn seal_hash(&self) -> H256 {\n\n let mut stream = RlpStream::new();\n\n self.stream_rlp(&mut stream, true);\n\n let data = stream.out();\n\n crate::keccak_256(&data).into()\n\n 
}\n\n\n\n fn stream_rlp(&self, stream: &mut RlpStream, partial: bool) {\n\n stream.begin_list(13 + if !partial { 2 } else { 0 });\n\n stream.append(&self.parent_hash);\n\n stream.append(&self.uncles_hash);\n\n stream.append(&self.author);\n\n stream.append(&self.state_root);\n\n stream.append(&self.transactions_root);\n\n stream.append(&self.receipts_root);\n\n stream.append(&self.log_bloom);\n", "file_path": "pallets/worker/src/types.rs", "rank": 95, "score": 7.777725828296183 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n#![allow(clippy::too_many_arguments, clippy::large_enum_variant)]\n\n\n\nuse codec::{Decode, Encode};\n\nuse frame_support::{\n\n debug, decl_error, decl_event, decl_module, decl_storage, ensure,\n\n};\n\nuse frame_system::{\n\n self as system, ensure_signed,\n\n offchain::{\n\n AppCrypto, CreateSignedTransaction, SendSignedTransaction,\n\n SignedPayload, Signer, SigningTypes,\n\n },\n\n};\n\nuse sp_core::crypto::KeyTypeId;\n\nuse sp_runtime::offchain::storage::StorageValueRef;\n\nuse sp_runtime::{\n\n offchain::http,\n\n transaction_validity::{\n\n InvalidTransaction, TransactionSource, TransactionValidity,\n", "file_path": "pallets/worker/src/lib.rs", "rank": 96, "score": 7.537656132826095 }, { "content": " nonce.0.into(),\n\n ethash::get_full_size(epoch as usize),\n\n |offset| {\n\n let idx = Self::verification_index() as usize;\n\n <VerificationIndex>::set(Self::verification_index() + 1);\n\n debug::native::trace!(\n\n \"Starting verification index: {:?}...\",\n\n idx\n\n );\n\n // Each two nodes are packed into single 128 bytes with Merkle\n\n // proof\n\n let node = &nodes[idx as usize / 2];\n\n if idx % 2 == 0 && Self::validate_ethash() {\n\n // Divide by 2 to adjust offset for 64-byte words instead of\n\n // 128-byte\n\n if let Ok(computed_root) =\n\n node.apply_merkle_proof((offset / 2) as u64)\n\n {\n\n assert_eq!(merkle_root, computed_root);\n\n }\n", "file_path": "pallets/worker/src/lib.rs", "rank": 97, "score": 
7.448427178837215 }, { "content": " type Event = ();\n\n type Hash = H256;\n\n type Hashing = ::sp_runtime::traits::BlakeTwo256;\n\n type Header = Header;\n\n type Index = u64;\n\n type Lookup = IdentityLookup<Self::AccountId>;\n\n type OnKilledAccount = ();\n\n type OnNewAccount = ();\n\n type Origin = Origin;\n\n type PalletInfo = ();\n\n type SystemWeightInfo = ();\n\n type Version = ();\n\n}\n\n\n", "file_path": "pallets/worker/src/tests.rs", "rank": 98, "score": 7.312075350671989 }, { "content": " >,\n\n _key_owner_proof: fg_primitives::OpaqueKeyOwnershipProof,\n\n ) -> Option<()> {\n\n None\n\n }\n\n\n\n fn generate_key_ownership_proof(\n\n _set_id: fg_primitives::SetId,\n\n _authority_id: GrandpaId,\n\n ) -> Option<fg_primitives::OpaqueKeyOwnershipProof> {\n\n // NOTE: this is the only implementation possible since we've\n\n // defined our key owner proof type as a bottom type (i.e. a type\n\n // with no values).\n\n None\n\n }\n\n }\n\n\n\n impl frame_system_rpc_runtime_api::AccountNonceApi<Block, AccountId, Index> for Runtime {\n\n fn account_nonce(account: AccountId) -> Index {\n\n System::account_nonce(account)\n", "file_path": "runtime/src/lib.rs", "rank": 99, "score": 7.308579935842479 } ]
Rust
src/year2018/day10.rs
CraZySacX/aoc
c96faf4c3fb0f39db0c2f5b573a307e5c779e421
use error::Result; use regex::Regex; use std::io::BufRead; pub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> { println!("{}", align(reader, false)?); Ok(0) } fn align<T: BufRead>(reader: T, test: bool) -> Result<String> { let line_re = Regex::new(r"position=<(.*), (.*)> velocity=<(.*), (.*)>")?; let mut star_map: Vec<(isize, isize, isize, isize)> = Vec::new(); for line in reader.lines().filter_map(|x| x.ok()) { for cap in line_re.captures_iter(&line) { let x = (&cap[1]).trim().parse::<isize>()?; let y = (&cap[2]).trim().parse::<isize>()?; let vx = (&cap[3]).trim().parse::<isize>()?; let vy = (&cap[4]).trim().parse::<isize>()?; star_map.push((x, y, vx, vy)); } } let max_step = if test { 3 } else { 10619 }; for _ in 0..max_step { move_stars(&mut star_map); } Ok(show_stars(&star_map)) } fn move_stars(star_map: &mut Vec<(isize, isize, isize, isize)>) { for star in star_map { star.0 += star.2; star.1 += star.3; } } fn show_stars(star_map: &[(isize, isize, isize, isize)]) -> String { let mut output = String::new(); let mut min_x = isize::max_value(); let mut min_y = isize::max_value(); let mut max_x = isize::min_value(); let mut max_y = isize::min_value(); for star in star_map { if star.0 < min_x { min_x = star.0; } if star.0 > max_x { max_x = star.0; } if star.1 < min_y { min_y = star.1; } if star.1 > max_y { max_y = star.1; } } for y in min_y..=max_y { for x in min_x..=max_x { let mut found_star = false; for star in star_map { if star.0 == x && star.1 == y { output.push('#'); found_star = true; break; } } if !found_star { output.push('.'); } } output.push('\n'); } output } #[cfg(test)] mod one_star { use super::align; use error::Result; use std::io::Cursor; const TEST_CHAIN: &str = r"position=< 9, 1> velocity=< 0, 2> position=< 7, 0> velocity=<-1, 0> position=< 3, -2> velocity=<-1, 1> position=< 6, 10> velocity=<-2, -1> position=< 2, -4> velocity=< 2, 2> position=<-6, 10> velocity=< 2, -2> position=< 1, 8> velocity=< 1, -1> position=< 1, 7> 
velocity=< 1, 0> position=<-3, 11> velocity=< 1, -2> position=< 7, 6> velocity=<-1, -1> position=<-2, 3> velocity=< 1, 0> position=<-4, 3> velocity=< 2, 0> position=<10, -3> velocity=<-1, 1> position=< 5, 11> velocity=< 1, -2> position=< 4, 7> velocity=< 0, -1> position=< 8, -2> velocity=< 0, 1> position=<15, 0> velocity=<-2, 0> position=< 1, 6> velocity=< 1, 0> position=< 8, 9> velocity=< 0, -1> position=< 3, 3> velocity=<-1, 1> position=< 0, 5> velocity=< 0, -1> position=<-2, 2> velocity=< 2, 0> position=< 5, -2> velocity=< 1, 2> position=< 1, 4> velocity=< 2, 1> position=<-2, 7> velocity=< 2, -2> position=< 3, 6> velocity=<-1, -1> position=< 5, 0> velocity=< 1, 0> position=<-6, 0> velocity=< 2, 0> position=< 5, 9> velocity=< 1, -2> position=<14, 7> velocity=<-2, 0> position=<-3, 6> velocity=< 2, -1>"; const EXPECTED: &str = r"#...#..### #...#...#. #...#...#. #####...#. #...#...#. #...#...#. #...#...#. #...#..### "; #[test] fn solution() -> Result<()> { assert_eq!(align(Cursor::new(TEST_CHAIN), true)?, EXPECTED); Ok(()) } } #[cfg(test)] mod two_star { #[test] fn solution() { assert!(true); } }
use error::Result; use regex::Regex; use std::io::BufRead; pub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> { println!("{}", align(reader, false)?); Ok(0) } fn align<T: BufRead>(reader: T, test: bool) -> Result<String> { let line_re = Regex::new(r"position=<(.*), (.*)> velocity=<(.*), (.*)>")?; let mut star_map: Vec<(isize, isize, isize, isize)> = Vec::new(); for line in reader.lines().filter_map(|x| x.ok()) { for cap in line_re.captures_iter(&line) { let x = (&cap[1]).trim().parse::<isize>()?; let y = (&cap[2]).trim().parse::<isize>()?; let vx = (&cap[3]).trim().parse::<isize>()?; let vy = (&cap[4]).trim().parse::<isize>()?; star_map.push((x, y, vx, vy)); } } let max_step = if test { 3 } else { 10619 }; for _ in 0..max_step { move_stars(&mut star_map); } Ok(show_stars(&star_map)) } fn move_stars(star_map: &mut Vec<(isize, isize, isize, isize)>) { for star in star_map { star.0 += star.2; star.1 += star.3; } } fn show_stars(star_map: &[(isize, isize, isize, isize)]) -> String { let mut output = String::new(); let mut min_x = isize::max_value(); let mut min_y = isize::max_value(); let mut max_x = isize::min_value(); let mut max_y = isize::min_value(); for star in star_map { if star.0 < min_x { min_x = star.0; }
#[cfg(test)] mod one_star { use super::align; use error::Result; use std::io::Cursor; const TEST_CHAIN: &str = r"position=< 9, 1> velocity=< 0, 2> position=< 7, 0> velocity=<-1, 0> position=< 3, -2> velocity=<-1, 1> position=< 6, 10> velocity=<-2, -1> position=< 2, -4> velocity=< 2, 2> position=<-6, 10> velocity=< 2, -2> position=< 1, 8> velocity=< 1, -1> position=< 1, 7> velocity=< 1, 0> position=<-3, 11> velocity=< 1, -2> position=< 7, 6> velocity=<-1, -1> position=<-2, 3> velocity=< 1, 0> position=<-4, 3> velocity=< 2, 0> position=<10, -3> velocity=<-1, 1> position=< 5, 11> velocity=< 1, -2> position=< 4, 7> velocity=< 0, -1> position=< 8, -2> velocity=< 0, 1> position=<15, 0> velocity=<-2, 0> position=< 1, 6> velocity=< 1, 0> position=< 8, 9> velocity=< 0, -1> position=< 3, 3> velocity=<-1, 1> position=< 0, 5> velocity=< 0, -1> position=<-2, 2> velocity=< 2, 0> position=< 5, -2> velocity=< 1, 2> position=< 1, 4> velocity=< 2, 1> position=<-2, 7> velocity=< 2, -2> position=< 3, 6> velocity=<-1, -1> position=< 5, 0> velocity=< 1, 0> position=<-6, 0> velocity=< 2, 0> position=< 5, 9> velocity=< 1, -2> position=<14, 7> velocity=<-2, 0> position=<-3, 6> velocity=< 2, -1>"; const EXPECTED: &str = r"#...#..### #...#...#. #...#...#. #####...#. #...#...#. #...#...#. #...#...#. #...#..### "; #[test] fn solution() -> Result<()> { assert_eq!(align(Cursor::new(TEST_CHAIN), true)?, EXPECTED); Ok(()) } } #[cfg(test)] mod two_star { #[test] fn solution() { assert!(true); } }
if star.0 > max_x { max_x = star.0; } if star.1 < min_y { min_y = star.1; } if star.1 > max_y { max_y = star.1; } } for y in min_y..=max_y { for x in min_x..=max_x { let mut found_star = false; for star in star_map { if star.0 == x && star.1 == y { output.push('#'); found_star = true; break; } } if !found_star { output.push('.'); } } output.push('\n'); } output }
function_block-function_prefix_line
[ { "content": "fn lca<T: BufRead>(reader: T, max_i: usize, max_j: usize, _second_star: bool, test: bool) -> Result<Array2<char>> {\n\n let mut lca = Array2::<char>::default((max_i, max_j));\n\n\n\n for (j, line) in reader.lines().filter_map(|x| x.ok()).enumerate() {\n\n for (i, ch) in line.chars().enumerate() {\n\n match ch {\n\n '.' => lca[[i, j]] = ch,\n\n '|' => lca[[i, j]] = ch,\n\n '#' => lca[[i, j]] = ch,\n\n _ => return Err(\"invalid lumber area\".into()),\n\n }\n\n }\n\n }\n\n\n\n if test {\n\n print_lca(&lca, 0);\n\n }\n\n Ok(lca)\n\n}\n\n\n", "file_path": "src/year2018/day18.rs", "rank": 1, "score": 245424.1834221271 }, { "content": "fn consume_work(worker: &mut Worker, work: &(String, u32)) -> bool {\n\n if worker.work.is_none() {\n\n worker.remaining = work.1;\n\n worker.work = Some(work.clone());\n\n true\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/year2018/day07.rs", "rank": 2, "score": 244273.1957853592 }, { "content": "/// Parse the list of lengths and calculate the hash.\n\nfn parse_list_and_hash(hash: &mut Vec<u32>, line: &str, num_elements: u32, second_star: bool) -> Result<()> {\n\n let lengths = generate_lengths(line, second_star)?;\n\n\n\n for i in 0..num_elements {\n\n hash.push(i);\n\n }\n\n\n\n let rounds = if second_star { 64 } else { 1 };\n\n let mut curr_pos: u32 = 0;\n\n let mut skip_size = 0;\n\n\n\n for _ in 0..rounds {\n\n for length in &lengths {\n\n let mut indices = Vec::new();\n\n let mut slice = Vec::new();\n\n\n\n for j in curr_pos..u32::from(*length) + curr_pos {\n\n let actual_idx = j % num_elements;\n\n indices.push(actual_idx);\n\n slice.push(*hash.get(actual_idx as usize).ok_or(\"invalid\")?);\n", "file_path": "src/year2017/day14.rs", "rank": 4, "score": 241345.35351142864 }, { "content": "/// Parse the list of lengths and calculate the hash.\n\nfn parse_list_and_hash(hash: &mut Vec<u32>, line: &str, num_elements: u32, second_star: bool) -> Result<()> {\n\n let lengths = generate_lengths(line, 
second_star)?;\n\n\n\n for i in 0..num_elements {\n\n hash.push(i);\n\n }\n\n\n\n let rounds = if second_star { 64 } else { 1 };\n\n let mut curr_pos: u32 = 0;\n\n let mut skip_size = 0;\n\n\n\n for _ in 0..rounds {\n\n for length in &lengths {\n\n let mut indices = Vec::new();\n\n let mut slice = Vec::new();\n\n\n\n for j in curr_pos..u32::from(*length) + curr_pos {\n\n let actual_idx = j % num_elements;\n\n indices.push(actual_idx);\n\n slice.push(*hash.get(actual_idx as usize).ok_or(\"invalid\")?);\n", "file_path": "src/year2017/day10.rs", "rank": 5, "score": 241345.35351142866 }, { "content": "fn run_scan<T: BufRead>(reader: T, _second_star: bool, test: bool) -> Result<usize> {\n\n let vein_re = Regex::new(r\"(x|y)=(\\d+), (x|y)=(\\d+)\\.\\.(\\d+)\")?;\n\n let mut x_coord_map = HashMap::new();\n\n let mut y_coord_map = HashMap::new();\n\n\n\n for line in reader.lines().filter_map(|x| x.ok()) {\n\n for caps in vein_re.captures_iter(&line) {\n\n let c1 = (&caps[1]).to_string();\n\n let v1 = (&caps[2]).parse::<usize>()?;\n\n let r1 = (&caps[4]).parse::<usize>()?;\n\n let r2 = (&caps[5]).parse::<usize>()?;\n\n\n\n match &c1[..] 
{\n\n \"x\" => {\n\n let range_vec = x_coord_map.entry(v1).or_insert_with(Vec::new);\n\n\n\n for i in r1..=r2 {\n\n range_vec.push(i);\n\n }\n\n }\n", "file_path": "src/year2018/day17.rs", "rank": 6, "score": 240652.98345293006 }, { "content": "/// Parse a node description line, and add the nodes and children to the appropriate structures.\n\nfn parse_line(line: &str, id: usize, nodes: &mut Vec<Node>, children: &mut HashMap<usize, Vec<String>>) -> Result<()> {\n\n let node_def: Vec<&str> = line.split(\" -> \").collect();\n\n\n\n let node_desc = node_def.get(0).ok_or(\"Unable to get node description\")?;\n\n let desc: Vec<&str> = node_desc.split(' ').collect();\n\n let name = desc.get(0).ok_or(\"Unable to deternmine node name\")?;\n\n let weight_str = desc.get(1).ok_or(\"Unable to determine node weight\")?;\n\n let weight = weight_str.trim_matches(|c| c == '(' || c == ')').parse::<u32>()?;\n\n\n\n if let Some(children_desc) = node_def.get(1) {\n\n let children_vec: Vec<String> = children_desc.split(\", \").map(String::from).collect();\n\n children.insert(id, children_vec);\n\n }\n\n\n\n nodes.push(Node {\n\n id,\n\n name: String::from(*name),\n\n weight,\n\n parent: None,\n\n children: None,\n\n });\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/year2017/day07.rs", "rank": 8, "score": 234274.07822034837 }, { "content": "fn run_battle<T>(reader: T, max_i: usize, max_j: usize, second_star: bool, test: bool) -> Result<usize>\n\nwhere\n\n T: BufRead,\n\n{\n\n let lines: Vec<String> = reader.lines().filter_map(|x| x.ok()).collect();\n\n let mut board = Array2::default((0, 0));\n\n let mut dead_elf = true;\n\n let mut round_count = 0;\n\n let mut elf_attack_power = 3;\n\n\n\n while dead_elf {\n\n round_count = 0;\n\n board = generate_map(&lines, max_i, max_j, elf_attack_power)?;\n\n\n\n let mut done = false;\n\n while !done {\n\n match round(&mut board, max_i, max_j, second_star)? 
{\n\n Outcome::NoMoreEnemies => {\n\n dead_elf = false;\n\n done = true;\n", "file_path": "src/year2018/day15.rs", "rank": 9, "score": 228775.3836275788 }, { "content": "/// Generate a register map entry and the associated command\n\nfn generate_register_map_entry_and_command(line: &str, register_map: &mut HashMap<String, i32>, commands: &mut Vec<RegisterCommand>) -> Result<()> {\n\n let line_desc: Vec<&str> = line.split_whitespace().collect();\n\n let name_str = line_desc.get(0).ok_or(\"Invalid register name!\")?;\n\n let name = String::from(*name_str);\n\n let command_str = line_desc.get(1).ok_or(\"Invalid command!\")?;\n\n let value = line_desc.get(2).ok_or(\"Invalid command value\")?.parse::<i32>()?;\n\n let command_register_str = line_desc.get(4).ok_or(\"Invalid command register!\")?;\n\n let command_register = String::from(*command_register_str);\n\n let operator_str = line_desc.get(5).ok_or(\"Invalid operator!\")?;\n\n let condition_value = line_desc.get(6).ok_or(\"Invalid condition value!\")?.parse::<i32>()?;\n\n\n\n register_map.entry(name.clone()).or_insert(0);\n\n\n\n commands.push(RegisterCommand {\n\n register: name,\n\n command: TryFrom::try_from(*command_str)?,\n\n value,\n\n condition: Condition {\n\n register: command_register,\n\n op: TryFrom::try_from(*operator_str)?,\n\n value: condition_value,\n\n },\n\n });\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/year2017/day08.rs", "rank": 10, "score": 226447.32251333035 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day25.rs", "rank": 11, "score": 224169.91999416688 }, { 
"content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day06.rs", "rank": 12, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day05.rs", "rank": 13, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day07.rs", "rank": 14, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n let result = if _second_star { brighten(reader)? } else { decorate(reader)? 
};\n\n Ok(result as u32)\n\n}\n\n\n", "file_path": "src/year2015/day06.rs", "rank": 15, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day22.rs", "rank": 17, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day15.rs", "rank": 18, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day10.rs", "rank": 19, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod 
one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day21.rs", "rank": 20, "score": 224169.91999416688 }, { "content": "pub fn find_solution<T: BufRead>(_reader: T, _second_star: bool) -> Result<u32> {\n\n Ok(1)\n\n}\n", "file_path": "src/year2018/day25.rs", "rank": 21, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day16.rs", "rank": 22, "score": 224169.91999416688 }, { "content": "/// Find the solution for Advent of Code 2017\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n let mut tape: Vec<u8> = vec![0; 10_000_000];\n\n let mut states: BTreeMap<char, State> = BTreeMap::new();\n\n\n\n let begin_re = Regex::new(r\"^Begin in state ([A-Z])\\.$\")?;\n\n let dc_re = Regex::new(r\"^Perform a diagnostic checksum after (\\d+) steps\\.$\")?;\n\n let in_state_re = Regex::new(r\"^In state ([A-Z]):$\")?;\n\n let if_curr_re = Regex::new(r\"If the current value is (\\d+):$\")?;\n\n let write_val_re = Regex::new(r\" - Write the value (\\d+)\\.$\")?;\n\n let move_re = Regex::new(r\"- Move one slot to the (right|left)\\.$\")?;\n\n let cont_re = Regex::new(r\"- Continue with state ([A-Z])\\.$\")?;\n\n\n\n let mut start_state = 'A';\n\n let mut step_count = 0;\n\n let mut parsing_state = false;\n\n let mut curr_state = 'A';\n\n let mut curr_val = 0;\n\n\n\n for line in reader.lines().filter_map(|x| x.ok()) {\n\n if begin_re.is_match(&line) 
{\n", "file_path": "src/year2017/day25.rs", "rank": 23, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day08.rs", "rank": 24, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n let result = if _second_star {\n\n find_lowest(reader, \"000000\")?\n\n } else {\n\n find_lowest(reader, \"00000\")?\n\n };\n\n Ok(result)\n\n}\n\n\n", "file_path": "src/year2015/day04.rs", "rank": 25, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day23.rs", "rank": 26, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day16.rs", 
"rank": 27, "score": 224169.91999416688 }, { "content": "pub fn find_solution<T: BufRead>(_reader: T, _second_star: bool) -> Result<u32> {\n\n Ok(1)\n\n}\n", "file_path": "src/year2018/day23.rs", "rank": 28, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day19.rs", "rank": 29, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day14.rs", "rank": 30, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day24.rs", "rank": 31, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() 
{\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day18.rs", "rank": 32, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day08.rs", "rank": 33, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day24.rs", "rank": 34, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day11.rs", "rank": 35, "score": 224169.91999416688 }, { "content": "/// Find 
the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day18.rs", "rank": 36, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day07.rs", "rank": 37, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day22.rs", "rank": 38, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n 
assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day12.rs", "rank": 39, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day17.rs", "rank": 40, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day15.rs", "rank": 41, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day25.rs", "rank": 42, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n 
#[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day13.rs", "rank": 43, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day21.rs", "rank": 44, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day01.rs", "rank": 45, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day17.rs", "rank": 46, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() 
{\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day12.rs", "rank": 47, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day11.rs", "rank": 48, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day20.rs", "rank": 49, "score": 224169.91999416688 }, { "content": "pub fn find_solution<T: BufRead>(_reader: T, _second_star: bool) -> Result<u32> {\n\n Ok(1)\n\n}\n", "file_path": "src/year2018/day21.rs", "rank": 50, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n 
}\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day03.rs", "rank": 51, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day19.rs", "rank": 52, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day14.rs", "rank": 53, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day02.rs", "rank": 54, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| 
\"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day09.rs", "rank": 55, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day23.rs", "rank": 56, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day04.rs", "rank": 57, "score": 224169.91999416688 }, { "content": "pub fn find_solution<T: BufRead>(_reader: T, _second_star: bool) -> Result<u32> {\n\n Ok(1)\n\n}\n", "file_path": "src/year2018/day20.rs", "rank": 58, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn 
solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day13.rs", "rank": 59, "score": 224169.91999416688 }, { "content": "/// Find the solution for Advent of Code 2017\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n let mut network_map: Array2<u8> = Array2::zeros((201, 201));\n\n for (idx, line_result) in reader.lines().enumerate() {\n\n let line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n fill_row(line, idx, &mut network_map);\n\n }\n\n\n\n let (letters, steps) = traverse_map(&network_map)?;\n\n writeln!(io::stdout(), \"{}\", letters)?;\n\n\n\n Ok(steps)\n\n}\n\n\n", "file_path": "src/year2017/day19.rs", "rank": 60, "score": 224169.91999416688 }, { "content": "pub fn find_solution<T: BufRead>(_reader: T, _second_star: bool) -> Result<u32> {\n\n Ok(1)\n\n}\n", "file_path": "src/year2018/day24.rs", "rank": 61, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day20.rs", "rank": 62, "score": 224169.91999416688 }, { "content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2015/day10.rs", "rank": 63, "score": 224169.91999416688 }, { 
"content": "/// Find the solution\n\npub fn find_solution<T: BufRead>(reader: T, _second_star: bool) -> Result<u32> {\n\n for line_result in reader.lines() {\n\n let _line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n }\n\n Ok(0)\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod two_star {\n\n #[test]\n\n fn solution() {\n\n assert!(true);\n\n }\n\n}\n", "file_path": "src/year2016/day09.rs", "rank": 64, "score": 224169.91999416688 }, { "content": "fn determine_risk<T: BufRead>(reader: T, second_star: bool, test: bool, max_i: usize, max_j: usize) -> Result<usize> {\n\n let depth_re = Regex::new(r\"depth: (\\d+)\")?;\n\n let target_re = Regex::new(r\"target: (\\d+),(\\d+)\")?;\n\n let mut region_map = Array2::<Region>::default((max_i, max_j));\n\n let mut depth = 0;\n\n let mut target_coord = (0, 0);\n\n\n\n for line in reader.lines().filter_map(|x| x.ok()) {\n\n if depth_re.is_match(&line) {\n\n for caps in depth_re.captures_iter(&line) {\n\n depth = (&caps[1]).parse::<usize>()?;\n\n }\n\n } else if target_re.is_match(&line) {\n\n for caps in target_re.captures_iter(&line) {\n\n let i = (&caps[1]).parse::<usize>()?;\n\n let j = (&caps[2]).parse::<usize>()?;\n\n target_coord = (i, j);\n\n }\n\n }\n\n }\n", "file_path": "src/year2018/day22.rs", "rank": 65, "score": 222063.96759846527 }, { "content": "fn grow_plants(action_map: HashMap<isize, bool>, state_map: &mut BTreeMap<isize, bool>) {\n\n for (idx, action) in action_map {\n\n *state_map.entry(idx).or_insert(false) = action;\n\n }\n\n}\n\n\n", "file_path": "src/year2018/day12.rs", "rank": 66, "score": 219275.75622190157 }, { "content": "fn find_match(all_ids: &mut Vec<String>, matches: &mut Vec<String>) {\n\n let current = all_ids.remove(0);\n\n let curr_ch: Vec<char> = current.chars().collect();\n\n\n\n for id in all_ids {\n\n matches.push(curr_ch.iter().zip(id.chars()).filter(|(a, b)| *a == b).map(|(_, b)| 
b).collect());\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n use super::has_two_or_three;\n\n\n\n #[test]\n\n fn solution() {\n\n assert_eq!(has_two_or_three(\"abcdef\"), (false, false));\n\n assert_eq!(has_two_or_three(\"bababc\"), (true, true));\n\n assert_eq!(has_two_or_three(\"abbcde\"), (true, false));\n\n assert_eq!(has_two_or_three(\"abcccd\"), (false, true));\n\n assert_eq!(has_two_or_three(\"aabcdd\"), (true, false));\n", "file_path": "src/year2018/day02.rs", "rank": 67, "score": 217766.8684817909 }, { "content": "fn execute(registers: &mut Registers, ip: &Ip, ins_vec: &[HashMap<OpCode, Instruction>], test: bool) {\n\n if let Some(ins_map) = ins_vec.get(ip.value) {\n\n if ins_map.len() == 1 {\n\n for (opcode, ins) in ins_map.iter() {\n\n if test {\n\n print!(\"{} {} {} {} \", opcode, ins[0], ins[1], ins[2]);\n\n }\n\n opcode.execute(registers, *ins);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/year2018/day19.rs", "rank": 68, "score": 216186.63202380278 }, { "content": "/// Parse the input and go.\n\nfn parse_and_go(line: &str, second_star: bool) -> Result<u32> {\n\n let steps: Vec<&str> = line.split(',').collect();\n\n let mut coords = (0, 0, 0);\n\n let mut max_distance = 0;\n\n\n\n for step in steps {\n\n move_in_direction(step, &mut coords)?;\n\n let curr_distance = manhattan_distance_from_origin(coords)?;\n\n\n\n if curr_distance > max_distance {\n\n max_distance = curr_distance;\n\n }\n\n }\n\n\n\n if second_star {\n\n Ok(max_distance)\n\n } else {\n\n Ok(manhattan_distance_from_origin(coords)?)\n\n }\n\n}\n\n\n", "file_path": "src/year2017/day11.rs", "rank": 69, "score": 215106.32572130059 }, { "content": "fn score_recipes(count: &str, second_star: bool) -> Result<String> {\n\n let len = count.len();\n\n let count_u = count.parse::<usize>()?;\n\n let mut recipe_deque = VecDeque::<u8>::new();\n\n recipe_deque.push_back(3);\n\n recipe_deque.push_back(7);\n\n\n\n let mut idx_e1 = 0;\n\n let mut idx_e2 = 1;\n\n\n\n loop {\n\n if !second_star 
&& recipe_deque.len() > count_u + 10 {\n\n break;\n\n }\n\n let e1_r = recipe_deque[idx_e1];\n\n let e2_r = recipe_deque[idx_e2];\n\n let next = e1_r + e2_r;\n\n\n\n if next > 9 {\n\n let tens = next / 10;\n", "file_path": "src/year2018/day14.rs", "rank": 70, "score": 214941.13995394442 }, { "content": "fn has_two_or_three(line: &str) -> (bool, bool) {\n\n let mut char_freq = HashMap::new();\n\n let mut result = (false, false);\n\n let chars: Vec<char> = line.chars().collect();\n\n for ch in chars {\n\n let freq = char_freq.entry(ch).or_insert(0);\n\n *freq += 1;\n\n }\n\n\n\n for val in char_freq.values() {\n\n if *val == 2 {\n\n result.0 = true;\n\n break;\n\n }\n\n }\n\n\n\n for val in char_freq.values() {\n\n if *val == 3 {\n\n result.1 = true;\n\n break;\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/year2018/day02.rs", "rank": 71, "score": 213493.70856801124 }, { "content": "fn drip(mins_maxes: (usize, usize, usize, usize), scan_arr: &mut Array2<Soil>, test: bool) -> Result<()> {\n\n move_flowing_water(mins_maxes, scan_arr)?;\n\n\n\n if scan_arr[[500, 1]].kind == SoilKind::Sand {\n\n scan_arr[[500, 1]].kind = SoilKind::FlowingWater;\n\n }\n\n\n\n if test {\n\n print_scan_arr(mins_maxes, scan_arr);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/year2018/day17.rs", "rank": 72, "score": 213483.85952838688 }, { "content": "fn find_closest(all_ids: &mut Vec<String>) -> String {\n\n let mut matches = Vec::new();\n\n\n\n while !all_ids.is_empty() {\n\n find_match(all_ids, &mut matches);\n\n }\n\n\n\n if let Some(longest) = matches.iter().max_by_key(|x| x.len()) {\n\n longest.clone()\n\n } else {\n\n \"\".to_string()\n\n }\n\n}\n\n\n", "file_path": "src/year2018/day02.rs", "rank": 73, "score": 212307.8211508231 }, { "content": "fn run_program<T: BufRead>(reader: T, second_star: bool, test: bool) -> Result<usize> {\n\n let ip_re = Regex::new(r\"#ip (\\d+)\")?;\n\n let instruction_re = Regex::new(r\"([a-z]+) (\\d+) (\\d+) (\\d+)\")?;\n\n\n\n let mut 
instructions_vec = Vec::new();\n\n let mut register = 0;\n\n for line in reader.lines().filter_map(|x| x.ok()) {\n\n if ip_re.is_match(&line) {\n\n for caps in ip_re.captures_iter(&line) {\n\n register = (&caps[1]).parse::<usize>()?;\n\n }\n\n } else if instruction_re.is_match(&line) {\n\n for caps in instruction_re.captures_iter(&line) {\n\n let mut instruction_map = HashMap::new();\n\n let opcode = OpCode::try_from(&caps[1])?;\n\n let reg_a = (&caps[2]).parse::<usize>()?;\n\n let reg_b = (&caps[3]).parse::<usize>()?;\n\n let reg_c = (&caps[4]).parse::<usize>()?;\n\n\n\n instruction_map.insert(opcode, [reg_a, reg_b, reg_c]);\n", "file_path": "src/year2018/day19.rs", "rank": 74, "score": 211446.581218219 }, { "content": "fn gen_maps<T: BufRead>(reader: T, state_map: &mut BTreeMap<isize, bool>, pattern_map: &mut IndexMap<Vec<bool>, bool>) -> Result<()> {\n\n let initial_state_re = Regex::new(r\"^initial state: ([\\.#]+)\")?;\n\n let patt_re = Regex::new(r\"([\\.#]+) => ([\\.#])\")?;\n\n\n\n for line in reader.lines().filter_map(|x| x.ok()) {\n\n for cap in initial_state_re.captures_iter(&line) {\n\n let state_str = &cap[1];\n\n\n\n for (idx, ch) in state_str.chars().enumerate() {\n\n match ch {\n\n '#' => state_map.insert(isize::try_from(idx)?, true),\n\n '.' 
=> state_map.insert(isize::try_from(idx)?, false),\n\n _ => return Err(\"invalid state character\".into()),\n\n };\n\n }\n\n }\n\n\n\n for cap in patt_re.captures_iter(&line) {\n\n let pattern: Vec<bool> = (&cap[1]).chars().map(|ch| ch == '#').collect();\n\n\n\n match &cap[2] {\n\n \"#\" => pattern_map.insert(pattern, true),\n\n \".\" => pattern_map.insert(pattern, false),\n\n _ => return Err(\"invalid pattern character\".into()),\n\n };\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/year2018/day12.rs", "rank": 75, "score": 211393.31293774032 }, { "content": "fn run_generations(gens: usize, state_map: &mut BTreeMap<isize, bool>, pattern_map: &IndexMap<Vec<bool>, bool>) -> Result<isize> {\n\n for _ in 0..gens {\n\n let mut action_map = HashMap::new();\n\n add_left(state_map)?;\n\n add_right(state_map)?;\n\n check_plants(state_map, pattern_map, &mut action_map)?;\n\n grow_plants(action_map, state_map);\n\n }\n\n let total: isize = state_map.iter().filter(|(_, v)| **v).map(|(k, _)| *k).sum();\n\n Ok(total)\n\n}\n\n\n", "file_path": "src/year2018/day12.rs", "rank": 76, "score": 210283.889069865 }, { "content": "fn check_plants(state_map: &BTreeMap<isize, bool>, pattern_map: &IndexMap<Vec<bool>, bool>, action_map: &mut HashMap<isize, bool>) -> Result<()> {\n\n let mut window: Storage<(&isize, &bool)> = Storage::new(5);\n\n for x in state_map.iter().sliding_windows(&mut window) {\n\n let plants: Vec<bool> = x.iter().map(|(_, plant)| **plant).collect();\n\n let idx: Vec<isize> = x.iter().map(|(idx, _)| **idx).collect();\n\n let mut found = false;\n\n let mut action = false;\n\n\n\n for (pattern, outcome) in pattern_map {\n\n if pattern == &plants {\n\n found = true;\n\n action = *outcome;\n\n }\n\n }\n\n\n\n if found {\n\n action_map.insert(idx[2], action);\n\n } else {\n\n action_map.insert(idx[2], false);\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/year2018/day12.rs", "rank": 77, "score": 210280.28403275274 }, { "content": "/// Generate the list of 
lengths.\n\nfn generate_lengths(line: &str, second_star: bool) -> Result<Vec<u8>> {\n\n let mut lengths = Vec::new();\n\n\n\n if second_star {\n\n if !line.is_empty() {\n\n lengths.extend(line.as_bytes());\n\n }\n\n lengths.extend(vec![17, 31, 73, 47, 23]);\n\n } else {\n\n let length_strs: Vec<&str> = line.split(',').collect();\n\n for length_str in length_strs {\n\n lengths.push(length_str.parse::<u8>()?);\n\n }\n\n }\n\n\n\n Ok(lengths)\n\n}\n\n\n", "file_path": "src/year2017/day14.rs", "rank": 78, "score": 209510.17612323392 }, { "content": "/// Generate the list of lengths.\n\nfn generate_lengths(line: &str, second_star: bool) -> Result<Vec<u8>> {\n\n let mut lengths = Vec::new();\n\n\n\n if second_star {\n\n if !line.is_empty() {\n\n lengths.extend(line.as_bytes());\n\n }\n\n lengths.extend(vec![17, 31, 73, 47, 23]);\n\n } else {\n\n let length_strs: Vec<&str> = line.split(',').collect();\n\n for length_str in length_strs {\n\n lengths.push(length_str.parse::<u8>()?);\n\n }\n\n }\n\n\n\n Ok(lengths)\n\n}\n\n\n", "file_path": "src/year2017/day10.rs", "rank": 79, "score": 209510.17612323392 }, { "content": "fn add_left(state_map: &mut BTreeMap<isize, bool>) -> Result<()> {\n\n let min = find_min_plant(state_map)?;\n\n\n\n for i in (min - 4)..min {\n\n state_map.insert(i, false);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/year2018/day12.rs", "rank": 80, "score": 208760.90599277156 }, { "content": "fn add_right(state_map: &mut BTreeMap<isize, bool>) -> Result<()> {\n\n let max = find_max_plant(state_map)?;\n\n\n\n for i in max + 1..max + 4 {\n\n state_map.insert(i, false);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n use super::{gen_maps, run_generations};\n\n use error::Result;\n\n use indexmap::IndexMap;\n\n use std::collections::BTreeMap;\n\n use std::io::Cursor;\n\n\n\n const TEST_STATE: &str = r\"initial state: #..#.#..##......###...###\n\n\n", "file_path": "src/year2018/day12.rs", "rank": 81, "score": 208760.90599277156 }, { 
"content": "/// Infect or clean the given coords.\n\nfn change_state(coords: (usize, usize), arr: &mut Array2<char>, second_star: bool) -> Result<bool> {\n\n let mut new_infection = false;\n\n let curr_state = get_state(coords, arr)?;\n\n\n\n if second_star {\n\n match curr_state {\n\n State::Clean => arr[[coords.0, coords.1]] = 'W',\n\n State::Weakened => {\n\n arr[[coords.0, coords.1]] = '#';\n\n new_infection = true;\n\n }\n\n State::Infected => arr[[coords.0, coords.1]] = 'F',\n\n State::Flagged => arr[[coords.0, coords.1]] = '.',\n\n }\n\n } else {\n\n match curr_state {\n\n State::Clean => {\n\n arr[[coords.0, coords.1]] = '#';\n\n new_infection = true;\n\n }\n\n State::Infected => {\n\n arr[[coords.0, coords.1]] = '.';\n\n }\n\n _ => return Err(\"invalid state for one star\".into()),\n\n }\n\n }\n\n\n\n Ok(new_infection)\n\n}\n\n\n", "file_path": "src/year2017/day22.rs", "rank": 82, "score": 207915.75835865655 }, { "content": "fn is_nice2(line: &[char]) -> bool {\n\n let mut has_separated = false;\n\n let mut has_non_overlapping_pairs = false;\n\n let mut chunk_map: HashMap<(char, char), usize> = HashMap::new();\n\n\n\n let pairs = line.windows(2);\n\n\n\n for (idx, pair) in pairs.enumerate() {\n\n if pair.len() == 2 {\n\n let entry = chunk_map.entry((pair[0], pair[1])).or_insert(idx);\n\n\n\n if idx >= *entry + 2 {\n\n has_non_overlapping_pairs = true;\n\n break;\n\n }\n\n }\n\n }\n\n\n\n let windows = line.windows(3);\n\n\n\n for window in windows {\n\n if window.len() == 3 && window[0] == window[2] {\n\n has_separated = true;\n\n break;\n\n }\n\n }\n\n\n\n has_non_overlapping_pairs && has_separated\n\n}\n\n\n", "file_path": "src/year2015/day05.rs", "rank": 83, "score": 204617.39471812508 }, { "content": "fn is_nice(line: &[char]) -> bool {\n\n let mut idx0 = 0;\n\n let mut idx1 = 1;\n\n let len = line.len();\n\n let mut is_bad = false;\n\n let mut vowel_count = 0;\n\n let mut has_double = false;\n\n let mut skip_ch1 = false;\n\n\n\n while idx1 < len && 
!is_bad {\n\n if idx1 < len {\n\n let ch1 = line[idx0];\n\n let ch2 = line[idx1];\n\n\n\n let pair = format!(\"{}{}\", line[idx0], line[idx1]);\n\n match &pair[..] {\n\n \"ab\" | \"cd\" | \"pq\" | \"xy\" => {\n\n is_bad = true;\n\n break;\n\n }\n", "file_path": "src/year2015/day05.rs", "rank": 84, "score": 204617.39471812508 }, { "content": "/// Initialize\n\nfn initialize(commands: &mut HashMap<i64, (String, String, Option<Value>)>, register_map: &mut HashMap<String, i64>) -> Result<()> {\n\n let reader = BufReader::new(File::open(\"data/2017/day18/data_file\")?);\n\n\n\n for (idx, line_result) in reader.lines().enumerate() {\n\n let line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n commands.insert(idx as i64, parse_command(line)?);\n\n }\n\n initialize_register_map(commands, register_map)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/year2017/day18.rs", "rank": 85, "score": 203293.56107790302 }, { "content": "fn recurse(license_vec: &mut Vec<u32>, second_star: bool) -> Result<u32> {\n\n let children_count = license_vec.pop().ok_or(\"\")?;\n\n let metadata_count = license_vec.pop().ok_or(\"\")?;\n\n let mut result = 0;\n\n\n\n if second_star {\n\n let mut children_values = Vec::new();\n\n\n\n for _ in 0..children_count {\n\n children_values.push(recurse(license_vec, second_star)?);\n\n }\n\n\n\n for _ in 0..metadata_count {\n\n let metadata = license_vec.pop().ok_or(\"\")?;\n\n if children_count == 0 {\n\n result += metadata;\n\n } else {\n\n result += children_values.get(metadata as usize - 1).unwrap_or(&0);\n\n }\n\n }\n", "file_path": "src/year2018/day08.rs", "rank": 86, "score": 202837.2932721852 }, { "content": "fn take_turn(board: &mut Array2<Element>, i: usize, j: usize, max_i: usize, max_j: usize, second_star: bool) -> Result<usize> {\n\n let mut move_vec = Vec::new();\n\n\n\n // Scope for mutable board change below.\n\n {\n\n let curr_cell = &board[[i, j]];\n\n\n\n match curr_cell.kind {\n\n ElementKind::Wall | ElementKind::Cavern {} => {}\n\n 
ElementKind::Unit => {\n\n if let Some(ref unit) = curr_cell.unit {\n\n let targets = find_enemy_targets(board, unit.kind);\n\n\n\n if targets.is_empty() {\n\n return Ok(0);\n\n }\n\n\n\n if let Ok(Some(target)) = move_if_not_adjacent(board, &targets, unit, i, j, max_i, max_j) {\n\n move_vec.push((Action::Move([i, j]), target));\n\n } else {\n", "file_path": "src/year2018/day15.rs", "rank": 87, "score": 198585.0391072713 }, { "content": "/// Check each passphrase for the same word an toss out any that violate.\n\nfn check_for_duplicates(line: &str) -> Result<bool> {\n\n let words: Vec<&str> = line.split(char::is_whitespace).collect();\n\n let word_count = words.len();\n\n let mut word_set = HashSet::new();\n\n\n\n for word in words {\n\n word_set.insert(word);\n\n }\n\n\n\n Ok(word_count == word_set.len())\n\n}\n\n\n", "file_path": "src/year2017/day04.rs", "rank": 88, "score": 195864.24866166367 }, { "content": "/// Check each passphrase for the same anagram and toss out any that violate.\n\nfn check_for_anagrams(line: &str) -> Result<bool> {\n\n let words: Vec<&str> = line.split(char::is_whitespace).collect();\n\n let word_count = words.len();\n\n let mut word_set = HashSet::new();\n\n\n\n for word in words {\n\n let mut chars: Vec<char> = word.chars().collect();\n\n chars.sort_by(|a, b| b.cmp(a));\n\n let s = String::from_iter(chars);\n\n word_set.insert(s);\n\n }\n\n\n\n Ok(word_count == word_set.len())\n\n}\n\n\n\n#[cfg(test)]\n\nmod one_star {\n\n #[test]\n\n fn solution() {\n\n assert!(super::check_for_duplicates(\"aa bb cc dd\").unwrap_or(false));\n", "file_path": "src/year2017/day04.rs", "rank": 89, "score": 195864.24866166367 }, { "content": "/// Parse the file at `filename` and generate the checksum.\n\npub fn find_solution<T: BufRead>(reader: T, use_div: bool) -> Result<u32> {\n\n let mut checksum = 0;\n\n\n\n for line_result in reader.lines() {\n\n let line = &line_result.unwrap_or_else(|_| \"\".to_string());\n\n if use_div {\n\n checksum += 
row_evenly_divisible_value(line)?;\n\n } else {\n\n checksum += row_min_max_diff(line)?;\n\n }\n\n }\n\n\n\n Ok(checksum)\n\n}\n\n\n", "file_path": "src/year2017/day02.rs", "rank": 90, "score": 195112.04901745956 }, { "content": "pub fn find_solution<T: BufRead>(reader: T, second_star: bool) -> Result<u32> {\n\n let line_re = Regex::new(r#\"#(\\d+) @ (\\d+),(\\d+): (\\d+)x(\\d+)\"#)?;\n\n let mut rectangles = BTreeMap::new();\n\n\n\n for line in reader.lines().flatten() {\n\n for cap in line_re.captures_iter(&line) {\n\n let id = (&cap[1]).parse::<usize>()?;\n\n let l = (&cap[2]).parse::<usize>()?;\n\n let t = (&cap[3]).parse::<usize>()?;\n\n let w = (&cap[4]).parse::<usize>()?;\n\n let h = (&cap[5]).parse::<usize>()?;\n\n let top_left = Point { x: l, y: t };\n\n let bottom_right = Point { x: l + w - 1, y: t + h - 1 };\n\n let rectangle = Rectangle { top_left, bottom_right };\n\n rectangles.insert(id, rectangle);\n\n }\n\n }\n\n\n\n if second_star {\n\n Ok(find_non_overlaps(&rectangles)? 
as u32)\n\n } else {\n\n Ok(check_points(&rectangles, 1000, 1000)?)\n\n }\n\n}\n\n\n", "file_path": "src/year2018/day03.rs", "rank": 91, "score": 194135.67249395102 }, { "content": "pub fn find_solution<T: BufRead>(reader: T, second_star: bool) -> Result<u32> {\n\n let line_re = Regex::new(r#\"\\[(\\d{4})-(\\d{2})-(\\d{2}) (\\d{2}):(\\d{2})\\] (.*)\"#)?;\n\n let guard_re = Regex::new(r#\"Guard #(\\d+) begins shift\"#)?;\n\n let mut sorted_events = BTreeMap::new();\n\n\n\n for line in reader.lines().flatten() {\n\n for cap in line_re.captures_iter(&line) {\n\n let y = (&cap[1]).parse::<i32>()?;\n\n let mon = (&cap[2]).parse::<u32>()?;\n\n let d = (&cap[3]).parse::<u32>()?;\n\n let h = (&cap[4]).parse::<u32>()?;\n\n let m = (&cap[5]).parse::<u32>()?;\n\n let rest = &cap[6];\n\n\n\n let dt = Utc.ymd(y, mon, d).and_hms(h, m, 0);\n\n sorted_events.insert(dt, rest.to_string());\n\n }\n\n }\n\n\n\n let mut guards_napping: BTreeMap<u32, BTreeMap<u32, u32>> = BTreeMap::new();\n", "file_path": "src/year2018/day04.rs", "rank": 92, "score": 194135.67249395102 }, { "content": "pub fn find_solution<T: BufRead>(reader: T, second_star: bool) -> Result<u32> {\n\n let mut state_map = BTreeMap::new();\n\n let mut pattern_map = IndexMap::new();\n\n\n\n gen_maps(reader, &mut state_map, &mut pattern_map)?;\n\n let res = if second_star {\n\n let mut sub_total = run_generations(95, &mut state_map, &pattern_map)?;\n\n sub_total += (50_000_000_000 - 95) * 91;\n\n sub_total\n\n } else {\n\n run_generations(20, &mut state_map, &pattern_map)?\n\n };\n\n println!(\"Sum: {}\", res);\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/year2018/day12.rs", "rank": 93, "score": 194135.67249395102 }, { "content": "pub fn find_solution<T: BufRead>(reader: T, second_star: bool) -> Result<u32> {\n\n let line_re = Regex::new(r#\"(\\d+), (\\d+)\"#)?;\n\n let mut coords: Vec<(i32, i32)> = Vec::new();\n\n\n\n for line in reader.lines().flatten() {\n\n for cap in line_re.captures_iter(&line) {\n\n let x = 
&cap[1].parse::<i32>()?;\n\n let y = &cap[2].parse::<i32>()?;\n\n coords.push((*x, *y));\n\n }\n\n }\n\n\n\n let (max_x, max_y) = max_coords(&coords);\n\n\n\n if second_star {\n\n let d_to_check = if coords.len() == 6 { 32 } else { 10000 };\n\n let mut less_than_d = 0;\n\n for y in 0..=max_y {\n\n for x in 0..=max_x {\n\n let total_of_mds = total_of_mds((x, y), &coords);\n", "file_path": "src/year2018/day06.rs", "rank": 94, "score": 194135.67249395102 }, { "content": "pub fn find_solution<T: BufRead>(reader: T, second_star: bool) -> Result<u32> {\n\n let mut serial_number = 0;\n\n\n\n for line in reader.lines().filter_map(|x| x.ok()) {\n\n serial_number = line.parse::<usize>()?;\n\n }\n\n\n\n let result = submatrix_sum_queries(serial_number, second_star)?;\n\n println!(\"{},{},{} with power level {}\", result.0, result.1, result.2, result.3);\n\n\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/year2018/day11.rs", "rank": 95, "score": 194135.67249395102 }, { "content": "pub fn find_solution<T: BufRead>(reader: T, second_star: bool) -> Result<u32> {\n\n let mut twos = 0;\n\n let mut threes = 0;\n\n let mut all_ids = Vec::new();\n\n\n\n for line in reader.lines().flatten() {\n\n if second_star {\n\n all_ids.push(line);\n\n } else {\n\n let (has_two, has_three) = has_two_or_three(&line);\n\n if has_two {\n\n twos += 1\n\n };\n\n if has_three {\n\n threes += 1\n\n };\n\n }\n\n }\n\n\n\n if second_star {\n\n println!(\"{}\", find_closest(&mut all_ids));\n\n }\n\n\n\n Ok(twos * threes)\n\n}\n\n\n", "file_path": "src/year2018/day02.rs", "rank": 96, "score": 194135.67249395102 }, { "content": "pub fn find_solution<T: BufRead>(reader: T, second_star: bool) -> Result<u32> {\n\n let mut license_vec = Vec::new();\n\n for line in reader.lines().filter_map(|x| x.ok()) {\n\n for tok in line.split(' ').map(|x| x.parse::<u32>()).filter_map(|x| x.ok()) {\n\n license_vec.push(tok);\n\n }\n\n }\n\n\n\n license_vec.reverse();\n\n\n\n recurse(&mut license_vec, second_star)\n\n}\n\n\n", 
"file_path": "src/year2018/day08.rs", "rank": 97, "score": 194135.67249395102 }, { "content": "pub fn find_solution<T: BufRead>(reader: T, second_star: bool) -> Result<u32> {\n\n if second_star {\n\n let duration = find_duration(reader, false)?;\n\n Ok(duration)\n\n } else {\n\n let order = find_order(reader)?;\n\n println!(\"{}\", order);\n\n Ok(0)\n\n }\n\n}\n\n\n", "file_path": "src/year2018/day07.rs", "rank": 98, "score": 194135.67249395102 }, { "content": "pub fn find_solution<T: BufRead>(reader: T, second_star: bool) -> Result<u32> {\n\n let line_re = Regex::new(r\"(\\d+) players; last marble is worth (\\d+) points\")?;\n\n let mut players = 0;\n\n let mut final_marble = 0;\n\n\n\n for line in reader.lines().filter_map(|x| x.ok()) {\n\n for cap in line_re.captures_iter(&line) {\n\n players = (&cap[1]).parse::<usize>()?;\n\n final_marble = (&cap[2]).parse::<usize>()?;\n\n }\n\n }\n\n\n\n if second_star {\n\n final_marble *= 100;\n\n }\n\n\n\n let result = play_game(players, final_marble)?;\n\n Ok(result as u32)\n\n}\n\n\n", "file_path": "src/year2018/day09.rs", "rank": 99, "score": 194135.67249395102 } ]
Rust
src/endian/write.rs
zaksabeast/no_std_io
092305d45807619ad35a5bb89f3396f562c27f13
use crate::Error; use core::mem; pub trait EndianWrite { fn get_size(&self) -> usize; fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error>; fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error>; } impl EndianWrite for bool { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<bool>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = [*self as u8]; dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<bool>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = [*self as u8]; dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl EndianWrite for u8 { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<u8>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_le_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<u8>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_be_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl EndianWrite for i8 { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<i8>(); if 
byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_le_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<i8>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_be_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl EndianWrite for u16 { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<u16>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_le_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<u16>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_be_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl EndianWrite for i16 { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<i16>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_le_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<i16>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, 
data_len: dst.len(), }); } let bytes = self.to_be_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl EndianWrite for u32 { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<u32>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_le_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<u32>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_be_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl EndianWrite for i32 { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<i32>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_le_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<i32>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_be_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl EndianWrite for u64 { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<u64>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 
0, data_len: dst.len(), }); } let bytes = self.to_le_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<u64>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_be_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl EndianWrite for i64 { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<i64>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_le_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<i64>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_be_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl<const SIZE: usize> EndianWrite for [u8; SIZE] { #[inline(always)] fn get_size(&self) -> usize { SIZE } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { if SIZE > dst.len() { return Err(Error::InvalidSize { wanted_size: SIZE, offset: 0, data_len: dst.len(), }); } dst[..SIZE].copy_from_slice(self); Ok(SIZE) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { if SIZE > dst.len() { return Err(Error::InvalidSize { wanted_size: SIZE, offset: 0, data_len: dst.len(), }); } dst[..SIZE].copy_from_slice(self); Ok(SIZE) } } impl EndianWrite for () { fn get_size(&self) -> usize { 0 } fn try_write_le(&self, _dst: &mut [u8]) -> Result<usize, Error> { Ok(0) } fn try_write_be(&self, _dst: 
&mut [u8]) -> Result<usize, Error> { Ok(0) } }
use crate::Error; use core::mem; pub trait EndianWrite { fn get_size(&self) -> usize; fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error>; fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error>; } impl EndianWrite for bool { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<bool>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = [*self as u8]; dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<bool>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = [*self as u8]; dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl EndianWrite for u8 { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<u8>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_le_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<u8>(); if byte_count > dst.len() { return Err(Error::InvalidSize {
); } let bytes = self.to_le_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<u64>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_be_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl EndianWrite for i64 { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<i64>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_le_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<i64>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_be_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl<const SIZE: usize> EndianWrite for [u8; SIZE] { #[inline(always)] fn get_size(&self) -> usize { SIZE } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { if SIZE > dst.len() { return Err(Error::InvalidSize { wanted_size: SIZE, offset: 0, data_len: dst.len(), }); } dst[..SIZE].copy_from_slice(self); Ok(SIZE) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { if SIZE > dst.len() { return Err(Error::InvalidSize { wanted_size: SIZE, offset: 0, data_len: dst.len(), }); } dst[..SIZE].copy_from_slice(self); Ok(SIZE) } } impl EndianWrite for () { fn get_size(&self) -> usize { 0 } fn try_write_le(&self, _dst: &mut [u8]) -> Result<usize, Error> { Ok(0) } fn try_write_be(&self, _dst: &mut [u8]) -> Result<usize, 
Error> { Ok(0) } }
wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_be_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl EndianWrite for i8 { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<i8>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_le_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<i8>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_be_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl EndianWrite for u16 { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<u16>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_le_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<u16>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_be_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl EndianWrite for i16 { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<i16>(); if byte_count > dst.len() { return Err(Error::InvalidSize { 
wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_le_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<i16>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_be_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl EndianWrite for u32 { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<u32>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_le_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<u32>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_be_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl EndianWrite for i32 { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<i32>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_le_bytes(); dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } #[inline(always)] fn try_write_be(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<i32>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }); } let bytes = self.to_be_bytes(); 
dst[..byte_count].copy_from_slice(&bytes); Ok(bytes.len()) } } impl EndianWrite for u64 { #[inline(always)] fn get_size(&self) -> usize { mem::size_of::<Self>() } #[inline(always)] fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> { let byte_count = mem::size_of::<u64>(); if byte_count > dst.len() { return Err(Error::InvalidSize { wanted_size: byte_count, offset: 0, data_len: dst.len(), }
random
[ { "content": "#[test]\n\nfn should_error_if_there_are_not_enough_bytes() {\n\n let bytes = vec![0xaa, 0xbb, 0xcc, 0xdd];\n\n let result = bytes\n\n .read_le::<Test>(0)\n\n .expect_err(\"This should have failed\");\n\n assert_eq!(\n\n result,\n\n Error::InvalidSize {\n\n wanted_size: 4,\n\n offset: 1,\n\n data_len: 4\n\n }\n\n );\n\n}\n\n\n", "file_path": "macros/tests/endian_read.rs", "rank": 0, "score": 86709.12604880343 }, { "content": "#[test]\n\nfn should_error_if_there_are_not_enough_bytes() {\n\n let value = Test {\n\n first: 0xaa,\n\n second: 0xeeddccbb,\n\n };\n\n let mut bytes: [u8; 4] = [0; 4];\n\n let result = bytes\n\n .write_le::<Test>(0, &value)\n\n .expect_err(\"This should have failed\");\n\n\n\n assert_eq!(\n\n result,\n\n Error::InvalidSize {\n\n wanted_size: 4,\n\n offset: 1,\n\n data_len: 4\n\n }\n\n );\n\n}\n\n\n", "file_path": "macros/tests/endian_write.rs", "rank": 1, "score": 86709.12604880343 }, { "content": "/// An interface to safely write values to a source.\n\n///\n\n/// Blanket implementations are provided for byte slices and vectors.\n\n/// Vectors will grow if there isn't enough space. 
If this isn't desirable, use a slice from a vector as the writer.\n\n///\n\n/// To forward [Writer] methods to containers with vectors, implement both\n\n/// [Writer::get_mut_slice] and [Writer::get_sized_mut_slice] instead of only [Writer::get_mut_slice].\n\npub trait Writer {\n\n /// Returns the data to be read from.\n\n fn get_mut_slice(&mut self) -> &mut [u8];\n\n\n\n /// Returns a slice from the given offset.\n\n /// Returns an empty slice if the offset is greater than the slice size.\n\n #[inline(always)]\n\n fn get_mut_slice_at_offset(&mut self, offset: usize) -> &mut [u8] {\n\n let data = self.get_mut_slice();\n\n\n\n if offset >= data.len() {\n\n return &mut [];\n\n }\n\n\n\n &mut data[offset..]\n\n }\n\n\n\n /// Gets a slice of bytes with a specified length from an offset of a source.\n\n ///\n\n /// An error should be returned if the size is invalid.\n", "file_path": "src/writer.rs", "rank": 2, "score": 85977.36432425684 }, { "content": "/// An interface to safely read values from a source.\n\npub trait Reader {\n\n /// Returns the data to be read from.\n\n fn get_slice(&self) -> &[u8];\n\n\n\n /// Returns a slice from the given offset.\n\n /// Returns an empty slice if the offset is greater than the slice size.\n\n #[inline(always)]\n\n fn get_slice_at_offset(&self, offset: usize) -> &[u8] {\n\n let data = self.get_slice();\n\n\n\n if offset >= data.len() {\n\n return &[];\n\n }\n\n\n\n &data[offset..]\n\n }\n\n\n\n /// Gets a slice of bytes from an offset of a source where `slice.len() == size`.\n\n ///\n\n /// An error should be returned if the size is invalid (e.g. 
`offset + size` exceeds the available data)\n", "file_path": "src/reader.rs", "rank": 3, "score": 85969.94240955843 }, { "content": "/// An interface for working with cursors by getting and setting an index.\n\npub trait Cursor {\n\n fn get_index(&self) -> usize;\n\n fn set_index(&mut self, index: usize);\n\n\n\n /// Increments the index by the given amount.\n\n #[inline(always)]\n\n fn increment_by(&mut self, count: usize) {\n\n self.set_index(self.get_index() + count);\n\n }\n\n\n\n /// Returns the current index and replaces it with the provided size.\n\n #[inline(always)]\n\n fn swap_incremented_index(&mut self, size: usize) -> usize {\n\n let index = self.get_index();\n\n self.increment_by(size);\n\n index\n\n }\n\n\n\n /// Returns the current index and replaces it\n\n /// with the size of the provided type added to the index.\n", "file_path": "src/stream/cursor.rs", "rank": 4, "score": 82969.29919751236 }, { "content": "#[proc_macro_derive(EndianWrite, attributes(no_std_io))]\n\npub fn impl_endian_write(tokens: TokenStream) -> TokenStream {\n\n endian_write::impl_endian_write(tokens)\n\n}\n", "file_path": "macros/src/lib.rs", "rank": 6, "score": 76043.12587304617 }, { "content": "#[proc_macro_derive(EndianRead, attributes(no_std_io))]\n\npub fn impl_endian_read(tokens: TokenStream) -> TokenStream {\n\n endian_read::impl_endian_read(tokens)\n\n}\n\n\n", "file_path": "macros/src/lib.rs", "rank": 7, "score": 76043.12587304617 }, { "content": "pub fn impl_endian_read(tokens: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(tokens as DeriveInput);\n\n\n\n let named_fields = match input.data {\n\n Data::Struct(DataStruct {\n\n fields: Fields::Named(fields),\n\n ..\n\n }) => fields.named,\n\n _ => panic!(\"Only structs can derive EndianRead\"),\n\n };\n\n\n\n let try_read_le = create_method_impl(\n\n &named_fields,\n\n quote! { try_read_le },\n\n quote! 
{ read_stream_le },\n\n );\n\n\n\n let try_read_be = create_method_impl(\n\n &named_fields,\n\n quote! { try_read_be },\n", "file_path": "macros/src/endian_read.rs", "rank": 8, "score": 73911.73477594253 }, { "content": "pub fn impl_endian_write(tokens: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(tokens as DeriveInput);\n\n\n\n let named_fields = match input.data {\n\n Data::Struct(DataStruct {\n\n fields: Fields::Named(fields),\n\n ..\n\n }) => fields.named,\n\n _ => panic!(\"Only structs can derive EndianWrite\"),\n\n };\n\n\n\n let get_size_fields = named_fields\n\n .iter()\n\n .map(create_get_size_field)\n\n .collect::<Vec<proc_macro2::TokenStream>>();\n\n\n\n let try_write_le = create_write_method_impl(\n\n &named_fields,\n\n quote! { try_write_le },\n\n quote! { write_stream_le },\n", "file_path": "macros/src/endian_write.rs", "rank": 9, "score": 73911.73477594253 }, { "content": "/// Defines a shared interface to read data from a source that is endian specific.\n\n///\n\n/// This should only be used when handling an external data source, such as a remote API or file.\n\n/// Usually you'll want code to be endian agnostic.\n\npub trait EndianRead: Sized {\n\n /// Tries to read the value from its little endian representation.\n\n fn try_read_le(bytes: &[u8]) -> Result<ReadOutput<Self>, Error>;\n\n /// Tries to read the value from its big endian representation.\n\n fn try_read_be(bytes: &[u8]) -> Result<ReadOutput<Self>, Error>;\n\n}\n\n\n\nimpl EndianRead for bool {\n\n #[inline(always)]\n\n fn try_read_le(bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n let result = u8::try_read_le(bytes)?;\n\n Ok(ReadOutput {\n\n read_bytes: result.get_read_bytes(),\n\n data: result.into_data() != 0,\n\n })\n\n }\n\n\n\n #[inline(always)]\n\n fn try_read_be(bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n let result = u8::try_read_le(bytes)?;\n", "file_path": "src/endian/read.rs", "rank": 10, "score": 73630.74311907878 }, { "content": "/// An 
interface to write values as a stream.\n\npub trait StreamWriter: Writer + Cursor {\n\n /// Same as [Writer::write], but uses the current stream instead of an offset.\n\n #[inline(always)]\n\n fn write_stream<T: TriviallyTransmutable>(&mut self, value: &T) -> WriterResult<usize> {\n\n let index = self.swap_incremented_index_for_type::<T>();\n\n self.write(index, value)\n\n }\n\n\n\n /// Same as [StreamWriter::write_stream], but does not write if there is not enough space.\n\n #[inline(always)]\n\n fn checked_write_stream<T: TriviallyTransmutable>(&mut self, value: &T) -> usize {\n\n let index = self.swap_incremented_index_for_type::<T>();\n\n self.checked_write(index, value)\n\n }\n\n\n\n /// Same as [Writer::write_le], but uses the current stream instead of an offset.\n\n #[inline(always)]\n\n fn write_stream_le<T: EndianWrite>(&mut self, value: &T) -> WriterResult<usize> {\n\n let index = self.get_index();\n\n let bytes_written = self.write_le(index, value)?;\n", "file_path": "src/stream/writer.rs", "rank": 11, "score": 68214.12907775675 }, { "content": "/// An interface to read values as a stream.\n\npub trait StreamReader: Reader + Cursor + Sized {\n\n /// Same as [Reader::read], but uses the current stream instead of an offset.\n\n #[inline(always)]\n\n fn read_stream<T: TriviallyTransmutable + Default>(&mut self) -> ReaderResult<T> {\n\n let index = self.swap_incremented_index_for_type::<T>();\n\n self.read(index)\n\n }\n\n\n\n /// Same as [StreamReader::read_stream], but returns a default value if the read is invalid.\n\n #[inline(always)]\n\n fn default_read_stream<T: TriviallyTransmutable + Default>(&mut self) -> T {\n\n let index = self.swap_incremented_index_for_type::<T>();\n\n self.default_read(index)\n\n }\n\n\n\n /// Same as [Reader::read_le], but uses the current stream instead of an offset.\n\n #[inline(always)]\n\n fn read_stream_le<T: EndianRead>(&mut self) -> ReaderResult<T> {\n\n let index = self.get_index();\n\n let read_value = 
self.read_le_with_output(index)?;\n", "file_path": "src/stream/reader.rs", "rank": 12, "score": 63684.37051731418 }, { "content": "fn create_method_impl(\n\n fields: &Punctuated<Field, Comma>,\n\n impl_method: proc_macro2::TokenStream,\n\n field_method: proc_macro2::TokenStream,\n\n) -> proc_macro2::TokenStream {\n\n let field_tokens = fields\n\n .iter()\n\n .map(|field| create_field(field, &field_method))\n\n .collect::<Vec<proc_macro2::TokenStream>>();\n\n let field_idents = fields\n\n .iter()\n\n .map(|field| field.ident.as_ref().expect(\"Field should have identity\"))\n\n .collect::<Vec<&Ident>>();\n\n\n\n quote! {\n\n #[inline(always)]\n\n fn #impl_method(bytes: &[u8]) -> Result<::no_std_io::ReadOutput<Self>, ::no_std_io::Error> {\n\n let mut stream = ::no_std_io::StreamContainer::new(bytes);\n\n #(#field_tokens)*\n\n let result = Self {\n\n #(#field_idents),*\n\n };\n\n let bytes_read = ::no_std_io::Cursor::get_index(&stream);\n\n\n\n Ok(::no_std_io::ReadOutput::new(result, bytes_read))\n\n }\n\n }\n\n}\n\n\n", "file_path": "macros/src/endian_read.rs", "rank": 13, "score": 63217.189912678514 }, { "content": "fn create_write_method_impl(\n\n fields: &Punctuated<Field, Comma>,\n\n impl_method: proc_macro2::TokenStream,\n\n field_method: proc_macro2::TokenStream,\n\n) -> proc_macro2::TokenStream {\n\n let field_tokens = fields\n\n .iter()\n\n .map(|field| create_write_field(field, &field_method))\n\n .collect::<Vec<proc_macro2::TokenStream>>();\n\n\n\n quote! 
{\n\n #[inline(always)]\n\n fn #impl_method(&self, dst: &mut [u8]) -> Result<usize, ::no_std_io::Error> {\n\n let mut stream = ::no_std_io::StreamContainer::new(dst);\n\n #(#field_tokens)*\n\n let bytes_written = ::no_std_io::Cursor::get_index(&stream);\n\n Ok(bytes_written)\n\n }\n\n }\n\n}\n\n\n", "file_path": "macros/src/endian_write.rs", "rank": 14, "score": 61031.231926971755 }, { "content": "#[test]\n\nfn should_write_be() {\n\n let value = Test {\n\n first: 0xaa,\n\n second: 0xbbccddee,\n\n };\n\n let mut bytes = vec![0; 5];\n\n let result = bytes.write_be(0, &value).expect(\"Write should have worked\");\n\n\n\n assert_eq!(result, 5);\n\n assert_eq!(bytes, [0xaa, 0xbb, 0xcc, 0xdd, 0xee]);\n\n}\n\n\n", "file_path": "macros/tests/endian_write.rs", "rank": 15, "score": 42344.45362962113 }, { "content": "#[test]\n\nfn should_read_be() {\n\n let bytes: [u8; 5] = [0xaa, 0xbb, 0xcc, 0xdd, 0xee];\n\n let result: Test = bytes.read_be(0).expect(\"Read should have worked\");\n\n let expected = Test {\n\n first: 0xaa,\n\n second: 0xbbccddee,\n\n };\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "macros/tests/endian_read.rs", "rank": 16, "score": 42344.45362962113 }, { "content": "#[test]\n\nfn should_write_nested_be() {\n\n let value = TestContainer {\n\n test: Test {\n\n first: 0x00,\n\n second: 0x11223344,\n\n },\n\n list: ListContainer(vec![0xaabbccdd, 0x55667788]),\n\n };\n\n let mut bytes = vec![];\n\n let result = bytes.write_be(0, &value).expect(\"Write should have worked\");\n\n\n\n assert_eq!(result, 14);\n\n assert_eq!(\n\n bytes,\n\n [0x00, 0x11, 0x22, 0x33, 0x44, 0x02, 0xaa, 0xbb, 0xcc, 0xdd, 0x55, 0x66, 0x77, 0x88,]\n\n )\n\n}\n\n\n\nmod padding {\n\n use super::*;\n", "file_path": "macros/tests/endian_write.rs", "rank": 17, "score": 41015.04531094255 }, { "content": "fn create_field(\n\n field: &Field,\n\n field_method: &proc_macro2::TokenStream,\n\n) -> proc_macro2::TokenStream {\n\n let field_ident = field.ident.as_ref().expect(\"Field 
should have identity\");\n\n let pad_before = match MacroArgs::from_attributes(&field.attrs) {\n\n Some(MacroArgs { pad_before }) => {\n\n quote! { ::no_std_io::Cursor::increment_by(&mut stream, #pad_before); }\n\n }\n\n _ => quote! {},\n\n };\n\n\n\n quote! {\n\n #pad_before\n\n let #field_ident = ::no_std_io::StreamReader::#field_method(&mut stream)?;\n\n }\n\n}\n\n\n", "file_path": "macros/src/endian_read.rs", "rank": 18, "score": 41015.04531094255 }, { "content": "#[test]\n\nfn should_read_le() {\n\n let bytes: [u8; 5] = [0xaa, 0xbb, 0xcc, 0xdd, 0xee];\n\n let result: Test = bytes.read_le(0).expect(\"Read should have worked\");\n\n let expected = Test {\n\n first: 0xaa,\n\n second: 0xeeddccbb,\n\n };\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "macros/tests/endian_read.rs", "rank": 19, "score": 41015.04531094255 }, { "content": "#[test]\n\nfn should_read_nested_be() {\n\n let bytes = vec![\n\n 0x00, 0x11, 0x22, 0x33, 0x44, 0x02, 0xaa, 0xbb, 0xcc, 0xdd, 0x55, 0x66, 0x77, 0x88,\n\n ];\n\n let result: TestContainer = bytes.read_be(0).expect(\"Read should have worked\");\n\n let expected = TestContainer {\n\n test: Test {\n\n first: 0x00,\n\n second: 0x11223344,\n\n },\n\n list: ListContainer(vec![0xaabbccdd, 0x55667788]),\n\n };\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n\nmod padding {\n\n use super::*;\n\n\n\n #[derive(Debug, Default, PartialEq, no_std_io::EndianRead, no_std_io::EndianWrite)]\n", "file_path": "macros/tests/endian_read.rs", "rank": 20, "score": 41015.04531094255 }, { "content": "#[test]\n\nfn should_write_le() {\n\n let value = Test {\n\n first: 0xaa,\n\n second: 0xeeddccbb,\n\n };\n\n let mut bytes = vec![0; 5];\n\n let result = bytes.write_le(0, &value).expect(\"Write should have worked\");\n\n\n\n assert_eq!(result, 5);\n\n assert_eq!(bytes, [0xaa, 0xbb, 0xcc, 0xdd, 0xee]);\n\n}\n\n\n", "file_path": "macros/tests/endian_write.rs", "rank": 21, "score": 41015.04531094255 }, { "content": "#[test]\n\nfn 
should_read_dynamic_size_be() {\n\n let bytes = vec![0x02, 0x11, 0x22, 0x33, 0x44, 0xaa, 0xbb, 0xcc, 0xdd];\n\n let result: ListContainer<u32> = bytes.read_be(0).expect(\"Read should have worked\");\n\n let expected = ListContainer(vec![0x11223344, 0xaabbccdd]);\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "macros/tests/endian_read.rs", "rank": 22, "score": 39816.843995872 }, { "content": "#[test]\n\nfn should_write_dynamic_size_be() {\n\n let value = ListContainer::<u32>(vec![0x11223344, 0xaabbccdd]);\n\n let mut bytes = vec![];\n\n let result = bytes.write_be(0, &value).expect(\"Write should have worked\");\n\n\n\n assert_eq!(result, 9);\n\n assert_eq!(\n\n bytes,\n\n [0x02, 0x11, 0x22, 0x33, 0x44, 0xaa, 0xbb, 0xcc, 0xdd]\n\n );\n\n}\n\n\n", "file_path": "macros/tests/endian_write.rs", "rank": 23, "score": 39816.843995872 }, { "content": "#[test]\n\nfn should_write_nested_le() {\n\n let value = TestContainer {\n\n test: Test {\n\n first: 0x00,\n\n second: 0x44332211,\n\n },\n\n list: ListContainer(vec![0xddccbbaa, 0x88776655]),\n\n };\n\n let mut bytes = vec![];\n\n let result = bytes.write_le(0, &value).expect(\"Write should have worked\");\n\n\n\n assert_eq!(result, 14);\n\n assert_eq!(\n\n bytes,\n\n [0x00, 0x11, 0x22, 0x33, 0x44, 0x02, 0xaa, 0xbb, 0xcc, 0xdd, 0x55, 0x66, 0x77, 0x88,]\n\n )\n\n}\n\n\n", "file_path": "macros/tests/endian_write.rs", "rank": 24, "score": 39816.843995872 }, { "content": "#[test]\n\nfn should_read_nested_le() {\n\n let bytes = vec![\n\n 0x00, 0x11, 0x22, 0x33, 0x44, 0x02, 0xaa, 0xbb, 0xcc, 0xdd, 0x55, 0x66, 0x77, 0x88,\n\n ];\n\n let result: TestContainer = bytes.read_le(0).expect(\"Read should have worked\");\n\n let expected = TestContainer {\n\n test: Test {\n\n first: 0x00,\n\n second: 0x44332211,\n\n },\n\n list: ListContainer(vec![0xddccbbaa, 0x88776655]),\n\n };\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "macros/tests/endian_read.rs", "rank": 25, "score": 39816.843995872 }, { 
"content": "fn create_write_field(\n\n field: &Field,\n\n field_method: &proc_macro2::TokenStream,\n\n) -> proc_macro2::TokenStream {\n\n let field_ident = field.ident.as_ref().expect(\"Field should have identity\");\n\n let pad_before = match MacroArgs::from_attributes(&field.attrs) {\n\n Some(MacroArgs { pad_before }) => {\n\n quote! { ::no_std_io::Cursor::increment_by(&mut stream, #pad_before); }\n\n }\n\n _ => quote! {},\n\n };\n\n\n\n quote! {\n\n #pad_before\n\n ::no_std_io::StreamWriter::#field_method(&mut stream, &self.#field_ident)?;\n\n }\n\n}\n\n\n", "file_path": "macros/src/endian_write.rs", "rank": 26, "score": 39816.843995872 }, { "content": "#[test]\n\nfn should_read_dynamic_size_le() {\n\n let bytes = vec![0x02, 0x11, 0x22, 0x33, 0x44, 0xaa, 0xbb, 0xcc, 0xdd];\n\n let result: ListContainer<u32> = bytes.read_le(0).expect(\"Read should have worked\");\n\n let expected = ListContainer(vec![0x44332211, 0xddccbbaa]);\n\n\n\n assert_eq!(result, expected);\n\n}\n\n\n", "file_path": "macros/tests/endian_read.rs", "rank": 27, "score": 38731.33878986753 }, { "content": "#[test]\n\nfn should_write_dynamic_size_le() {\n\n let value = ListContainer::<u32>(vec![0x44332211, 0xddccbbaa]);\n\n let mut bytes = vec![];\n\n let result = bytes.write_le(0, &value).expect(\"Write should have worked\");\n\n\n\n assert_eq!(result, 9);\n\n assert_eq!(\n\n bytes,\n\n [0x02, 0x11, 0x22, 0x33, 0x44, 0xaa, 0xbb, 0xcc, 0xdd]\n\n );\n\n}\n\n\n", "file_path": "macros/tests/endian_write.rs", "rank": 28, "score": 38731.33878986753 }, { "content": "use snafu::Snafu;\n\n\n\n#[derive(Debug, PartialEq, Snafu)]\n\npub enum Error {\n\n #[snafu(display(\n\n \"Invalid size: wanted 0x{:x} at offset offset: 0x{:x}, but data length is 0x{:x} \",\n\n wanted_size,\n\n offset,\n\n data_len\n\n ))]\n\n InvalidSize {\n\n wanted_size: usize,\n\n offset: usize,\n\n data_len: usize,\n\n },\n\n #[snafu(display(\n\n \"Invalid alignment: wanted size: {}, source size: {}, source offset: {}\",\n\n 
wanted_size,\n\n source_size,\n\n source_offset\n", "file_path": "src/error.rs", "rank": 29, "score": 31102.750022269185 }, { "content": " ))]\n\n InvalidAlignment {\n\n wanted_size: usize,\n\n source_size: usize,\n\n source_offset: usize,\n\n },\n\n /// Generic read error message to describe a custom read error by the implementor.\n\n #[snafu(display(\"Invalid read: {}\", message))]\n\n InvalidRead { message: &'static str },\n\n /// Generic write error message to describe a custom write error by the implementor.\n\n #[snafu(display(\"Invalid write: {}\", message))]\n\n InvalidWrite { message: &'static str },\n\n}\n\n\n\n#[inline(always)]\n\npub(crate) fn add_error_context<T>(\n\n error: Result<T, Error>,\n\n offset: usize,\n\n data_len: usize,\n\n) -> Result<T, Error> {\n", "file_path": "src/error.rs", "rank": 30, "score": 31101.219645642108 }, { "content": " error.map_err(|error| match error {\n\n Error::InvalidSize {\n\n wanted_size,\n\n offset: error_offset,\n\n ..\n\n } => Error::InvalidSize {\n\n wanted_size,\n\n offset: offset + error_offset,\n\n data_len,\n\n },\n\n _ => error,\n\n })\n\n}\n", "file_path": "src/error.rs", "rank": 31, "score": 31097.845675162785 }, { "content": "fn create_get_size_field(field: &Field) -> proc_macro2::TokenStream {\n\n let field_ident = field.ident.as_ref().expect(\"Field should have identity\");\n\n let pad_before = match MacroArgs::from_attributes(&field.attrs) {\n\n Some(MacroArgs { pad_before }) => pad_before,\n\n _ => 0,\n\n };\n\n\n\n quote! 
{\n\n size += #pad_before;\n\n size += ::no_std_io::EndianWrite::get_size(&self.#field_ident);\n\n }\n\n}\n\n\n", "file_path": "macros/src/endian_write.rs", "rank": 32, "score": 26746.986981136215 }, { "content": " }\n\n\n\n let slice = self.get_mut_slice();\n\n Ok(&mut slice[offset..offset_end])\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::Reader;\n\n\n\n pub struct MockWriter {\n\n bytes: [u8; 8],\n\n }\n\n\n\n impl MockWriter {\n\n fn new(bytes: [u8; 8]) -> Self {\n\n Self { bytes }\n\n }\n", "file_path": "src/writer.rs", "rank": 34, "score": 24.426715113232365 }, { "content": " impl EndianWrite for OffsetErrorTest {\n\n fn get_size(&self) -> usize {\n\n 0\n\n }\n\n fn try_write_le(&self, _dst: &mut [u8]) -> Result<usize, Error> {\n\n unimplemented!()\n\n }\n\n fn try_write_be(&self, _dst: &mut [u8]) -> Result<usize, Error> {\n\n Err(Error::InvalidSize {\n\n wanted_size: 8,\n\n offset: 1,\n\n data_len: 0,\n\n })\n\n }\n\n }\n\n\n\n #[test]\n\n fn should_bubble_up_error_offsets_for_vec() {\n\n let value = OffsetErrorTest(0);\n\n let mut bytes = vec![];\n", "file_path": "src/writer.rs", "rank": 35, "score": 24.211734068393387 }, { "content": " #[derive(Debug, PartialEq)]\n\n struct Repeat(u8);\n\n\n\n impl EndianWrite for Repeat {\n\n fn get_size(&self) -> usize {\n\n 3\n\n }\n\n\n\n fn try_write_le(&self, dst: &mut [u8]) -> Result<usize, Error> {\n\n let bytes: [u8; 3] = [self.0, self.0, self.0];\n\n dst[0..3].copy_from_slice(&bytes);\n\n Ok(bytes.len())\n\n }\n\n\n\n fn try_write_be(&self, _dst: &mut [u8]) -> Result<usize, Error> {\n\n unimplemented!()\n\n }\n\n }\n\n\n\n #[test]\n", "file_path": "src/stream/writer.rs", "rank": 36, "score": 22.68769396371942 }, { "content": "}\n\n\n\nimpl<T> StreamWriter for T where T: Writer + Cursor {}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::Reader;\n\n\n\n pub struct MockStream {\n\n bytes: [u8; 8],\n\n index: usize,\n\n }\n\n\n\n impl MockStream {\n\n fn new(bytes: 
[u8; 8]) -> Self {\n\n Self { bytes, index: 0 }\n\n }\n\n\n\n fn get_bytes(&self) -> [u8; 8] {\n", "file_path": "src/stream/writer.rs", "rank": 38, "score": 21.595254185158446 }, { "content": "\n\n impl EndianWrite for CustomErrorTest {\n\n fn get_size(&self) -> usize {\n\n 0\n\n }\n\n fn try_write_le(&self, _dst: &mut [u8]) -> Result<usize, Error> {\n\n unimplemented!()\n\n }\n\n fn try_write_be(&self, _dst: &mut [u8]) -> Result<usize, Error> {\n\n Err(Error::InvalidRead {\n\n message: \"Custom error!\",\n\n })\n\n }\n\n }\n\n\n\n #[test]\n\n fn should_bubble_up_custom_errors_for_vec() {\n\n let value = CustomErrorTest(0);\n\n let mut bytes = vec![];\n\n let result = bytes.write_be(0, &value).unwrap_err();\n", "file_path": "src/writer.rs", "rank": 43, "score": 21.088929031356418 }, { "content": " #[test]\n\n fn should_bubble_up_custom_errors_for_slice() {\n\n let value = CustomErrorTest(0);\n\n let bytes = &mut [];\n\n let result = bytes.write_le(0, &value).unwrap_err();\n\n let expected = Error::InvalidRead {\n\n message: \"Custom error!\",\n\n };\n\n assert_eq!(result, expected)\n\n }\n\n\n\n #[derive(Debug)]\n\n struct OffsetErrorTest(u32);\n\n\n\n impl EndianWrite for OffsetErrorTest {\n\n fn get_size(&self) -> usize {\n\n 0\n\n }\n\n fn try_write_le(&self, _dst: &mut [u8]) -> Result<usize, Error> {\n\n Err(Error::InvalidSize {\n", "file_path": "src/writer.rs", "rank": 50, "score": 20.310302932887794 }, { "content": " use super::*;\n\n\n\n pub struct MockReader {\n\n bytes: [u8; 8],\n\n }\n\n\n\n impl MockReader {\n\n fn new(bytes: [u8; 8]) -> Self {\n\n Self { bytes }\n\n }\n\n }\n\n\n\n impl Reader for MockReader {\n\n fn get_slice(&self) -> &[u8] {\n\n &self.bytes\n\n }\n\n }\n\n\n\n mod get_slice_of_size {\n\n use super::*;\n", "file_path": "src/reader.rs", "rank": 51, "score": 20.304919395756833 }, { "content": " self.get_sized_mut_slice(offset, length)\n\n }\n\n\n\n /// Writes bytes to an offset and returns the number of bytes written.\n\n ///\n\n /// 
Errors if the byte slice length will not fit at the offset.\n\n #[inline(always)]\n\n fn write_bytes(&mut self, offset: usize, bytes: &[u8]) -> WriterResult<usize> {\n\n let length = bytes.len();\n\n let slice = self.get_sized_mut_slice(offset, length)?;\n\n\n\n slice.copy_from_slice(bytes);\n\n Ok(length)\n\n }\n\n\n\n /// Same as [Writer::write_bytes], but checks to make sure the bytes can safely be written to the offset.\n\n /// Returns 0 as the write size if the bytes won't fit into the offset.\n\n #[inline(always)]\n\n fn checked_write_bytes(&mut self, offset: usize, bytes: &[u8]) -> usize {\n\n self.write_bytes(offset, bytes).unwrap_or(0)\n", "file_path": "src/writer.rs", "rank": 53, "score": 19.75780630422855 }, { "content": " wanted_size: 8,\n\n offset: 1,\n\n data_len: 0,\n\n })\n\n }\n\n fn try_write_be(&self, _dst: &mut [u8]) -> Result<usize, Error> {\n\n unimplemented!()\n\n }\n\n }\n\n\n\n #[test]\n\n fn should_bubble_up_error_offsets_for_vec() {\n\n let value = OffsetErrorTest(0);\n\n let mut bytes = vec![];\n\n let result = bytes.write_le(2, &value).unwrap_err();\n\n let expected = Error::InvalidSize {\n\n wanted_size: 8,\n\n offset: 3,\n\n data_len: 2,\n\n };\n", "file_path": "src/writer.rs", "rank": 54, "score": 19.648048185514476 }, { "content": "\n\n #[derive(Debug)]\n\n struct OffsetErrorTest(u32);\n\n\n\n impl EndianRead for OffsetErrorTest {\n\n fn try_read_le(_bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n Err(Error::InvalidSize {\n\n wanted_size: 8,\n\n offset: 1,\n\n data_len: 0,\n\n })\n\n }\n\n\n\n fn try_read_be(_bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n unimplemented!()\n\n }\n\n }\n\n\n\n #[test]\n\n fn should_bubble_up_error_offsets() {\n", "file_path": "src/reader.rs", "rank": 60, "score": 18.590674310374077 }, { "content": "use crate::Error;\n\nuse core::{convert::TryInto, mem};\n\n\n\n/// The result of a read, including the value that was\n\n/// read and the number of bytes it consumed.\n\n#[derive(Debug, 
PartialEq)]\n\npub struct ReadOutput<T: Sized> {\n\n data: T,\n\n read_bytes: usize,\n\n}\n\n\n\nimpl<T: Sized> ReadOutput<T> {\n\n #[inline(always)]\n\n pub fn new(data: T, read_bytes: usize) -> Self {\n\n Self { data, read_bytes }\n\n }\n\n\n\n /// Consumes the read output and returns the inner data.\n\n #[inline(always)]\n\n pub fn into_data(self) -> T {\n", "file_path": "src/endian/read.rs", "rank": 62, "score": 18.386225428349086 }, { "content": " /// This should only be used when reading data from a format or protocol\n\n /// that explicitly defines big endian.\n\n #[inline(always)]\n\n fn write_be<T: EndianWrite>(&mut self, offset: usize, value: &T) -> WriterResult<usize> {\n\n let bytes = self.get_mut_slice_at_offset(offset);\n\n add_error_context(\n\n value.try_write_be(bytes),\n\n offset,\n\n self.get_mut_slice().len(),\n\n )\n\n }\n\n\n\n /// Same as [Writer::write_be], but checks to make sure the bytes can safely be written to the offset.\n\n /// Returns 0 as the write size if the bytes won't fit into the offset.\n\n #[inline(always)]\n\n fn checked_write_be<T: EndianWrite>(&mut self, offset: usize, value: &T) -> usize {\n\n self.write_be(offset, value).unwrap_or(0)\n\n }\n\n}\n\n\n", "file_path": "src/writer.rs", "rank": 63, "score": 18.318900871346678 }, { "content": " index: usize,\n\n }\n\n\n\n impl MockStream {\n\n fn new(bytes: [u8; 8]) -> Self {\n\n Self { bytes, index: 0 }\n\n }\n\n }\n\n\n\n impl Reader for MockStream {\n\n fn get_slice(&self) -> &[u8] {\n\n &self.bytes\n\n }\n\n }\n\n\n\n impl Cursor for MockStream {\n\n fn get_index(&self) -> usize {\n\n self.index\n\n }\n\n\n", "file_path": "src/stream/reader.rs", "rank": 65, "score": 17.757863232624782 }, { "content": " data_len: bytes.len(),\n\n });\n\n }\n\n\n\n Ok(ReadOutput {\n\n data: i64::from_be_bytes(bytes[..byte_count].try_into().unwrap()),\n\n read_bytes: byte_count,\n\n })\n\n }\n\n}\n\n\n\nimpl<const SIZE: usize> EndianRead for [u8; SIZE] {\n\n #[inline(always)]\n\n fn 
try_read_le(bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n if SIZE > bytes.len() {\n\n return Err(Error::InvalidSize {\n\n wanted_size: SIZE,\n\n offset: 0,\n\n data_len: bytes.len(),\n\n });\n", "file_path": "src/endian/read.rs", "rank": 66, "score": 17.486616609089022 }, { "content": "\n\n fn get_bytes(&self) -> [u8; 8] {\n\n self.bytes.clone()\n\n }\n\n }\n\n\n\n impl Writer for MockWriter {\n\n fn get_mut_slice(&mut self) -> &mut [u8] {\n\n &mut self.bytes\n\n }\n\n }\n\n\n\n impl Reader for MockWriter {\n\n fn get_slice(&self) -> &[u8] {\n\n &self.bytes\n\n }\n\n }\n\n\n\n mod get_sized_mut_slice {\n\n use super::*;\n", "file_path": "src/writer.rs", "rank": 67, "score": 17.10935016658505 }, { "content": " assert_eq!(\n\n error,\n\n Error::InvalidSize {\n\n wanted_size: 4,\n\n offset: 6,\n\n data_len: 8,\n\n }\n\n );\n\n }\n\n\n\n #[derive(Debug)]\n\n struct CustomErrorTest;\n\n\n\n impl EndianRead for CustomErrorTest {\n\n fn try_read_le(_bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n unimplemented!()\n\n }\n\n\n\n fn try_read_be(_bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n Err(Error::InvalidRead {\n", "file_path": "src/reader.rs", "rank": 68, "score": 17.034303440518396 }, { "content": " self.bytes.clone()\n\n }\n\n }\n\n\n\n impl Writer for MockStream {\n\n fn get_mut_slice(&mut self) -> &mut [u8] {\n\n &mut self.bytes\n\n }\n\n }\n\n\n\n impl Reader for MockStream {\n\n fn get_slice(&self) -> &[u8] {\n\n &self.bytes\n\n }\n\n }\n\n\n\n impl Cursor for MockStream {\n\n fn get_index(&self) -> usize {\n\n self.index\n\n }\n", "file_path": "src/stream/writer.rs", "rank": 69, "score": 16.99148978322036 }, { "content": " self.cursor\n\n }\n\n\n\n #[inline(always)]\n\n fn set_index(&mut self, index: usize) {\n\n self.cursor = index;\n\n }\n\n}\n\n\n\nimpl<'a> From<StreamContainer<&'a mut [u8]>> for &'a mut [u8] {\n\n #[inline(always)]\n\n fn from(stream: StreamContainer<&'a mut [u8]>) -> Self {\n\n stream.into_raw()\n\n 
}\n\n}\n\n\n\nimpl<'a> From<StreamContainer<&'a [u8]>> for &'a [u8] {\n\n #[inline(always)]\n\n fn from(stream: StreamContainer<&'a [u8]>) -> Self {\n\n stream.into_raw()\n", "file_path": "src/stream/container.rs", "rank": 70, "score": 16.284672301550923 }, { "content": " #[inline(always)]\n\n fn write_le<T: EndianWrite>(&mut self, offset: usize, value: &T) -> WriterResult<usize> {\n\n let bytes = self.get_mut_slice_at_offset(offset);\n\n add_error_context(\n\n value.try_write_le(bytes),\n\n offset,\n\n self.get_mut_slice().len(),\n\n )\n\n }\n\n\n\n /// Same as [Writer::write_le], but checks to make sure the bytes can safely be written to the offset.\n\n /// Returns 0 as the write size if the bytes won't fit into the offset.\n\n #[inline(always)]\n\n fn checked_write_le<T: EndianWrite>(&mut self, offset: usize, value: &T) -> usize {\n\n self.write_le(offset, value).unwrap_or(0)\n\n }\n\n\n\n /// Writes a value in its big endian representation.\n\n ///\n\n /// Prefer endian agnostic methods when possible.\n", "file_path": "src/writer.rs", "rank": 71, "score": 16.244799996288556 }, { "content": " Ok(ReadOutput {\n\n read_bytes: result.get_read_bytes(),\n\n data: result.into_data() != 0,\n\n })\n\n }\n\n}\n\n\n\nimpl EndianRead for u8 {\n\n #[inline(always)]\n\n fn try_read_le(bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n let byte_count = mem::size_of::<u8>();\n\n\n\n if byte_count > bytes.len() {\n\n return Err(Error::InvalidSize {\n\n wanted_size: byte_count,\n\n offset: 0,\n\n data_len: bytes.len(),\n\n });\n\n }\n\n\n", "file_path": "src/endian/read.rs", "rank": 72, "score": 16.101996796302867 }, { "content": " Err(Error::InvalidRead {\n\n message: \"Custom error!\",\n\n })\n\n }\n\n fn try_write_be(&self, _dst: &mut [u8]) -> Result<usize, Error> {\n\n unimplemented!()\n\n }\n\n }\n\n\n\n #[test]\n\n fn should_bubble_up_custom_errors_for_vec() {\n\n let value = CustomErrorTest(0);\n\n let mut bytes = vec![];\n\n let result = bytes.write_le(0, 
&value).unwrap_err();\n\n let expected = Error::InvalidRead {\n\n message: \"Custom error!\",\n\n };\n\n assert_eq!(result, expected)\n\n }\n\n\n", "file_path": "src/writer.rs", "rank": 73, "score": 15.989208204031076 }, { "content": " read_bytes: SIZE,\n\n })\n\n }\n\n}\n\n\n\nimpl EndianRead for () {\n\n fn try_read_le(_bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n Ok(ReadOutput::new((), 0))\n\n }\n\n\n\n fn try_read_be(_bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n Ok(ReadOutput::new((), 0))\n\n }\n\n}\n", "file_path": "src/endian/read.rs", "rank": 74, "score": 15.825081765981405 }, { "content": " /// out vector of the correct size if the read is invalid.\n\n #[cfg(feature = \"alloc\")]\n\n #[inline(always)]\n\n fn default_read_byte_vec(&self, offset: usize, size: usize) -> Vec<u8> {\n\n self.read_byte_vec(offset, size)\n\n .unwrap_or_else(|_| vec![0; size])\n\n }\n\n}\n\n\n\nimpl<const SIZE: usize> Reader for [u8; SIZE] {\n\n #[inline(always)]\n\n fn get_slice(&self) -> &[u8] {\n\n self\n\n }\n\n}\n\n\n\nimpl Reader for &[u8] {\n\n #[inline(always)]\n\n fn get_slice(&self) -> &[u8] {\n\n self\n", "file_path": "src/reader.rs", "rank": 75, "score": 15.749662301437223 }, { "content": " }\n\n\n\n fn try_read_be(_bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n Err(Error::InvalidSize {\n\n wanted_size: 8,\n\n offset: 1,\n\n data_len: 0,\n\n })\n\n }\n\n }\n\n\n\n #[test]\n\n fn should_bubble_up_error_offsets() {\n\n let bytes = vec![];\n\n let result = bytes.read_be::<OffsetErrorTest>(2).unwrap_err();\n\n let expected = Error::InvalidSize {\n\n wanted_size: 8,\n\n offset: 3,\n\n data_len: 0,\n\n };\n", "file_path": "src/reader.rs", "rank": 76, "score": 15.20785567104437 }, { "content": " list.push(item);\n\n }\n\n\n\n let result = ListContainer(list);\n\n let read_bytes = stream.get_index() + 1;\n\n\n\n Ok(ReadOutput::new(result, read_bytes))\n\n }\n\n\n\n #[inline(always)]\n\n fn try_read_be(bytes: &[u8]) -> Result<ReadOutput<Self>, Error> 
{\n\n if bytes.is_empty() {\n\n return Err(Error::InvalidSize {\n\n wanted_size: 1,\n\n offset: 0,\n\n data_len: 0,\n\n });\n\n }\n\n\n\n let count = bytes[0] as usize;\n", "file_path": "macros/tests/endian_read.rs", "rank": 77, "score": 15.09565295640286 }, { "content": " message: \"Custom error!\",\n\n })\n\n }\n\n }\n\n\n\n #[test]\n\n fn should_bubble_up_custom_errors() {\n\n let result = vec![].read_be::<CustomErrorTest>(0).unwrap_err();\n\n let expected = Error::InvalidRead {\n\n message: \"Custom error!\",\n\n };\n\n assert_eq!(result, expected)\n\n }\n\n\n\n #[derive(Debug)]\n\n struct OffsetErrorTest(u32);\n\n\n\n impl EndianRead for OffsetErrorTest {\n\n fn try_read_le(_bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n unimplemented!()\n", "file_path": "src/reader.rs", "rank": 78, "score": 14.909592151109083 }, { "content": " #[inline(always)]\n\n fn try_read_be(bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n let byte_count = mem::size_of::<i16>();\n\n\n\n if byte_count > bytes.len() {\n\n return Err(Error::InvalidSize {\n\n wanted_size: byte_count,\n\n offset: 0,\n\n data_len: bytes.len(),\n\n });\n\n }\n\n\n\n Ok(ReadOutput {\n\n data: i16::from_be_bytes(bytes[..byte_count].try_into().unwrap()),\n\n read_bytes: byte_count,\n\n })\n\n }\n\n}\n\n\n\nimpl EndianRead for u32 {\n", "file_path": "src/endian/read.rs", "rank": 79, "score": 14.829201782891001 }, { "content": " let self_len = self.len();\n\n\n\n if offset_end > self_len {\n\n self.resize(offset_end, 0);\n\n }\n\n\n\n add_error_context(\n\n value.try_write_be(&mut self[offset..]),\n\n offset,\n\n self.get_mut_slice().len(),\n\n )\n\n }\n\n\n\n #[inline(always)]\n\n fn get_sized_mut_slice(&mut self, offset: usize, length: usize) -> WriterResult<&mut [u8]> {\n\n let offset_end = offset + length;\n\n let self_len = self.len();\n\n\n\n if offset_end > self_len {\n\n self.resize(offset_end, 0);\n", "file_path": "src/writer.rs", "rank": 80, "score": 14.79216238007008 }, { "content": " 
wanted_size: byte_count,\n\n offset: 0,\n\n data_len: bytes.len(),\n\n });\n\n }\n\n\n\n Ok(ReadOutput {\n\n data: u64::from_be_bytes(bytes[..byte_count].try_into().unwrap()),\n\n read_bytes: byte_count,\n\n })\n\n }\n\n}\n\n\n\nimpl EndianRead for i64 {\n\n #[inline(always)]\n\n fn try_read_le(bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n let byte_count = mem::size_of::<i64>();\n\n\n\n if byte_count > bytes.len() {\n\n return Err(Error::InvalidSize {\n", "file_path": "src/endian/read.rs", "rank": 81, "score": 14.72508265555217 }, { "content": " let byte_count = mem::size_of::<u32>();\n\n\n\n if byte_count > bytes.len() {\n\n return Err(Error::InvalidSize {\n\n wanted_size: byte_count,\n\n offset: 0,\n\n data_len: bytes.len(),\n\n });\n\n }\n\n\n\n Ok(ReadOutput {\n\n data: u32::from_be_bytes(bytes[..byte_count].try_into().unwrap()),\n\n read_bytes: byte_count,\n\n })\n\n }\n\n}\n\n\n\nimpl EndianRead for i32 {\n\n #[inline(always)]\n\n fn try_read_le(bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n", "file_path": "src/endian/read.rs", "rank": 82, "score": 14.72508265555217 }, { "content": " if byte_count > bytes.len() {\n\n return Err(Error::InvalidSize {\n\n wanted_size: byte_count,\n\n offset: 0,\n\n data_len: bytes.len(),\n\n });\n\n }\n\n\n\n Ok(ReadOutput {\n\n data: i32::from_be_bytes(bytes[..byte_count].try_into().unwrap()),\n\n read_bytes: byte_count,\n\n })\n\n }\n\n}\n\n\n\nimpl EndianRead for u64 {\n\n #[inline(always)]\n\n fn try_read_le(bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n let byte_count = mem::size_of::<u64>();\n\n\n", "file_path": "src/endian/read.rs", "rank": 83, "score": 14.72508265555217 }, { "content": "use darling::FromMeta;\n\nuse syn::Attribute;\n\n\n\n#[derive(Debug, FromMeta)]\n\npub struct MacroArgs {\n\n pub pad_before: usize,\n\n}\n\n\n\nimpl MacroArgs {\n\n pub fn from_attribute(attr: &Attribute) -> Option<Self> {\n\n attr.parse_meta()\n\n .ok()\n\n .and_then(|meta| MacroArgs::from_meta(&meta).ok())\n\n 
}\n\n\n\n pub fn from_attributes(attrs: &[Attribute]) -> Option<Self> {\n\n attrs.iter().find_map(Self::from_attribute)\n\n }\n\n}\n", "file_path": "macros/src/macro_args.rs", "rank": 84, "score": 14.70128760834322 }, { "content": " read_bytes: byte_count,\n\n })\n\n }\n\n}\n\n\n\nimpl EndianRead for i8 {\n\n #[inline(always)]\n\n fn try_read_le(bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n let byte_count = mem::size_of::<i8>();\n\n\n\n if byte_count > bytes.len() {\n\n return Err(Error::InvalidSize {\n\n wanted_size: byte_count,\n\n offset: 0,\n\n data_len: bytes.len(),\n\n });\n\n }\n\n\n\n Ok(ReadOutput {\n\n data: i8::from_le_bytes(bytes[..byte_count].try_into().unwrap()),\n", "file_path": "src/endian/read.rs", "rank": 85, "score": 14.622730467408545 }, { "content": "\n\nimpl EndianRead for i16 {\n\n #[inline(always)]\n\n fn try_read_le(bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n let byte_count = mem::size_of::<i16>();\n\n\n\n if byte_count > bytes.len() {\n\n return Err(Error::InvalidSize {\n\n wanted_size: byte_count,\n\n offset: 0,\n\n data_len: bytes.len(),\n\n });\n\n }\n\n\n\n Ok(ReadOutput {\n\n data: i16::from_le_bytes(bytes[..byte_count].try_into().unwrap()),\n\n read_bytes: byte_count,\n\n })\n\n }\n\n\n", "file_path": "src/endian/read.rs", "rank": 86, "score": 14.622730467408545 }, { "content": " }\n\n}\n\n\n\nimpl EndianRead for u16 {\n\n #[inline(always)]\n\n fn try_read_le(bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n let byte_count = mem::size_of::<u16>();\n\n\n\n if byte_count > bytes.len() {\n\n return Err(Error::InvalidSize {\n\n wanted_size: byte_count,\n\n offset: 0,\n\n data_len: bytes.len(),\n\n });\n\n }\n\n\n\n Ok(ReadOutput {\n\n data: u16::from_le_bytes(bytes[..byte_count].try_into().unwrap()),\n\n read_bytes: byte_count,\n\n })\n", "file_path": "src/endian/read.rs", "rank": 87, "score": 14.622730467408545 }, { "content": " #[inline(always)]\n\n fn get_sized_mut_slice(&mut self, offset: usize, length: 
usize) -> WriterResult<&mut [u8]> {\n\n let data = self.get_mut_slice();\n\n let offset_end = offset + length;\n\n\n\n if data.len() < offset_end {\n\n return Err(Error::InvalidSize {\n\n wanted_size: length,\n\n data_len: data.len(),\n\n offset,\n\n });\n\n }\n\n\n\n Ok(&mut data[offset..offset_end])\n\n }\n\n\n\n /// Same as [Writer::get_sized_mut_slice], except the length comes from `T.len()`.\n\n #[inline(always)]\n\n fn get_type_sized_mut_slice<T: Sized>(&mut self, offset: usize) -> WriterResult<&mut [u8]> {\n\n let length = mem::size_of::<T>();\n", "file_path": "src/writer.rs", "rank": 88, "score": 14.579608016072285 }, { "content": "\n\n #[test]\n\n fn should_return_error_if_size_is_too_large_for_offset() {\n\n let mut writer = MockWriter::new([1, 2, 3, 4, 5, 6, 7, 8]);\n\n let bytes = [0xaa, 0xbb, 0xcc, 0xdd];\n\n let error = writer\n\n .write_bytes(6, &bytes)\n\n .expect_err(\"Length should have been too large\");\n\n\n\n assert_eq!(\n\n error,\n\n Error::InvalidSize {\n\n wanted_size: 4,\n\n offset: 6,\n\n data_len: 8,\n\n }\n\n );\n\n }\n\n\n\n #[test]\n", "file_path": "src/writer.rs", "rank": 89, "score": 14.524202171291128 }, { "content": " use crate::Error;\n\n use alloc::vec;\n\n\n\n #[test]\n\n fn should_return_a_value() {\n\n let mut reader = MockStream::new([0x11, 0x22, 0x33, 0x44, 0xaa, 0xbb, 0xcc, 0xdd]);\n\n reader.set_index(4);\n\n let value = reader\n\n .read_byte_stream(3)\n\n .expect(\"Read should have been successful.\");\n\n\n\n assert_eq!(value, vec![0xaa, 0xbb, 0xcc]);\n\n }\n\n\n\n #[test]\n\n fn should_return_error_if_size_is_too_large_for_offset() {\n\n let mut reader = MockStream::new([0x11, 0x22, 0x33, 0x44, 0xaa, 0xbb, 0xcc, 0xdd]);\n\n reader.set_index(6);\n\n let error = reader\n\n .read_byte_stream(4)\n", "file_path": "src/stream/reader.rs", "rank": 90, "score": 14.519760590735181 }, { "content": " /// Same as [StreamWriter::write_stream_be], but does not write if there is not enough space.\n\n #[inline(always)]\n\n fn 
checked_write_stream_be<T: EndianWrite>(&mut self, value: &T) -> usize {\n\n let index = self.swap_incremented_index_for_type::<T>();\n\n self.checked_write_be(index, value)\n\n }\n\n\n\n /// Same as [Writer::write_bytes], but uses the current stream instead of an offset.\n\n #[inline(always)]\n\n fn write_stream_bytes(&mut self, bytes: &[u8]) -> WriterResult<usize> {\n\n let index = self.swap_incremented_index(bytes.len());\n\n self.write_bytes(index, bytes)\n\n }\n\n\n\n /// Same as [Writer::checked_write_bytes], but does not write if there is not enough space.\n\n #[inline(always)]\n\n fn checked_write_stream_bytes(&mut self, bytes: &[u8]) -> usize {\n\n let index = self.swap_incremented_index(bytes.len());\n\n self.checked_write_bytes(index, bytes)\n\n }\n", "file_path": "src/stream/writer.rs", "rank": 91, "score": 14.419858134179892 }, { "content": "\n\n #[inline(always)]\n\n fn get_sized_mut_slice(&mut self, offset: usize, length: usize) -> WriterResult<&mut [u8]> {\n\n self.raw.get_sized_mut_slice(offset, length)\n\n }\n\n\n\n #[inline(always)]\n\n fn write_le<U: EndianWrite>(&mut self, offset: usize, value: &U) -> WriterResult<usize> {\n\n self.raw.write_le(offset, value)\n\n }\n\n\n\n #[inline(always)]\n\n fn write_be<U: EndianWrite>(&mut self, offset: usize, value: &U) -> WriterResult<usize> {\n\n self.raw.write_be(offset, value)\n\n }\n\n}\n\n\n\nimpl<T: Reader> Cursor for StreamContainer<T> {\n\n #[inline(always)]\n\n fn get_index(&self) -> usize {\n", "file_path": "src/stream/container.rs", "rank": 92, "score": 14.40615149767834 }, { "content": " #[inline(always)]\n\n fn try_read_le(bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n let byte_count = mem::size_of::<u32>();\n\n\n\n if byte_count > bytes.len() {\n\n return Err(Error::InvalidSize {\n\n wanted_size: byte_count,\n\n offset: 0,\n\n data_len: bytes.len(),\n\n });\n\n }\n\n\n\n Ok(ReadOutput {\n\n data: u32::from_le_bytes(bytes[..byte_count].try_into().unwrap()),\n\n read_bytes: 
byte_count,\n\n })\n\n }\n\n\n\n #[inline(always)]\n\n fn try_read_be(bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n", "file_path": "src/endian/read.rs", "rank": 93, "score": 14.4017750293293 }, { "content": " .expect_err(\"Length should have been too large\");\n\n\n\n assert_eq!(\n\n error,\n\n Error::InvalidSize {\n\n wanted_size: 4,\n\n offset: 6,\n\n data_len: 8,\n\n }\n\n );\n\n }\n\n }\n\n\n\n mod default_read_byte_stream {\n\n use super::*;\n\n use alloc::vec;\n\n\n\n #[test]\n\n fn should_return_a_value() {\n\n let mut reader = MockStream::new([0x11, 0x22, 0x33, 0x44, 0xaa, 0xbb, 0xcc, 0xdd]);\n", "file_path": "src/stream/reader.rs", "rank": 94, "score": 14.376406537351558 }, { "content": "\n\n impl EndianRead for Sum {\n\n fn try_read_le(bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n let sum = bytes[0].wrapping_add(bytes[1]);\n\n Ok(ReadOutput::new(Sum(sum), 2))\n\n }\n\n\n\n fn try_read_be(_bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n unimplemented!()\n\n }\n\n }\n\n\n\n #[test]\n\n fn should_read_values_with_dynamic_read_lengths() {\n\n let mut reader = MockStream::new([0x11, 0x22, 0xaa, 0xbb, 0x88, 0x99, 0x01, 0x02]);\n\n let value = reader\n\n .read_stream_le::<Sum>()\n\n .expect(\"Read should have been successful.\");\n\n\n\n assert_eq!(value, Sum(0x33));\n", "file_path": "src/stream/reader.rs", "rank": 95, "score": 14.255294875405765 }, { "content": " .write_le(0, &value)\n\n .expect(\"Write should have succeeded\");\n\n\n\n assert_eq!(written_length, 4);\n\n\n\n let result = writer\n\n .read_le::<u32>(0)\n\n .expect(\"Read should have succeeded\");\n\n assert_eq!(result, 0xaabbccddu32);\n\n assert_eq!(writer.len(), 4);\n\n }\n\n\n\n #[derive(Debug)]\n\n struct CustomErrorTest(u32);\n\n\n\n impl EndianWrite for CustomErrorTest {\n\n fn get_size(&self) -> usize {\n\n 0\n\n }\n\n fn try_write_le(&self, _dst: &mut [u8]) -> Result<usize, Error> {\n", "file_path": "src/writer.rs", "rank": 96, "score": 14.23973256508147 }, { 
"content": " impl EndianRead for CustomErrorTest {\n\n fn try_read_le(_bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n Err(Error::InvalidRead {\n\n message: \"Custom error!\",\n\n })\n\n }\n\n\n\n fn try_read_be(_bytes: &[u8]) -> Result<ReadOutput<Self>, Error> {\n\n unimplemented!()\n\n }\n\n }\n\n\n\n #[test]\n\n fn should_bubble_up_custom_errors() {\n\n let result = vec![].read_le::<CustomErrorTest>(0).unwrap_err();\n\n let expected = Error::InvalidRead {\n\n message: \"Custom error!\",\n\n };\n\n assert_eq!(result, expected)\n\n }\n", "file_path": "src/reader.rs", "rank": 97, "score": 14.198727046306415 }, { "content": "impl<const SIZE: usize> Writer for [u8; SIZE] {\n\n #[inline(always)]\n\n fn get_mut_slice(&mut self) -> &mut [u8] {\n\n self\n\n }\n\n}\n\n\n\nimpl Writer for &mut [u8] {\n\n #[inline(always)]\n\n fn get_mut_slice(&mut self) -> &mut [u8] {\n\n self\n\n }\n\n}\n\n\n\n#[cfg(feature = \"alloc\")]\n\nimpl Writer for Vec<u8> {\n\n #[inline(always)]\n\n fn get_mut_slice(&mut self) -> &mut [u8] {\n\n self.as_mut_slice()\n\n }\n", "file_path": "src/writer.rs", "rank": 98, "score": 14.13693309486042 }, { "content": " fn set_index(&mut self, index: usize) {\n\n self.index = index;\n\n }\n\n }\n\n\n\n mod read_stream {\n\n use super::*;\n\n use crate::Error;\n\n\n\n #[test]\n\n fn should_return_a_value() {\n\n let mut reader = MockStream::new(u64::to_ne_bytes(0x1122334411223344));\n\n let value = reader\n\n .read_stream::<u32>()\n\n .expect(\"Read should have been successful.\");\n\n\n\n assert_eq!(value, 0x11223344);\n\n assert_eq!(reader.get_index(), 4);\n\n }\n\n\n", "file_path": "src/stream/reader.rs", "rank": 99, "score": 14.1364225900454 } ]
Rust
src/flows_node.rs
maidsafe/routing_model
97f11a3dda8a6ff12a4db19b2096867ec9fa2d59
use crate::{ state::JoiningState, utilities::{ GenesisPfxInfo, LocalEvent, Name, ProofRequest, RelocatedInfo, Rpc, TryResult, WaitedEvent, }, }; use unwrap::unwrap; #[derive(Debug, PartialEq)] pub struct JoiningRelocateCandidate<'a>(pub &'a mut JoiningState); impl<'a> JoiningRelocateCandidate<'a> { pub fn start_event_loop(&mut self, relocated_info: RelocatedInfo) { self.0.join_routine.relocated_info = Some(relocated_info); self.connect_or_send_candidate_info(); self.start_refused_timeout(); } pub fn try_next(&mut self, event: WaitedEvent) -> TryResult { let result = match event { WaitedEvent::Rpc(rpc) => self.try_rpc(rpc), WaitedEvent::LocalEvent(local_event) => self.try_local_event(local_event), _ => TryResult::Unhandled, }; if result == TryResult::Unhandled { self.discard(); } TryResult::Handled } fn try_rpc(&mut self, rpc: Rpc) -> TryResult { if !rpc .destination() .map(|name| self.0.action.is_our_name(name)) .unwrap_or(false) { return TryResult::Unhandled; } match rpc { Rpc::NodeApproval(_, info) => { self.exit(info); TryResult::Handled } Rpc::ConnectionInfoResponse { source, .. } => { self.send_candidate_info(source); TryResult::Handled } Rpc::ResourceProof { proof, source, .. } => { self.start_compute_resource_proof(source, proof); TryResult::Handled } Rpc::ResourceProofReceipt { source, .. 
} => { self.send_next_proof_response(source); TryResult::Handled } _ => TryResult::Unhandled, } } fn try_local_event(&mut self, local_event: LocalEvent) -> TryResult { match local_event { LocalEvent::ResourceProofForElderReady(source) => { self.send_next_proof_response(source); TryResult::Handled } LocalEvent::JoiningTimeoutResendInfo => { self.connect_or_send_candidate_info(); TryResult::Handled } _ => TryResult::Unhandled, } } fn exit(&mut self, info: GenesisPfxInfo) { self.0.join_routine.routine_complete_output = Some(info); } fn discard(&mut self) {} fn send_next_proof_response(&mut self, source: Name) { if let Some(next_part) = self.0.action.get_next_resource_proof_part(source) { self.0 .action .send_resource_proof_response(source, next_part); } } fn send_candidate_info(&mut self, destination: Name) { self.0 .action .send_candidate_info(destination, unwrap!(self.0.join_routine.relocated_info)); } fn connect_or_send_candidate_info(&mut self) { let relocated_info = unwrap!(self.0.join_routine.relocated_info); let (connected, unconnected) = self.0.action.get_connected_and_unconnected(relocated_info); for name in unconnected { self.0.action.send_connection_info_request(name); } for name in connected { self.0.action.send_candidate_info(name, relocated_info); } self.0 .action .schedule_event(LocalEvent::JoiningTimeoutResendInfo); } fn start_refused_timeout(&mut self) { self.0 .action .schedule_event(LocalEvent::JoiningTimeoutProofRefused); } fn start_compute_resource_proof(&mut self, source: Name, proof: ProofRequest) { self.0.action.start_compute_resource_proof(source, proof); } }
use crate::{ state::JoiningState, utilities::{ GenesisPfxInfo, LocalEvent, Name, ProofRequest, RelocatedInfo, Rpc, TryResult, WaitedEvent, }, }; use unwrap::unwrap; #[derive(Debug, PartialEq)] pub struct JoiningRelocateCandidate<'a>(pub &'a mut JoiningState); impl<'a> JoiningRelocateCandidate<'a> { pub fn start_event_loop(&mut self, relocated_info: RelocatedInfo) { self.0.join_routine.relocated_info = Some(relocated_info); self.connect_or_send_candidate_info(); self.start_refused_timeout(); } pub fn try_next(&mut self, event: WaitedEvent) -> TryResult { let result = match event { WaitedEvent::Rpc(rpc) => self.try_rpc(rpc), WaitedEvent::LocalEvent(local_event) => self.try_local_event(local_event), _ => TryResult::Unhandled, }; if result == TryResult::Unhandled { self.discard(); } TryResult::Handled } fn try_rpc(&mut self, rpc: Rpc) -> TryResult { if !rpc .destination() .map(|name| self.0.action.is_our_name(name)) .unwrap_or(false) { return TryResult::Unhandled; } match rpc { Rpc::NodeApproval(_, info) => { self.exit(info); TryResult::Handled } Rpc::ConnectionInfoResponse { source, .. } => { self.send_candidate_info(source); TryResult::Handled } Rpc::ResourceProof { proof, source, .. } => { self.start_compute_resource_proof(source, proof); TryResult::Handled } Rpc::ResourceProofReceipt { source, .. } => { self.send_next_proof_response(source); TryResult::Handled } _ => TryResult::Unhandled, } } fn try_local_event(&mut self, local_event: LocalEvent) -> TryResult { match local_event { LocalEvent::ResourceProofForElderReady(source) => { self.send_next_proof_response(source); TryResult::H
fn exit(&mut self, info: GenesisPfxInfo) { self.0.join_routine.routine_complete_output = Some(info); } fn discard(&mut self) {} fn send_next_proof_response(&mut self, source: Name) { if let Some(next_part) = self.0.action.get_next_resource_proof_part(source) { self.0 .action .send_resource_proof_response(source, next_part); } } fn send_candidate_info(&mut self, destination: Name) { self.0 .action .send_candidate_info(destination, unwrap!(self.0.join_routine.relocated_info)); } fn connect_or_send_candidate_info(&mut self) { let relocated_info = unwrap!(self.0.join_routine.relocated_info); let (connected, unconnected) = self.0.action.get_connected_and_unconnected(relocated_info); for name in unconnected { self.0.action.send_connection_info_request(name); } for name in connected { self.0.action.send_candidate_info(name, relocated_info); } self.0 .action .schedule_event(LocalEvent::JoiningTimeoutResendInfo); } fn start_refused_timeout(&mut self) { self.0 .action .schedule_event(LocalEvent::JoiningTimeoutProofRefused); } fn start_compute_resource_proof(&mut self, source: Name, proof: ProofRequest) { self.0.action.start_compute_resource_proof(source, proof); } }
andled } LocalEvent::JoiningTimeoutResendInfo => { self.connect_or_send_candidate_info(); TryResult::Handled } _ => TryResult::Unhandled, } }
function_block-function_prefixed
[ { "content": "fn process_events(mut state: MemberState, events: &[Event]) -> MemberState {\n\n for event in events.iter().cloned() {\n\n if TryResult::Unhandled == state.try_next(event) {\n\n state.failure_event(event);\n\n }\n\n\n\n if state.failure.is_some() {\n\n break;\n\n }\n\n }\n\n\n\n state\n\n}\n\n\n", "file_path": "src/scenario_tests.rs", "rank": 0, "score": 70473.15102197281 }, { "content": "struct RandomEvents(Vec<Event>);\n\n\n\nimpl RandomEvents {\n\n /// With a 50% probability of skipping the event, try to handle each one in `self`.\n\n fn handle<T: Rng>(&self, member_state: &mut MemberState, rng: &mut T) {\n\n for optional_event in &self.0 {\n\n if rng.gen() {\n\n assert_eq!(TryResult::Handled, member_state.try_next(*optional_event));\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/functional_tests.rs", "rank": 1, "score": 61421.939163968695 }, { "content": "fn get_relocated_info(candidate: Candidate, section_info: SectionInfo) -> RelocatedInfo {\n\n RelocatedInfo {\n\n candidate,\n\n expected_age: candidate.0.age.increment_by_one(),\n\n target_interval_centre: TARGET_INTERVAL_1,\n\n section_info,\n\n }\n\n}\n\n\n\n//////////////////\n\n/// Dst\n\n//////////////////\n\n\n\nmod dst_tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn rpc_expect_candidate() {\n\n run_test(\n\n \"\",\n", "file_path": "src/scenario_tests.rs", "rank": 2, "score": 54848.40901606174 }, { "content": "fn arrange_initial_state(state: &MemberState, events: &[Event]) -> MemberState {\n\n let state = process_events(state.clone(), events);\n\n state.action.remove_processed_state();\n\n state\n\n}\n\n\n", "file_path": "src/scenario_tests.rs", "rank": 3, "score": 46754.79900931307 }, { "content": "#[derive(Debug, PartialEq, Default, Clone)]\n\nstruct AssertState {\n\n action_our_events: Vec<Event>,\n\n}\n\n\n", "file_path": "src/scenario_tests.rs", "rank": 4, "score": 31932.98778826998 }, { "content": "fn run_test(\n\n test_name: &str,\n\n start_state: &MemberState,\n\n events: 
&[Event],\n\n expected_state: &AssertState,\n\n) {\n\n let final_state = process_events(start_state.clone(), &events);\n\n let action = final_state.action.inner();\n\n\n\n let final_state = (\n\n AssertState {\n\n action_our_events: action.our_events,\n\n },\n\n final_state.failure,\n\n );\n\n let expected_state = (expected_state.clone(), None);\n\n\n\n assert_eq!(expected_state, final_state, \"{}\", test_name);\n\n}\n\n\n", "file_path": "src/scenario_tests.rs", "rank": 5, "score": 31717.64758974336 }, { "content": "#[test]\n\nfn relocate_adult_src() {\n\n let mut rng = get_rng();\n\n let nodes = iter::repeat_with(|| rng.gen())\n\n .take(6)\n\n .collect::<Vec<Node>>();\n\n\n\n let action = Action::new(\n\n InnerAction::new_with_our_attributes(rng.gen())\n\n .with_next_target_interval(rng.gen())\n\n .extend_current_nodes_with(&NodeState::default_elder(), &nodes),\n\n );\n\n\n\n // Sort into elders and adults.\n\n let to_become_adults = unwrap!(action.check_elder());\n\n let relocating_node = unwrap!(to_become_adults.changes.choose(&mut rng)).0;\n\n action.mark_elder_change(to_become_adults);\n\n\n\n let mut member_state = MemberState {\n\n action,\n\n ..Default::default()\n", "file_path": "src/functional_tests.rs", "rank": 6, "score": 30324.447536577212 }, { "content": "#[test]\n\nfn relocate_adult_dst() {\n\n let mut rng = get_rng();\n\n\n\n let dst_nodes = iter::repeat_with(|| rng.gen())\n\n .take(6)\n\n .collect::<Vec<Node>>();\n\n\n\n let action = Action::new(\n\n InnerAction::new_with_our_attributes(rng.gen())\n\n .with_next_target_interval(rng.gen())\n\n .extend_current_nodes_with(&NodeState::default_elder(), &dst_nodes),\n\n );\n\n let dst_name = action.our_name();\n\n\n\n // Sort into elders and adults.\n\n let to_become_adults = unwrap!(action.check_elder());\n\n action.mark_elder_change(to_become_adults);\n\n\n\n let mut member_state = MemberState {\n\n action,\n", "file_path": "src/functional_tests.rs", "rank": 7, "score": 30324.447536577212 }, { 
"content": " ConnectionInfoResponse {\n\n source: Name,\n\n destination: Name,\n\n connection_info: i32,\n\n },\n\n\n\n Merge(SectionInfo),\n\n}\n\n\n\nimpl Rpc {\n\n pub fn to_event(&self) -> Event {\n\n Event::Rpc(*self)\n\n }\n\n\n\n pub fn destination(&self) -> Option<Name> {\n\n match self {\n\n Rpc::RefuseCandidate(_)\n\n | Rpc::RelocateResponse(_)\n\n | Rpc::RelocatedInfo(_)\n\n | Rpc::ExpectCandidate(_)\n", "file_path": "src/utilities.rs", "rank": 8, "score": 29472.853285733454 }, { "content": "\n\n pub fn to_test_event(&self) -> Option<TestEvent> {\n\n match *self {\n\n Event::TestEvent(test_event) => Some(test_event),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum Rpc {\n\n RefuseCandidate(Candidate),\n\n RelocateResponse(RelocatedInfo),\n\n RelocatedInfo(RelocatedInfo),\n\n\n\n ExpectCandidate(Candidate),\n\n\n\n ResourceProof {\n\n candidate: Candidate,\n\n source: Name,\n", "file_path": "src/utilities.rs", "rank": 9, "score": 29468.10269601567 }, { "content": " | Rpc::Merge(_) => None,\n\n\n\n Rpc::NodeApproval(candidate, _)\n\n | Rpc::ResourceProof { candidate, .. }\n\n | Rpc::ResourceProofReceipt { candidate, .. } => Some(candidate.0.name),\n\n\n\n Rpc::ResourceProofResponse { destination, .. }\n\n | Rpc::CandidateInfo(CandidateInfo { destination, .. })\n\n | Rpc::ConnectionInfoRequest { destination, .. }\n\n | Rpc::ConnectionInfoResponse { destination, .. 
} => Some(*destination),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum ParsecVote {\n\n ExpectCandidate(Candidate),\n\n\n\n Online(Candidate, Candidate),\n\n PurgeCandidate(Candidate),\n", "file_path": "src/utilities.rs", "rank": 10, "score": 29464.8324394878 }, { "content": " proof: ProofRequest,\n\n },\n\n ResourceProofReceipt {\n\n candidate: Candidate,\n\n source: Name,\n\n },\n\n NodeApproval(Candidate, GenesisPfxInfo),\n\n\n\n ResourceProofResponse {\n\n candidate: Candidate,\n\n destination: Name,\n\n proof: Proof,\n\n },\n\n CandidateInfo(CandidateInfo),\n\n\n\n ConnectionInfoRequest {\n\n source: Name,\n\n destination: Name,\n\n connection_info: i32,\n\n },\n", "file_path": "src/utilities.rs", "rank": 11, "score": 29464.390341809605 }, { "content": "\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub struct CandidateInfo {\n\n pub old_public_id: Candidate,\n\n pub new_public_id: Candidate,\n\n pub destination: Name,\n\n pub waiting_candidate_name: Name,\n\n pub valid: bool,\n\n}\n\n\n\n// Event passed to get out of \"Wait for\" state in flow diagram:\n\n// Pass to try_next to the implementations.\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum WaitedEvent {\n\n Rpc(Rpc),\n\n ParsecConsensus(ParsecVote),\n\n LocalEvent(LocalEvent),\n\n}\n\n\n\n// Event allowing to drive the tests and collect output, a superset of WaitedEvent.\n", "file_path": "src/utilities.rs", "rank": 12, "score": 29462.987803374628 }, { "content": "#[derive(Debug, PartialEq, Default, Copy, Clone)]\n\npub struct ProofSource(pub i32);\n\n\n\nimpl ProofSource {\n\n pub fn next_part(&mut self) -> Option<Proof> {\n\n if self.0 > -1 {\n\n self.0 -= 1;\n\n }\n\n\n\n self.resend()\n\n }\n\n\n\n fn resend(self) -> Option<Proof> {\n\n match self.0.cmp(&0) {\n\n Ordering::Greater => Some(Proof::ValidPart),\n\n Ordering::Equal => Some(Proof::ValidEnd),\n\n Ordering::Less => None,\n\n }\n\n }\n\n}\n", "file_path": "src/utilities.rs", "rank": 13, "score": 
29461.923646735457 }, { "content": " NodeDetectedOffline(Node),\n\n NodeDetectedBackOnline(Node),\n\n}\n\n\n\nimpl LocalEvent {\n\n pub fn to_event(&self) -> Event {\n\n Event::LocalEvent(*self)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum TestEvent {\n\n SetChurnNeeded(ChurnNeeded),\n\n SetShortestPrefix(Option<Section>),\n\n SetWorkUnitEnoughToRelocate(Node),\n\n SetResourceProof(Name, ProofSource),\n\n}\n\n\n\nimpl TestEvent {\n\n pub fn to_event(self) -> Event {\n", "file_path": "src/utilities.rs", "rank": 14, "score": 29460.214992972185 }, { "content": " Event::TestEvent(self)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum ActionTriggered {\n\n WorkUnitIncremented,\n\n MergeInfoStored(SectionInfo),\n\n OurSectionChanged(SectionInfo),\n\n\n\n CompleteMerge,\n\n CompleteSplit,\n\n\n\n Scheduled(LocalEvent),\n\n\n\n ComputeResourceProofForElder(Name),\n\n\n\n // WaitedEvent that should be handled by a flow but are not.\n\n NotYetImplementedErrorTriggered,\n\n // Unexpected event ignored.\n", "file_path": "src/utilities.rs", "rank": 15, "score": 29459.149834954424 }, { "content": "impl NodeChange {\n\n pub fn to_event(self) -> Event {\n\n Event::NodeChange(self)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord)]\n\npub struct RelocatedInfo {\n\n pub candidate: Candidate,\n\n pub expected_age: Age,\n\n pub target_interval_centre: Name,\n\n pub section_info: SectionInfo,\n\n}\n\n\n\nimpl RelocatedInfo {\n\n #[allow(dead_code)]\n\n pub fn old_public_id(&self) -> Candidate {\n\n self.candidate\n\n }\n\n}\n", "file_path": "src/utilities.rs", "rank": 16, "score": 29458.428202094743 }, { "content": "\n\nimpl State {\n\n pub fn is_relocating(self) -> bool {\n\n self == State::RelocatingAgeIncrease\n\n || self == State::RelocatingHop\n\n || self == State::RelocatingBackOnline\n\n }\n\n\n\n pub fn waiting_candidate_info(self) -> Option<RelocatedInfo> {\n\n match self {\n\n 
State::WaitingCandidateInfo(info) => Some(info),\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn is_not_yet_full_node(self) -> bool {\n\n match self {\n\n State::WaitingCandidateInfo(_) | State::WaitingProofing | State::RelocatingHop => true,\n\n _ => false,\n\n }\n", "file_path": "src/utilities.rs", "rank": 17, "score": 29458.28836495745 }, { "content": "#[derive(Clone, Copy, Default, PartialEq, PartialOrd, Eq, Ord)]\n\npub struct Attributes {\n\n pub age: Age,\n\n pub name: Name,\n\n}\n\n\n\nimpl Debug for Attributes {\n\n fn fmt(&self, formatter: &mut Formatter) -> fmt::Result {\n\n write!(formatter, \"{:?}, {:?}\", self.age, self.name)\n\n }\n\n}\n\n\n\nimpl Distribution<Attributes> for Standard {\n\n fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Attributes {\n\n Attributes {\n\n age: rng.gen(),\n\n name: rng.gen(),\n\n }\n\n }\n\n}\n", "file_path": "src/utilities.rs", "rank": 18, "score": 29458.09502587353 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum Event {\n\n Rpc(Rpc),\n\n ParsecConsensus(ParsecVote),\n\n LocalEvent(LocalEvent),\n\n TestEvent(TestEvent),\n\n\n\n NodeChange(NodeChange),\n\n ActionTriggered(ActionTriggered),\n\n}\n\n\n\nimpl Event {\n\n pub fn to_waited_event(&self) -> Option<WaitedEvent> {\n\n match *self {\n\n Event::Rpc(rpc) => Some(WaitedEvent::Rpc(rpc)),\n\n Event::ParsecConsensus(parsec_vote) => Some(WaitedEvent::ParsecConsensus(parsec_vote)),\n\n Event::LocalEvent(local_event) => Some(WaitedEvent::LocalEvent(local_event)),\n\n Event::TestEvent(_) | Event::NodeChange(_) | Event::ActionTriggered(_) => None,\n\n }\n\n }\n", "file_path": "src/utilities.rs", "rank": 19, "score": 29457.875634357824 }, { "content": "impl ParsecVote {\n\n pub fn to_event(&self) -> Event {\n\n Event::ParsecConsensus(*self)\n\n }\n\n\n\n pub fn candidate(&self) -> Option<Candidate> {\n\n match self {\n\n ParsecVote::ExpectCandidate(candidate)\n\n | ParsecVote::Online(candidate, _)\n\n | ParsecVote::PurgeCandidate(candidate)\n\n | 
ParsecVote::RefuseCandidate(candidate)\n\n | ParsecVote::RelocateResponse(RelocatedInfo { candidate, .. }) => Some(*candidate),\n\n\n\n ParsecVote::CheckResourceProof\n\n | ParsecVote::AddElderNode(_)\n\n | ParsecVote::RemoveElderNode(_)\n\n | ParsecVote::NewSectionInfo(_)\n\n | ParsecVote::WorkUnitIncrement\n\n | ParsecVote::CheckRelocate\n\n | ParsecVote::RelocatedInfo(_)\n", "file_path": "src/utilities.rs", "rank": 20, "score": 29457.768735765905 }, { "content": " | ParsecVote::CheckElder\n\n | ParsecVote::Offline(_)\n\n | ParsecVote::BackOnline(_)\n\n | ParsecVote::NeighbourMerge(_) => None,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum LocalEvent {\n\n TimeoutAccept,\n\n CheckResourceProofTimeout,\n\n\n\n TimeoutWorkUnit,\n\n TimeoutCheckRelocate,\n\n\n\n TimeoutCheckElder,\n\n JoiningTimeoutResendInfo,\n\n JoiningTimeoutProofRefused,\n\n ResourceProofForElderReady(Name),\n", "file_path": "src/utilities.rs", "rank": 21, "score": 29457.168842471976 }, { "content": "pub struct ProofRequest {\n\n pub value: i32,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum Proof {\n\n ValidPart,\n\n ValidEnd,\n\n Invalid,\n\n}\n\n\n\nimpl Proof {\n\n pub fn is_valid(self) -> bool {\n\n match self {\n\n Proof::ValidPart | Proof::ValidEnd => true,\n\n Proof::Invalid => false,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/utilities.rs", "rank": 22, "score": 29456.374172418404 }, { "content": " UnexpectedEventErrorTriggered,\n\n}\n\n\n\nimpl ActionTriggered {\n\n pub fn to_event(self) -> Event {\n\n Event::ActionTriggered(self)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum TryResult {\n\n Handled,\n\n Unhandled,\n\n}\n", "file_path": "src/utilities.rs", "rank": 23, "score": 29455.679285141647 }, { "content": " fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Name {\n\n Name(rng.gen_range(-9999, 10000))\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Default, PartialEq, PartialOrd, Eq, Ord)]\n\npub struct Age(pub 
i32);\n\n\n\nimpl Distribution<Age> for Standard {\n\n fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Age {\n\n Age(rng.gen_range(5, 101))\n\n }\n\n}\n\n\n\nimpl Age {\n\n pub fn increment_by_one(self) -> Age {\n\n Age(self.0 + 1)\n\n }\n\n}\n\n\n", "file_path": "src/utilities.rs", "rank": 24, "score": 29454.950495536068 }, { "content": "\n\n#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord)]\n\npub struct Candidate(pub Attributes);\n\n\n\nimpl Candidate {\n\n pub fn name(self) -> Name {\n\n self.0.name\n\n }\n\n}\n\n\n\nimpl Distribution<Candidate> for Standard {\n\n fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Candidate {\n\n Candidate(rng.gen())\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Default, PartialEq, PartialOrd, Eq, Ord)]\n\npub struct Node(pub Attributes);\n\n\n\nimpl Node {\n", "file_path": "src/utilities.rs", "rank": 25, "score": 29454.926657094475 }, { "content": " pub fn name(self) -> Name {\n\n self.0.name\n\n }\n\n}\n\n\n\nimpl Distribution<Node> for Standard {\n\n fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Node {\n\n Node(rng.gen())\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum NodeChange {\n\n AddWithState(Node, State),\n\n ReplaceWith(Name, Node, State),\n\n State(Node, State),\n\n Remove(Name),\n\n Elder(Node, bool),\n\n}\n\n\n", "file_path": "src/utilities.rs", "rank": 26, "score": 29454.843325641097 }, { "content": "// Copyright 2020 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. 
Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse rand::{\n\n distributions::{Distribution, Standard},\n\n Rng,\n\n};\n\nuse std::cmp::Ordering;\n\nuse std::fmt::{self, Debug, Formatter};\n\n\n\n#[derive(Debug, Clone, Copy, Default, PartialEq, PartialOrd, Eq, Ord)]\n\npub struct Name(pub i32);\n\n\n\nimpl Distribution<Name> for Standard {\n", "file_path": "src/utilities.rs", "rank": 27, "score": 29454.056218131256 }, { "content": "impl Default for NodeState {\n\n fn default() -> NodeState {\n\n NodeState {\n\n node: Default::default(),\n\n work_units_done: Default::default(),\n\n is_elder: Default::default(),\n\n state: State::Online,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Default, PartialEq, PartialOrd, Ord, Eq)]\n\npub struct Section(pub i32);\n\n\n\n#[derive(Debug, Clone, Copy, Default, PartialEq, PartialOrd, Ord, Eq)]\n\npub struct SectionInfo(pub Section, pub i32 /*contain full membership */);\n\n\n\nimpl Distribution<SectionInfo> for Standard {\n\n fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> SectionInfo {\n\n // Avoid randomly generating default `Section(0)`.\n", "file_path": "src/utilities.rs", "rank": 28, "score": 29453.65760940783 }, { "content": "\n\n#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord)]\n\npub enum State {\n\n // Online ordered first Online node are chosen for elder\n\n Online,\n\n // Relocating an adult that has reached its work unit count\n\n RelocatingAgeIncrease,\n\n // Relocating to a new hop with a shorter section prefix\n\n RelocatingHop,\n\n // Relocating back online node\n\n RelocatingBackOnline,\n\n // Complete relocation, only waiting for info to be processed\n\n Relocated(RelocatedInfo),\n\n // Not a full adult / Not known public id: still wait candidate info / connection\n\n WaitingCandidateInfo(RelocatedInfo),\n\n // Not a full adult: still wait proofing\n\n WaitingProofing,\n\n // When a 
node that was previous online lost connection\n\n Offline,\n\n}\n", "file_path": "src/utilities.rs", "rank": 29, "score": 29453.06845249031 }, { "content": " SectionInfo(Section(rng.gen_range(1, i32::max_value())), rng.gen())\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Default, PartialEq, PartialOrd, Ord, Eq)]\n\npub struct GenesisPfxInfo(pub SectionInfo);\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Ord, Eq)]\n\npub enum ChurnNeeded {\n\n Split,\n\n Merge,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct ChangeElder {\n\n pub changes: Vec<(Node, bool)>,\n\n pub new_section: SectionInfo,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n", "file_path": "src/utilities.rs", "rank": 30, "score": 29452.461880380713 }, { "content": " CheckResourceProof,\n\n\n\n AddElderNode(Node),\n\n RemoveElderNode(Node),\n\n NewSectionInfo(SectionInfo),\n\n\n\n WorkUnitIncrement,\n\n CheckRelocate,\n\n RefuseCandidate(Candidate),\n\n RelocateResponse(RelocatedInfo),\n\n RelocatedInfo(RelocatedInfo),\n\n\n\n CheckElder,\n\n\n\n Offline(Node),\n\n BackOnline(Node),\n\n\n\n NeighbourMerge(SectionInfo),\n\n}\n\n\n", "file_path": "src/utilities.rs", "rank": 31, "score": 29451.363575779385 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct NodeState {\n\n pub node: Node,\n\n pub work_units_done: i32,\n\n pub is_elder: bool,\n\n pub state: State,\n\n}\n\n\n\nimpl NodeState {\n\n pub fn default_elder() -> NodeState {\n\n NodeState {\n\n is_elder: true,\n\n ..NodeState::default()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/utilities.rs", "rank": 32, "score": 29449.435841461476 }, { "content": "fn get_rng() -> XorShiftRng {\n\n let env_var_name = \"ROUTING_MODEL_SEED\";\n\n let seed = env::var(env_var_name)\n\n .ok()\n\n .map(|value| {\n\n unwrap!(\n\n value.parse::<u64>(),\n\n \"Env var 'ROUTING_MODEL_SEED={}' is not a valid u64.\",\n\n value\n\n )\n\n })\n\n .unwrap_or_else(rand::random);\n\n println!(\n\n \"To replay this '{}', 
set env var {}={}\",\n\n unwrap!(thread::current().name()),\n\n env_var_name,\n\n seed\n\n );\n\n XorShiftRng::seed_from_u64(seed)\n\n}\n\n\n", "file_path": "src/functional_tests.rs", "rank": 33, "score": 25871.587081814287 }, { "content": "fn initial_state_young_elders() -> MemberState {\n\n MemberState {\n\n action: Action::new(INNER_ACTION_YOUNG_ELDERS.clone()),\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "src/scenario_tests.rs", "rank": 34, "score": 24865.093951410523 }, { "content": "fn initial_state_old_elders() -> MemberState {\n\n MemberState {\n\n action: Action::new(INNER_ACTION_OLD_ELDERS.clone()),\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "src/scenario_tests.rs", "rank": 35, "score": 24865.093951410523 }, { "content": " Rpc::ConnectionInfoResponse {\n\n source: NAME_110,\n\n destination: OUR_NAME,\n\n connection_info: NAME_110.0,\n\n }\n\n .to_event(),\n\n Rpc::ConnectionInfoResponse {\n\n source: NAME_111,\n\n destination: OUR_NAME,\n\n connection_info: NAME_111.0,\n\n }\n\n .to_event(),\n\n TestEvent::SetResourceProof(NAME_111, ProofSource(2)).to_event(),\n\n LocalEvent::ResourceProofForElderReady(NAME_111).to_event(),\n\n TestEvent::SetResourceProof(NAME_110, ProofSource(2)).to_event(),\n\n LocalEvent::ResourceProofForElderReady(NAME_110).to_event(),\n\n Rpc::ResourceProofReceipt {\n\n candidate: OUR_NODE_CANDIDATE,\n\n source: NAME_111,\n\n }\n", "file_path": "src/scenario_tests.rs", "rank": 37, "score": 26.44642952565736 }, { "content": " self.send_rpc(Rpc::ConnectionInfoRequest {\n\n source,\n\n destination,\n\n connection_info: source.0,\n\n });\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn send_connection_info_response(&self, destination: Name) {\n\n let source = self.our_name();\n\n self.send_rpc(Rpc::ConnectionInfoResponse {\n\n source,\n\n destination,\n\n connection_info: source.0,\n\n });\n\n }\n\n\n\n pub fn send_candidate_info(&self, destination: Name, relocated_info: RelocatedInfo) {\n\n let _ = 
self.0.borrow_mut().connected.insert(destination);\n\n\n", "file_path": "src/actions.rs", "rank": 38, "score": 25.760472639130988 }, { "content": " self.get_section_elders(info.section_info)\n\n .into_iter()\n\n .map(Node::name)\n\n .partition(|name| self.0.borrow().connected.contains(name))\n\n }\n\n\n\n pub fn get_section_elders(&self, info: SectionInfo) -> Vec<Node> {\n\n unwrap!(self.0.borrow().section_members.get(&info)).clone()\n\n }\n\n\n\n pub fn get_next_resource_proof_part(&self, source: Name) -> Option<Proof> {\n\n self.0\n\n .borrow_mut()\n\n .resource_proofs_for_elder\n\n .get_mut(&source)\n\n .and_then(ProofSource::next_part)\n\n }\n\n\n\n pub fn send_connection_info_request(&self, destination: Name) {\n\n let source = self.our_name();\n", "file_path": "src/actions.rs", "rank": 40, "score": 24.142688952860208 }, { "content": " Rpc::ConnectionInfoRequest {\n\n source: OUR_NAME,\n\n destination: NAME_110,\n\n connection_info: OUR_NAME.0,\n\n }\n\n .to_event(),\n\n Rpc::ConnectionInfoRequest {\n\n source: OUR_NAME,\n\n destination: NAME_111,\n\n connection_info: OUR_NAME.0,\n\n }\n\n .to_event(),\n\n ActionTriggered::Scheduled(LocalEvent::JoiningTimeoutResendInfo).to_event(),\n\n ActionTriggered::Scheduled(LocalEvent::JoiningTimeoutProofRefused).to_event(),\n\n ],\n\n routine_complete_output: None,\n\n },\n\n );\n\n }\n\n\n", "file_path": "src/scenario_tests.rs", "rank": 41, "score": 24.08345796700436 }, { "content": "// Copyright 2020 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. 
Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse crate::{\n\n state::{MemberState, StartResourceProofState},\n\n utilities::{\n\n Candidate, CandidateInfo, LocalEvent, Name, ParsecVote, Proof, RelocatedInfo, Rpc,\n\n TryResult, WaitedEvent,\n\n },\n\n};\n\nuse unwrap::unwrap;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct RespondToRelocateRequests<'a>(pub &'a mut MemberState);\n", "file_path": "src/flows_dst.rs", "rank": 42, "score": 23.775874029526847 }, { "content": " #[allow(dead_code)]\n\n fn start_event_loop(&mut self) {\n\n self.0\n\n .action\n\n .schedule_event(LocalEvent::CheckResourceProofTimeout);\n\n }\n\n\n\n pub fn try_next(&mut self, event: WaitedEvent) -> TryResult {\n\n match event {\n\n WaitedEvent::Rpc(rpc) => self.try_rpc(rpc),\n\n WaitedEvent::ParsecConsensus(vote) => self.try_consensus(vote),\n\n WaitedEvent::LocalEvent(local_event) => self.try_local_event(local_event),\n\n }\n\n }\n\n\n\n fn try_rpc(&mut self, rpc: Rpc) -> TryResult {\n\n match rpc {\n\n Rpc::ResourceProofResponse {\n\n candidate, proof, ..\n\n } => {\n", "file_path": "src/flows_dst.rs", "rank": 44, "score": 23.602265464685765 }, { "content": " .to_event(),\n\n Rpc::ResourceProofReceipt {\n\n candidate: OUR_NODE_CANDIDATE,\n\n source: NAME_111,\n\n }\n\n .to_event(),\n\n ],\n\n );\n\n\n\n run_joining_test(\n\n \"When connected, resend connection info or candidate info as needed.\",\n\n &initial_state,\n\n &[LocalEvent::JoiningTimeoutResendInfo.to_event()],\n\n &AssertJoiningState {\n\n action_our_events: vec![\n\n Rpc::ConnectionInfoRequest {\n\n source: OUR_NAME,\n\n destination: NAME_109,\n\n connection_info: OUR_NAME.0,\n\n }\n", "file_path": "src/scenario_tests.rs", "rank": 47, "score": 22.84859393523637 }, { "content": " pub fn send_candidate_proof_request(&self, candidate: Candidate) {\n\n let source = self.our_name();\n\n let proof = ProofRequest { value: 
source.0 };\n\n self.send_rpc(Rpc::ResourceProof {\n\n candidate,\n\n proof,\n\n source,\n\n });\n\n }\n\n\n\n pub fn send_candidate_proof_receipt(&self, candidate: Candidate) {\n\n let source = self.our_name();\n\n self.send_rpc(Rpc::ResourceProofReceipt { candidate, source });\n\n }\n\n\n\n pub fn start_compute_resource_proof(&self, source: Name, _proof: ProofRequest) {\n\n self.action_triggered(ActionTriggered::ComputeResourceProofForElder(source));\n\n }\n\n\n\n pub fn get_connected_and_unconnected(&self, info: RelocatedInfo) -> (Vec<Name>, Vec<Name>) {\n", "file_path": "src/actions.rs", "rank": 48, "score": 22.662717004033283 }, { "content": " let mut initial_state = initial_joining_state_with_dst_200();\n\n initial_state.start(CANDIDATE_RELOCATED_INFO_132);\n\n\n\n let initial_state = arrange_initial_joining_state(&initial_state, &[]);\n\n\n\n run_joining_test(\n\n \"\",\n\n &initial_state,\n\n &[\n\n Rpc::ConnectionInfoResponse {\n\n source: NAME_110,\n\n destination: OUR_NAME,\n\n connection_info: NAME_110.0,\n\n }\n\n .to_event(),\n\n Rpc::ConnectionInfoResponse {\n\n source: NAME_111,\n\n destination: OUR_NAME,\n\n connection_info: NAME_111.0,\n\n }\n", "file_path": "src/scenario_tests.rs", "rank": 49, "score": 22.24077849751282 }, { "content": "// Copyright 2020 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. 
Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse crate::{\n\n actions::{Action, InnerAction},\n\n state::{JoiningState, MemberState},\n\n utilities::{\n\n ActionTriggered, Age, Attributes, Candidate, CandidateInfo, ChurnNeeded, Event,\n\n GenesisPfxInfo, LocalEvent, Name, Node, NodeChange, NodeState, ParsecVote, Proof,\n\n ProofRequest, ProofSource, RelocatedInfo, Rpc, Section, SectionInfo, State, TestEvent,\n\n TryResult,\n\n },\n\n};\n\nuse lazy_static::lazy_static;\n", "file_path": "src/scenario_tests.rs", "rank": 50, "score": 21.509587280469304 }, { "content": " source: OUR_NAME,\n\n destination: NAME_110,\n\n connection_info: OUR_NAME.0,\n\n }\n\n .to_event(),\n\n Rpc::ConnectionInfoRequest {\n\n source: OUR_NAME,\n\n destination: NAME_111,\n\n connection_info: OUR_NAME.0,\n\n }\n\n .to_event(),\n\n ActionTriggered::Scheduled(LocalEvent::JoiningTimeoutResendInfo).to_event(),\n\n ],\n\n routine_complete_output: None,\n\n },\n\n );\n\n }\n\n\n\n #[test]\n\n fn joining_receive_two_connection_info() {\n", "file_path": "src/scenario_tests.rs", "rank": 51, "score": 21.363076223993357 }, { "content": " fn joining_computed_two_proofs() {\n\n let mut initial_state = initial_joining_state_with_dst_200();\n\n initial_state.start(CANDIDATE_RELOCATED_INFO_132);\n\n let initial_state = arrange_initial_joining_state(&initial_state, &[]);\n\n\n\n run_joining_test(\n\n \"When proof computed, start sending response to correct Elder.\",\n\n &initial_state,\n\n &[\n\n TestEvent::SetResourceProof(NAME_111, ProofSource(2)).to_event(),\n\n LocalEvent::ResourceProofForElderReady(NAME_111).to_event(),\n\n TestEvent::SetResourceProof(NAME_110, ProofSource(2)).to_event(),\n\n LocalEvent::ResourceProofForElderReady(NAME_110).to_event(),\n\n ],\n\n &AssertJoiningState {\n\n action_our_events: vec![\n\n Rpc::ResourceProofResponse {\n\n candidate: OUR_NODE_CANDIDATE,\n\n 
destination: NAME_111,\n\n proof: Proof::ValidPart,\n", "file_path": "src/scenario_tests.rs", "rank": 52, "score": 21.323203911663782 }, { "content": " TestEvent::SetShortestPrefix(value) => self.0.borrow_mut().shortest_prefix = value,\n\n TestEvent::SetWorkUnitEnoughToRelocate(node) => {\n\n set_enough_work_to_relocate(node.name())\n\n }\n\n TestEvent::SetResourceProof(name, proof) => {\n\n let _ = self\n\n .0\n\n .borrow_mut()\n\n .resource_proofs_for_elder\n\n .insert(name, proof);\n\n }\n\n }\n\n }\n\n\n\n pub fn vote_parsec(&self, vote: ParsecVote) {\n\n self.0.borrow_mut().our_events.push(vote.to_event());\n\n }\n\n\n\n pub fn send_rpc(&self, rpc: Rpc) {\n\n self.0.borrow_mut().our_events.push(rpc.to_event());\n", "file_path": "src/actions.rs", "rank": 53, "score": 21.281297348299432 }, { "content": " let new_public_id = Candidate(self.0.borrow().our_attributes);\n\n self.send_rpc(Rpc::CandidateInfo(CandidateInfo {\n\n old_public_id: relocated_info.candidate,\n\n new_public_id,\n\n destination,\n\n waiting_candidate_name: relocated_info.target_interval_centre,\n\n valid: true,\n\n }));\n\n }\n\n\n\n pub fn send_resource_proof_response(&self, destination: Name, proof: Proof) {\n\n let candidate = Candidate(self.0.borrow().our_attributes);\n\n self.send_rpc(Rpc::ResourceProofResponse {\n\n candidate,\n\n destination,\n\n proof,\n\n });\n\n }\n\n\n\n pub fn send_merge_rpc(&self) {\n", "file_path": "src/actions.rs", "rank": 54, "score": 21.141984531460672 }, { "content": "\n\nimpl<'a> RespondToRelocateRequests<'a> {\n\n pub fn try_next(&mut self, event: WaitedEvent) -> TryResult {\n\n match event {\n\n WaitedEvent::Rpc(rpc) => self.try_rpc(rpc),\n\n WaitedEvent::ParsecConsensus(vote) => self.try_consensus(vote),\n\n _ => TryResult::Unhandled,\n\n }\n\n }\n\n\n\n fn try_rpc(&mut self, rpc: Rpc) -> TryResult {\n\n match rpc {\n\n Rpc::ExpectCandidate(candidate) => {\n\n self.vote_parsec_expect_candidate(candidate);\n\n TryResult::Handled\n\n }\n\n _ => 
TryResult::Unhandled,\n\n }\n\n }\n\n\n", "file_path": "src/flows_dst.rs", "rank": 55, "score": 20.964473834158067 }, { "content": "\n\n fn check_sibling_merge_info(&self) {\n\n if self.0.action.has_sibling_merge_info() {\n\n let new_section = self.0.action.merge_sibling_info_to_new_section();\n\n self.0\n\n .action\n\n .vote_parsec(ParsecVote::NewSectionInfo(new_section));\n\n }\n\n }\n\n\n\n pub fn try_next(&mut self, event: WaitedEvent) -> TryResult {\n\n match event {\n\n WaitedEvent::ParsecConsensus(vote) => self.try_consensus(vote),\n\n WaitedEvent::Rpc(_) | WaitedEvent::LocalEvent(_) => TryResult::Unhandled,\n\n }\n\n }\n\n\n\n fn try_consensus(&mut self, vote: ParsecVote) -> TryResult {\n\n match vote {\n\n ParsecVote::NewSectionInfo(_) => {\n", "file_path": "src/flows_elder.rs", "rank": 56, "score": 20.85056850102928 }, { "content": " self.rpc_proof(candidate, proof);\n\n TryResult::Handled\n\n }\n\n Rpc::CandidateInfo(info) => {\n\n self.rpc_info(info);\n\n TryResult::Handled\n\n }\n\n _ => TryResult::Unhandled,\n\n }\n\n }\n\n\n\n fn try_consensus(&mut self, vote: ParsecVote) -> TryResult {\n\n let for_candidate = self.has_candidate() && vote.candidate() == Some(self.candidate());\n\n\n\n match vote {\n\n ParsecVote::CheckResourceProof => {\n\n self.set_resource_proof_candidate();\n\n self.check_request_resource_proof();\n\n TryResult::Handled\n\n }\n", "file_path": "src/flows_dst.rs", "rank": 57, "score": 20.258205757248515 }, { "content": " /// Joining Relocate Node\n\n //////////////////\n\n\n\n #[test]\n\n fn joining_start() {\n\n let mut initial_state = initial_joining_state_with_dst_200();\n\n initial_state.start(CANDIDATE_RELOCATED_INFO_132);\n\n\n\n run_joining_test(\n\n \"\",\n\n &initial_state,\n\n &[],\n\n &AssertJoiningState {\n\n action_our_events: vec![\n\n Rpc::ConnectionInfoRequest {\n\n source: OUR_NAME,\n\n destination: NAME_109,\n\n connection_info: OUR_NAME.0,\n\n }\n\n .to_event(),\n", "file_path": "src/scenario_tests.rs", "rank": 
58, "score": 20.232401501194605 }, { "content": " &initial_state,\n\n &[Rpc::ResourceProofResponse {\n\n candidate: CANDIDATE_1,\n\n destination: OUR_NAME,\n\n proof: Proof::ValidEnd,\n\n }\n\n .to_event()],\n\n &AssertState {\n\n action_our_events: vec![\n\n ParsecVote::Online(CANDIDATE_1_OLD, CANDIDATE_1).to_event(),\n\n Rpc::ResourceProofReceipt {\n\n candidate: CANDIDATE_1,\n\n source: OUR_NAME,\n\n }\n\n .to_event(),\n\n ],\n\n },\n\n );\n\n }\n\n\n", "file_path": "src/scenario_tests.rs", "rank": 59, "score": 20.1064375734303 }, { "content": " // Proving node:\n\n pub resource_proofs_for_elder: BTreeMap<Name, ProofSource>,\n\n}\n\n\n\nimpl InnerAction {\n\n pub fn new_with_our_attributes(name: Attributes) -> Self {\n\n Self {\n\n our_attributes: name,\n\n our_section: Default::default(),\n\n our_current_nodes: Default::default(),\n\n\n\n our_events: Default::default(),\n\n\n\n shortest_prefix: Default::default(),\n\n section_members: Default::default(),\n\n next_target_interval: Name(0),\n\n\n\n merge_infos: Default::default(),\n\n churn_needed: Default::default(),\n\n\n", "file_path": "src/actions.rs", "rank": 60, "score": 19.872548277858932 }, { "content": " }\n\n\n\n pub fn try_next(&mut self, event: WaitedEvent) -> TryResult {\n\n match event {\n\n WaitedEvent::LocalEvent(local_event) => self.try_local_event(local_event),\n\n WaitedEvent::Rpc(rpc) => self.try_rpc(rpc),\n\n WaitedEvent::ParsecConsensus(vote) => self.try_consensus(vote),\n\n }\n\n }\n\n\n\n fn try_local_event(&mut self, local_event: LocalEvent) -> TryResult {\n\n match local_event {\n\n LocalEvent::TimeoutCheckRelocate => {\n\n self.vote_parsec_check_relocate();\n\n self.start_check_relocate_timeout();\n\n TryResult::Handled\n\n }\n\n _ => TryResult::Unhandled,\n\n }\n\n }\n", "file_path": "src/flows_src.rs", "rank": 61, "score": 19.856884390972724 }, { "content": "// Copyright 2020 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license 
<LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse crate::utilities::{\n\n ActionTriggered, Attributes, Candidate, CandidateInfo, ChangeElder, ChurnNeeded, Event,\n\n GenesisPfxInfo, LocalEvent, Name, Node, NodeChange, NodeState, ParsecVote, Proof, ProofRequest,\n\n ProofSource, RelocatedInfo, Rpc, Section, SectionInfo, State, TestEvent,\n\n};\n\nuse itertools::Itertools;\n\nuse std::{\n\n cell::RefCell,\n\n collections::{BTreeMap, BTreeSet},\n\n fmt::{self, Debug, Formatter},\n\n rc::Rc,\n", "file_path": "src/actions.rs", "rank": 63, "score": 19.81915831831189 }, { "content": " #[test]\n\n fn joining_resend_timeout() {\n\n let mut initial_state = initial_joining_state_with_dst_200();\n\n initial_state.start(CANDIDATE_RELOCATED_INFO_132);\n\n\n\n let initial_state = arrange_initial_joining_state(&initial_state, &[]);\n\n\n\n run_joining_test(\n\n \"When not yet connected, resend CandidateInfo.\",\n\n &initial_state,\n\n &[LocalEvent::JoiningTimeoutResendInfo.to_event()],\n\n &AssertJoiningState {\n\n action_our_events: vec![\n\n Rpc::ConnectionInfoRequest {\n\n source: OUR_NAME,\n\n destination: NAME_109,\n\n connection_info: OUR_NAME.0,\n\n }\n\n .to_event(),\n\n Rpc::ConnectionInfoRequest {\n", "file_path": "src/scenario_tests.rs", "rank": 64, "score": 19.79103863760532 }, { "content": " ],\n\n );\n\n\n\n run_test(\n\n \"Start resource proofing candidate: Send RPC.\",\n\n &initial_state,\n\n &[CANDIDATE_INFO_VALID_RPC_1.to_event()],\n\n &AssertState {\n\n action_our_events: vec![Rpc::ResourceProof {\n\n candidate: CANDIDATE_1,\n\n source: OUR_NAME,\n\n proof: OUR_PROOF_REQUEST,\n\n }\n\n 
.to_event()],\n\n },\n\n );\n\n }\n\n\n\n #[test]\n\n fn parsec_expect_candidate_then_candidate_info_twice() {\n", "file_path": "src/scenario_tests.rs", "rank": 65, "score": 19.609959016410333 }, { "content": "\n\n let optional_after_check_resource_proof = RandomEvents(vec![Rpc::ResourceProofResponse {\n\n candidate: new_public_id,\n\n destination: dst_name,\n\n proof: Proof::ValidPart,\n\n }\n\n .to_event()]);\n\n\n\n for (i, required_event) in required_events.iter().enumerate() {\n\n assert_eq!(TryResult::Handled, member_state.try_next(*required_event));\n\n optional_any_time.handle(&mut member_state, &mut rng);\n\n if i > 0 {\n\n optional_after_expect_candidate.handle(&mut member_state, &mut rng);\n\n }\n\n if i > 2 {\n\n optional_after_check_resource_proof.handle(&mut member_state, &mut rng);\n\n }\n\n }\n\n\n\n assert!(member_state\n", "file_path": "src/functional_tests.rs", "rank": 66, "score": 19.511195838444298 }, { "content": " &[\n\n Rpc::ResourceProof {\n\n candidate: OUR_NODE_CANDIDATE,\n\n source: NAME_111,\n\n proof: ProofRequest { value: NAME_111.0 },\n\n }\n\n .to_event(),\n\n TestEvent::SetResourceProof(NAME_111, ProofSource(2)).to_event(),\n\n LocalEvent::ResourceProofForElderReady(NAME_111).to_event(),\n\n Rpc::ResourceProofReceipt {\n\n candidate: OUR_NODE_CANDIDATE,\n\n source: NAME_111,\n\n }\n\n .to_event(),\n\n ],\n\n );\n\n\n\n run_joining_test(\n\n \"On receiving receipt for end, do not send anymore.\",\n\n &initial_state,\n", "file_path": "src/scenario_tests.rs", "rank": 67, "score": 19.13528926186884 }, { "content": " }\n\n .to_event(),\n\n Rpc::ResourceProofResponse {\n\n candidate: OUR_NODE_CANDIDATE,\n\n destination: NAME_110,\n\n proof: Proof::ValidPart,\n\n }\n\n .to_event(),\n\n ],\n\n routine_complete_output: None,\n\n },\n\n );\n\n }\n\n\n\n #[test]\n\n fn joining_got_part_proof_receipt() {\n\n let mut initial_state = initial_joining_state_with_dst_200();\n\n initial_state.start(CANDIDATE_RELOCATED_INFO_132);\n\n\n\n let 
initial_state = arrange_initial_joining_state(\n", "file_path": "src/scenario_tests.rs", "rank": 68, "score": 18.91415063155522 }, { "content": " &initial_state,\n\n &[\n\n Rpc::ResourceProof {\n\n candidate: OUR_NODE_CANDIDATE,\n\n source: NAME_111,\n\n proof: ProofRequest { value: NAME_111.0 },\n\n }\n\n .to_event(),\n\n TestEvent::SetResourceProof(NAME_111, ProofSource(2)).to_event(),\n\n LocalEvent::ResourceProofForElderReady(NAME_111).to_event(),\n\n ],\n\n );\n\n\n\n run_joining_test(\n\n \"On receiving receipt, send the next part (end) of the proof to that Elder.\",\n\n &initial_state,\n\n &[Rpc::ResourceProofReceipt {\n\n candidate: OUR_NODE_CANDIDATE,\n\n source: NAME_111,\n\n }\n", "file_path": "src/scenario_tests.rs", "rank": 69, "score": 18.839332777112872 }, { "content": " #[test]\n\n fn rpc_unexpected_candidate_info_resource_proof_response() {\n\n let description = \"Get unexpected RPC CandidateInfo and ResourceProofResponse. \\\n\n Candidate RPC may arrive after candidate was purged or accepted\";\n\n run_test(\n\n description,\n\n &initial_state_old_elders(),\n\n &[\n\n CANDIDATE_INFO_VALID_RPC_1.to_event(),\n\n Rpc::ResourceProofResponse {\n\n candidate: CANDIDATE_1,\n\n destination: OUR_NAME,\n\n proof: Proof::ValidEnd,\n\n }\n\n .to_event(),\n\n ],\n\n &AssertState::default(),\n\n );\n\n }\n\n\n", "file_path": "src/scenario_tests.rs", "rank": 70, "score": 18.821018839554444 }, { "content": "pub struct ProcessSplit<'a>(pub &'a mut MemberState);\n\n\n\nimpl<'a> ProcessSplit<'a> {\n\n pub fn start_event_loop(&mut self) {\n\n self.routine_state_mut().is_active = true;\n\n self.vote_for_split_sections();\n\n }\n\n\n\n fn exit_event_loop(&mut self) {\n\n self.routine_state_mut().is_active = false;\n\n self.0\n\n .as_start_merge_split_and_change_elders()\n\n .transition_exit_process_split()\n\n }\n\n\n\n pub fn try_next(&mut self, event: WaitedEvent) -> TryResult {\n\n match event {\n\n WaitedEvent::ParsecConsensus(vote) => 
self.try_consensus(&vote),\n\n WaitedEvent::Rpc(_) | WaitedEvent::LocalEvent(_) => TryResult::Unhandled,\n\n }\n", "file_path": "src/flows_elder.rs", "rank": 71, "score": 18.774779326221136 }, { "content": " let initial_state = arrange_initial_state(\n\n &initial_state_old_elders(),\n\n &[\n\n ParsecVote::ExpectCandidate(CANDIDATE_1_OLD).to_event(),\n\n ParsecVote::CheckResourceProof.to_event(),\n\n CANDIDATE_INFO_VALID_RPC_1.to_event(),\n\n ],\n\n );\n\n\n\n run_test(\n\n \"Start resource proofing candidate: Send same RPC again.\",\n\n &initial_state,\n\n &[CANDIDATE_INFO_VALID_RPC_1.to_event()],\n\n &AssertState {\n\n action_our_events: vec![Rpc::ResourceProof {\n\n candidate: CANDIDATE_1,\n\n source: OUR_NAME,\n\n proof: OUR_PROOF_REQUEST,\n\n }\n\n .to_event()],\n", "file_path": "src/scenario_tests.rs", "rank": 72, "score": 18.66883276620834 }, { "content": "impl<'a> StartMergeSplitAndChangeElders<'a> {\n\n // TODO - remove the `allow` once we have a test for this method.\n\n #[allow(dead_code)]\n\n fn start_event_loop(&mut self) {\n\n self.start_check_elder_timeout()\n\n }\n\n\n\n pub fn try_next(&mut self, event: WaitedEvent) -> TryResult {\n\n match event {\n\n WaitedEvent::ParsecConsensus(vote) => self.try_consensus(&vote),\n\n WaitedEvent::Rpc(rpc) => self.try_rpc(rpc),\n\n WaitedEvent::LocalEvent(LocalEvent::TimeoutCheckElder) => {\n\n self.vote_parsec_check_elder();\n\n TryResult::Handled\n\n }\n\n _ => TryResult::Unhandled,\n\n }\n\n }\n\n\n\n fn try_consensus(&mut self, vote: &ParsecVote) -> TryResult {\n", "file_path": "src/flows_elder.rs", "rank": 73, "score": 18.65719617731992 }, { "content": " ParsecVote::CheckResourceProof.to_event(),\n\n ParsecVote::Online(old_public_id, new_public_id).to_event(),\n\n ParsecVote::CheckElder.to_event(),\n\n ];\n\n\n\n let optional_any_time = RandomEvents(vec![\n\n ParsecVote::WorkUnitIncrement.to_event(),\n\n ParsecVote::CheckRelocate.to_event(),\n\n Rpc::ExpectCandidate(old_public_id).to_event(),\n\n ]);\n\n\n\n 
let optional_after_expect_candidate = RandomEvents(vec![\n\n Rpc::CandidateInfo(candidate_info).to_event(),\n\n Rpc::ConnectionInfoResponse {\n\n source: rng.gen(),\n\n destination: dst_name,\n\n connection_info: rng.gen(),\n\n }\n\n .to_event(),\n\n ]);\n", "file_path": "src/functional_tests.rs", "rank": 74, "score": 18.630848954955788 }, { "content": " LocalEvent::TimeoutAccept => {\n\n self.vote_parsec_purge_candidate();\n\n TryResult::Handled\n\n }\n\n LocalEvent::CheckResourceProofTimeout => {\n\n self.vote_parsec_check_resource_proof();\n\n TryResult::Handled\n\n }\n\n _ => TryResult::Unhandled,\n\n }\n\n }\n\n\n\n fn rpc_info(&mut self, info: CandidateInfo) {\n\n if self.has_candidate()\n\n && self.candidate() == info.old_public_id\n\n && self.0.action.is_valid_waited_info(info)\n\n {\n\n self.cache_candidate_info_and_send_resource_proof(info)\n\n } else {\n\n self.discard()\n", "file_path": "src/flows_dst.rs", "rank": 75, "score": 18.58117622998124 }, { "content": "// Copyright 2020 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. 
Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse crate::{\n\n state::{MemberState, ProcessElderChangeState, ProcessSplitState},\n\n utilities::{\n\n ChangeElder, LocalEvent, Node, ParsecVote, Rpc, SectionInfo, TryResult, WaitedEvent,\n\n },\n\n};\n\nuse unwrap::unwrap;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct StartMergeSplitAndChangeElders<'a>(pub &'a mut MemberState);\n\n\n", "file_path": "src/flows_elder.rs", "rank": 76, "score": 18.439366778916856 }, { "content": " .to_event()],\n\n &AssertJoiningState {\n\n action_our_events: vec![Rpc::ResourceProofResponse {\n\n candidate: OUR_NODE_CANDIDATE,\n\n destination: NAME_111,\n\n proof: Proof::ValidEnd,\n\n }\n\n .to_event()],\n\n routine_complete_output: None,\n\n },\n\n );\n\n }\n\n\n\n #[test]\n\n fn joining_got_end_proof_receipt() {\n\n let mut initial_state = initial_joining_state_with_dst_200();\n\n initial_state.start(CANDIDATE_RELOCATED_INFO_132);\n\n\n\n let initial_state = arrange_initial_joining_state(\n\n &initial_state,\n", "file_path": "src/scenario_tests.rs", "rank": 77, "score": 18.314218335407883 }, { "content": " &[Rpc::ResourceProofResponse {\n\n candidate: CANDIDATE_1,\n\n destination: OUR_NAME,\n\n proof: Proof::ValidEnd,\n\n }\n\n .to_event()],\n\n &AssertState::default(),\n\n );\n\n }\n\n\n\n #[test]\n\n fn parsec_expect_candidate_then_candidate_info_then_invalid_proof() {\n\n let initial_state = arrange_initial_state(\n\n &initial_state_old_elders(),\n\n &[\n\n ParsecVote::ExpectCandidate(CANDIDATE_1_OLD).to_event(),\n\n ParsecVote::CheckResourceProof.to_event(),\n\n CANDIDATE_INFO_VALID_RPC_1.to_event(),\n\n ],\n\n );\n", "file_path": "src/scenario_tests.rs", "rank": 78, "score": 18.245358528427925 }, { "content": "// Copyright 2020 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// 
http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse crate::{\n\n state::{MemberState, StartRelocateSrcState},\n\n utilities::{Candidate, LocalEvent, ParsecVote, RelocatedInfo, Rpc, TryResult, WaitedEvent},\n\n};\n\nuse unwrap::unwrap;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct StartDecidesOnNodeToRelocate<'a>(pub &'a mut MemberState);\n\n\n\nimpl<'a> StartDecidesOnNodeToRelocate<'a> {\n\n // TODO - remove the `allow` once we have a test for this method.\n", "file_path": "src/flows_src.rs", "rank": 79, "score": 18.06770847205224 }, { "content": " #[allow(dead_code)]\n\n fn start_event_loop(&mut self) {\n\n self.start_work_unit_timeout()\n\n }\n\n\n\n pub fn try_next(&mut self, event: WaitedEvent) -> TryResult {\n\n match event {\n\n WaitedEvent::LocalEvent(local_event) => self.try_local_event(local_event),\n\n WaitedEvent::ParsecConsensus(vote) => self.try_consensus(vote),\n\n\n\n WaitedEvent::Rpc(_) => TryResult::Unhandled,\n\n }\n\n }\n\n\n\n fn try_local_event(&mut self, local_event: LocalEvent) -> TryResult {\n\n match local_event {\n\n LocalEvent::TimeoutWorkUnit => {\n\n self.vote_parsec_work_unit_increment();\n\n self.start_work_unit_timeout();\n\n TryResult::Handled\n", "file_path": "src/flows_src.rs", "rank": 80, "score": 18.06541623939316 }, { "content": " &initial_state_old_elders(),\n\n &[\n\n ParsecVote::ExpectCandidate(CANDIDATE_1_OLD).to_event(),\n\n ParsecVote::CheckResourceProof.to_event(),\n\n CANDIDATE_INFO_VALID_RPC_1.to_event(),\n\n ],\n\n );\n\n\n\n run_test(\n\n \"Respond to proof from current candidate with receipt.\",\n\n &initial_state,\n\n &[Rpc::ResourceProofResponse {\n\n candidate: 
CANDIDATE_1,\n\n destination: OUR_NAME,\n\n proof: Proof::ValidPart,\n\n }\n\n .to_event()],\n\n &AssertState {\n\n action_our_events: vec![Rpc::ResourceProofReceipt {\n\n candidate: CANDIDATE_1,\n", "file_path": "src/scenario_tests.rs", "rank": 81, "score": 17.854921253442942 }, { "content": " ParsecVote::CheckResourceProof.to_event(),\n\n CANDIDATE_INFO_VALID_RPC_1.to_event(),\n\n ],\n\n );\n\n\n\n run_test(\n\n \"Discard final proof from a candidate that is not the current one.\",\n\n &initial_state,\n\n &[Rpc::ResourceProofResponse {\n\n candidate: CANDIDATE_2,\n\n destination: OUR_NAME,\n\n proof: Proof::ValidEnd,\n\n }\n\n .to_event()],\n\n &AssertState::default(),\n\n );\n\n }\n\n\n\n #[test]\n\n fn parsec_expect_candidate_then_purge_and_online_for_wrong_candidate() {\n", "file_path": "src/scenario_tests.rs", "rank": 82, "score": 17.81554509333013 }, { "content": " match vote {\n\n ParsecVote::NeighbourMerge(merge_info) => {\n\n self.store_merge_infos(*merge_info);\n\n TryResult::Handled\n\n }\n\n ParsecVote::CheckElder => {\n\n self.check_merge();\n\n TryResult::Handled\n\n }\n\n _ => TryResult::Unhandled,\n\n }\n\n }\n\n\n\n fn try_rpc(&mut self, rpc: Rpc) -> TryResult {\n\n match rpc {\n\n Rpc::Merge(section_info) => {\n\n self.vote_parsec_neighbour_merge(section_info);\n\n TryResult::Handled\n\n }\n\n\n", "file_path": "src/flows_elder.rs", "rank": 83, "score": 17.781652303505037 }, { "content": " #[test]\n\n fn parsec_expect_candidate_then_candidate_info_then_end_proof_twice() {\n\n let initial_state = arrange_initial_state(\n\n &initial_state_old_elders(),\n\n &[\n\n ParsecVote::ExpectCandidate(CANDIDATE_1_OLD).to_event(),\n\n ParsecVote::CheckResourceProof.to_event(),\n\n CANDIDATE_INFO_VALID_RPC_1.to_event(),\n\n Rpc::ResourceProofResponse {\n\n candidate: CANDIDATE_1,\n\n destination: OUR_NAME,\n\n proof: Proof::ValidEnd,\n\n }\n\n .to_event(),\n\n ],\n\n );\n\n\n\n run_test(\n\n \"Discard further ResourceProofResponse once voted online.\",\n\n 
&initial_state,\n", "file_path": "src/scenario_tests.rs", "rank": 84, "score": 17.667297071031918 }, { "content": " fn try_consensus(&mut self, vote: ParsecVote) -> TryResult {\n\n match vote {\n\n ParsecVote::ExpectCandidate(candidate) => {\n\n self.consensused_expect_candidate(candidate);\n\n TryResult::Handled\n\n }\n\n\n\n // Delegate to other event loops\n\n _ => TryResult::Unhandled,\n\n }\n\n }\n\n\n\n fn consensused_expect_candidate(&mut self, candidate: Candidate) {\n\n if self.0.action.check_shortest_prefix().is_some() {\n\n self.send_expect_candidate_rpc(candidate);\n\n return;\n\n }\n\n\n\n if let Some(info) = self.0.action.get_waiting_candidate_info(candidate) {\n\n self.resend_relocate_response_rpc(info);\n", "file_path": "src/flows_dst.rs", "rank": 85, "score": 17.495591846761005 }, { "content": "\n\n fn try_rpc(&mut self, rpc: Rpc) -> TryResult {\n\n match rpc {\n\n Rpc::RefuseCandidate(candidate) => {\n\n self.vote_parsec_refuse_candidate(candidate);\n\n TryResult::Handled\n\n }\n\n Rpc::RelocateResponse(info) => {\n\n self.vote_parsec_relocation_response(info);\n\n TryResult::Handled\n\n }\n\n _ => TryResult::Unhandled,\n\n }\n\n }\n\n\n\n fn try_consensus(&mut self, vote: ParsecVote) -> TryResult {\n\n match vote {\n\n ParsecVote::CheckRelocate => {\n\n self.check_need_relocate();\n\n self.update_wait_and_allow_resend();\n", "file_path": "src/flows_src.rs", "rank": 86, "score": 17.490764026910767 }, { "content": " &[Rpc::ResourceProofReceipt {\n\n candidate: OUR_NODE_CANDIDATE,\n\n source: NAME_111,\n\n }\n\n .to_event()],\n\n &AssertJoiningState {\n\n action_our_events: vec![],\n\n routine_complete_output: None,\n\n },\n\n );\n\n }\n\n\n\n #[test]\n\n fn joining_resend_timeout_one_proof_completed_one_in_progress() {\n\n let mut initial_state = initial_joining_state_with_dst_200();\n\n initial_state.start(CANDIDATE_RELOCATED_INFO_132);\n\n\n\n let initial_state = arrange_initial_joining_state(\n\n &initial_state,\n\n &[\n", "file_path": 
"src/scenario_tests.rs", "rank": 87, "score": 17.42084826946385 }, { "content": " source: OUR_NAME,\n\n }\n\n .to_event()],\n\n },\n\n );\n\n }\n\n\n\n #[test]\n\n fn parsec_expect_candidate_then_candidate_info_then_end_proof() {\n\n let initial_state = arrange_initial_state(\n\n &initial_state_old_elders(),\n\n &[\n\n ParsecVote::ExpectCandidate(CANDIDATE_1_OLD).to_event(),\n\n ParsecVote::CheckResourceProof.to_event(),\n\n CANDIDATE_INFO_VALID_RPC_1.to_event(),\n\n ],\n\n );\n\n\n\n run_test(\n\n \"Vote candidate online when receiving the end of the proof and respond with receipt.\",\n", "file_path": "src/scenario_tests.rs", "rank": 88, "score": 17.33803084848153 }, { "content": "// Copyright 2020 MaidSafe.net limited.\n\n//\n\n// This SAFE Network Software is licensed to you under the MIT license <LICENSE-MIT\n\n// http://opensource.org/licenses/MIT> or the Modified BSD license <LICENSE-BSD\n\n// https://opensource.org/licenses/BSD-3-Clause>, at your option. This file may not be copied,\n\n// modified, or distributed except according to those terms. 
Please review the Licences for the\n\n// specific language governing permissions and limitations relating to use of the SAFE Network\n\n// Software.\n\n\n\nuse crate::{\n\n actions::{Action, InnerAction},\n\n state::MemberState,\n\n utilities::{\n\n Candidate, CandidateInfo, Event, Node, NodeState, ParsecVote, Proof, RelocatedInfo, Rpc,\n\n State, TestEvent, TryResult,\n\n },\n\n};\n\nuse rand::{self, seq::SliceRandom, Rng, SeedableRng};\n\nuse rand_xorshift::XorShiftRng;\n\nuse std::{env, iter, thread};\n\nuse unwrap::unwrap;\n\n\n", "file_path": "src/functional_tests.rs", "rank": 89, "score": 16.98801717640239 }, { "content": " }\n\n .to_event(),\n\n Rpc::ResourceProof {\n\n candidate: OUR_NODE_CANDIDATE,\n\n source: NAME_110,\n\n proof: ProofRequest { value: NAME_111.0 },\n\n }\n\n .to_event(),\n\n ],\n\n &AssertJoiningState {\n\n action_our_events: vec![\n\n ActionTriggered::ComputeResourceProofForElder(NAME_111).to_event(),\n\n ActionTriggered::ComputeResourceProofForElder(NAME_110).to_event(),\n\n ],\n\n routine_complete_output: None,\n\n },\n\n );\n\n }\n\n\n\n #[test]\n", "file_path": "src/scenario_tests.rs", "rank": 90, "score": 16.962840908180738 }, { "content": " }\n\n\n\n pub fn split_needed(&self) -> bool {\n\n self.0\n\n .borrow()\n\n .churn_needed\n\n .map_or(false, |v| v == ChurnNeeded::Split)\n\n }\n\n\n\n pub fn complete_merge(&self) {\n\n self.0.borrow_mut().complete_merge()\n\n }\n\n\n\n pub fn has_sibling_merge_info(&self) -> bool {\n\n match self.0.borrow().merge_infos {\n\n Some(merge_info) => {\n\n let our_section = self.our_section().0;\n\n let their_section = merge_info.0;\n\n // Currently Section.0 is a just a (signed) number representing a name, as such we\n\n // simply use the arithmetic distance to determine sibling status.\n", "file_path": "src/actions.rs", "rank": 91, "score": 16.869733969867276 }, { "content": " .to_event(),\n\n ],\n\n &AssertJoiningState {\n\n action_our_events: vec![\n\n Rpc::CandidateInfo(CandidateInfo {\n\n 
old_public_id: OUR_NODE_CANDIDATE_OLD,\n\n new_public_id: OUR_NODE_CANDIDATE,\n\n destination: NAME_110,\n\n waiting_candidate_name: TARGET_INTERVAL_1,\n\n valid: true,\n\n })\n\n .to_event(),\n\n Rpc::CandidateInfo(CandidateInfo {\n\n old_public_id: OUR_NODE_CANDIDATE_OLD,\n\n new_public_id: OUR_NODE_CANDIDATE,\n\n destination: NAME_111,\n\n waiting_candidate_name: TARGET_INTERVAL_1,\n\n valid: true,\n\n })\n\n .to_event(),\n", "file_path": "src/scenario_tests.rs", "rank": 92, "score": 16.760695987364052 }, { "content": "\n\n pub fn remove_processed_state(&self) {\n\n let inner = &mut self.0.borrow_mut();\n\n inner.our_events.clear();\n\n }\n\n\n\n pub fn process_test_events(&self, event: TestEvent) {\n\n let set_enough_work_to_relocate = |name: Name| {\n\n let _ = self\n\n .0\n\n .borrow_mut()\n\n .our_current_nodes\n\n .get_mut(&name)\n\n .map(|state| state.work_units_done = state.node.0.age.0);\n\n };\n\n\n\n match event {\n\n TestEvent::SetChurnNeeded(churn_needed) => {\n\n self.0.borrow_mut().churn_needed = Some(churn_needed)\n\n }\n", "file_path": "src/actions.rs", "rank": 93, "score": 16.73336527559251 }, { "content": " .to_event(),\n\n Rpc::CandidateInfo(CandidateInfo {\n\n old_public_id: OUR_NODE_CANDIDATE_OLD,\n\n new_public_id: OUR_NODE_CANDIDATE,\n\n destination: NAME_110,\n\n waiting_candidate_name: TARGET_INTERVAL_1,\n\n valid: true,\n\n })\n\n .to_event(),\n\n Rpc::CandidateInfo(CandidateInfo {\n\n old_public_id: OUR_NODE_CANDIDATE_OLD,\n\n new_public_id: OUR_NODE_CANDIDATE,\n\n destination: NAME_111,\n\n waiting_candidate_name: TARGET_INTERVAL_1,\n\n valid: true,\n\n })\n\n .to_event(),\n\n ActionTriggered::Scheduled(LocalEvent::JoiningTimeoutResendInfo).to_event(),\n\n ],\n\n routine_complete_output: None,\n", "file_path": "src/scenario_tests.rs", "rank": 94, "score": 16.61673730371223 }, { "content": " ],\n\n routine_complete_output: None,\n\n },\n\n );\n\n }\n\n\n\n #[test]\n\n fn joining_receive_two_resource_proof() {\n\n let mut initial_state 
= initial_joining_state_with_dst_200();\n\n initial_state.start(CANDIDATE_RELOCATED_INFO_132);\n\n let initial_state = arrange_initial_joining_state(&initial_state, &[]);\n\n\n\n run_joining_test(\n\n \"Start computing resource proof when receiving ResourceProof RPC and setup timers.\",\n\n &initial_state,\n\n &[\n\n Rpc::ResourceProof {\n\n candidate: OUR_NODE_CANDIDATE,\n\n source: NAME_111,\n\n proof: ProofRequest { value: NAME_111.0 },\n", "file_path": "src/scenario_tests.rs", "rank": 95, "score": 16.47824282237776 }, { "content": " ];\n\n\n\n let optional_random_events = RandomEvents(vec![\n\n ParsecVote::WorkUnitIncrement.to_event(),\n\n ParsecVote::CheckRelocate.to_event(),\n\n Rpc::RelocateResponse(relocated_info).to_event(),\n\n ]);\n\n\n\n for required_event in &required_events {\n\n assert_eq!(TryResult::Handled, member_state.try_next(*required_event));\n\n optional_random_events.handle(&mut member_state, &mut rng);\n\n }\n\n\n\n assert_eq!(\n\n State::Relocated(relocated_info),\n\n unwrap!(member_state.action.node_state(relocating_node.0.name)).state\n\n );\n\n\n\n assert_eq!(\n\n TryResult::Handled,\n", "file_path": "src/functional_tests.rs", "rank": 96, "score": 16.436063835854178 }, { "content": "\n\n run_test(\n\n \"Discard invalid proofs.\",\n\n &initial_state,\n\n &[Rpc::ResourceProofResponse {\n\n candidate: CANDIDATE_1,\n\n destination: OUR_NAME,\n\n proof: Proof::Invalid,\n\n }\n\n .to_event()],\n\n &AssertState::default(),\n\n );\n\n }\n\n\n\n #[test]\n\n fn parsec_expect_candidate_then_candidate_info_then_end_proof_wrong_candidate() {\n\n let initial_state = arrange_initial_state(\n\n &initial_state_old_elders(),\n\n &[\n\n ParsecVote::ExpectCandidate(CANDIDATE_1_OLD).to_event(),\n", "file_path": "src/scenario_tests.rs", "rank": 97, "score": 16.258283475574398 }, { "content": " self.0.action.purge_node_info(self.waiting_candidate_name());\n\n self.finish_resource_proof()\n\n }\n\n\n\n fn finish_resource_proof(&mut self) {\n\n 
self.routine_state_mut().candidate = None;\n\n self.routine_state_mut().candidate_info = None;\n\n self.routine_state_mut().voted_online = false;\n\n\n\n self.0\n\n .action\n\n .schedule_event(LocalEvent::CheckResourceProofTimeout);\n\n }\n\n\n\n fn check_request_resource_proof(&mut self) {\n\n if self.has_candidate() {\n\n self.schedule_proof_timeout()\n\n } else {\n\n self.finish_resource_proof()\n\n }\n", "file_path": "src/flows_dst.rs", "rank": 98, "score": 16.1497482673272 }, { "content": " }\n\n\n\n fn schedule_proof_timeout(&mut self) {\n\n self.0.action.schedule_event(LocalEvent::TimeoutAccept);\n\n }\n\n\n\n fn send_resource_proof_receipt_rpc(&mut self) {\n\n self.0\n\n .action\n\n .send_candidate_proof_receipt(self.new_candidate());\n\n }\n\n\n\n fn candidate(&self) -> Candidate {\n\n unwrap!(self.routine_state().candidate).1\n\n }\n\n\n\n fn waiting_candidate_name(&self) -> Name {\n\n unwrap!(self.routine_state().candidate).0\n\n }\n\n\n", "file_path": "src/flows_dst.rs", "rank": 99, "score": 16.11019130503104 } ]
Rust
src/adapters/twitter.rs
w3f/polkadot-registrar-bot
3c5aa36cf5de8edae0ac434947eb585b7ca92c75
use crate::adapters::Adapter; use crate::primitives::{ExternalMessage, ExternalMessageType, MessageId, Timestamp}; use crate::Result; use hmac::{Hmac, Mac}; use rand::{thread_rng, Rng}; use reqwest::header::{self, HeaderValue}; use reqwest::{Client, Request}; use serde::de::DeserializeOwned; use serde::Serialize; use sha1::Sha1; use std::collections::{HashMap, HashSet}; use std::convert::{TryFrom, TryInto}; use std::time::{SystemTime, UNIX_EPOCH}; use std::{cmp::Ordering, hash::Hash}; #[derive(Clone, Debug, Eq, PartialEq)] pub struct ReceivedMessageContext { sender: TwitterId, id: u64, message: String, } #[derive(Debug, Clone, Eq, Hash, PartialEq, Serialize, Deserialize)] pub struct TwitterId(u64); impl TwitterId { pub fn as_u64(&self) -> u64 { self.0 } } impl Ord for TwitterId { fn cmp(&self, other: &Self) -> Ordering { self.0.cmp(&other.0) } } impl PartialOrd for TwitterId { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl From<u64> for TwitterId { fn from(val: u64) -> Self { TwitterId(val) } } impl TryFrom<String> for TwitterId { type Error = anyhow::Error; fn try_from(val: String) -> Result<Self> { Ok(TwitterId(val.parse::<u64>()?)) } } pub struct TwitterBuilder { consumer_key: Option<String>, consumer_secret: Option<String>, token: Option<String>, token_secret: Option<String>, } impl TwitterBuilder { pub fn new() -> Self { TwitterBuilder { consumer_key: None, consumer_secret: None, token: None, token_secret: None, } } pub fn consumer_key(mut self, key: String) -> Self { self.consumer_key = Some(key); self } pub fn consumer_secret(mut self, key: String) -> Self { self.consumer_secret = Some(key); self } pub fn token(mut self, token: String) -> Self { self.token = Some(token); self } pub fn token_secret(mut self, secret: String) -> Self { self.token_secret = Some(secret); self } pub fn build(self) -> Result<TwitterClient> { Ok(TwitterClient { client: Client::new(), consumer_key: self .consumer_key .ok_or_else(|| 
anyhow!("consumer key name not specified"))?, consumer_secret: self .consumer_secret .ok_or_else(|| anyhow!("consumer secret name not specified"))?, token: self.token.ok_or_else(|| anyhow!("token not specified"))?, token_secret: self .token_secret .ok_or_else(|| anyhow!("token secret not specified"))?, twitter_ids: HashMap::new(), cache: HashSet::new(), }) } } fn gen_nonce() -> String { let random: [u8; 16] = thread_rng().gen(); hex::encode(random) } fn gen_timestamp() -> u64 { let start = SystemTime::now(); start .duration_since(UNIX_EPOCH) .expect("Time went backwards") .as_secs() } #[derive(Clone)] pub struct TwitterClient { client: Client, consumer_key: String, consumer_secret: String, token: String, token_secret: String, twitter_ids: HashMap<TwitterId, String>, cache: HashSet<MessageId>, } impl TwitterClient { async fn request_messages(&mut self) -> Result<Vec<ExternalMessage>> { debug!("Requesting Twitter messages"); let mut messages = self .get_request::<ApiMessageRequest>( "https://api.twitter.com/1.1/direct_messages/events/list.json", None, ) .await? .parse()?; messages.retain(|message| !self.cache.contains(&message.id.into())); if messages.is_empty() { debug!("No new Twitter messages found"); return Ok(vec![]); } else { debug!("Fetched {} message(-s)", messages.len()); } #[rustfmt::skip] let mut to_lookup: Vec<&TwitterId> = messages .iter() .filter(|message| { !self.twitter_ids.contains_key(&message.sender) }) .map(|message| &message.sender) .collect(); to_lookup.sort(); to_lookup.dedup(); debug!("Looking up Twitter Ids"); if !to_lookup.is_empty() { let lookup_results = self.lookup_twitter_id(Some(&to_lookup), None).await?; self.twitter_ids.extend(lookup_results); } let mut parsed_messages = vec![]; for message in messages { let sender = self .twitter_ids .get(&message.sender) .ok_or_else(|| anyhow!("Failed to find Twitter handle based on Id"))? 
.clone(); let id = message.id.into(); parsed_messages.push(ExternalMessage { origin: ExternalMessageType::Twitter(sender), id, timestamp: Timestamp::now(), values: vec![message.message.into()], }); self.cache.insert(id); } Ok(parsed_messages) } fn authenticate_request( &self, url: &str, request: &mut Request, params: Option<&[(&str, &str)]>, ) -> Result<()> { use urlencoding::encode; let nonce = gen_nonce(); let timestamp = gen_timestamp().to_string(); let mut fields = vec![ ("oauth_consumer_key", self.consumer_key.as_str()), ("oauth_nonce", nonce.as_str()), ("oauth_signature_method", "HMAC-SHA1"), ("oauth_timestamp", &timestamp), ("oauth_token", self.token.as_str()), ("oauth_version", "1.0"), ]; if let Some(params) = params { fields.append(&mut params.to_vec()); } fields.sort_by(|(a, _), (b, _)| a.cmp(b)); let mut params = String::new(); for (name, val) in &fields { params.push_str(&format!("{}={}&", encode(name), encode(val))); } params.pop(); let base = format!("GET&{}&{}", encode(url), encode(&params)); let sign_key = format!( "{}&{}", encode(&self.consumer_secret), encode(&self.token_secret) ); let mut mac: Hmac<Sha1> = Hmac::new_from_slice(sign_key.as_bytes()).unwrap(); mac.update(base.as_bytes()); let sig = base64::encode(mac.finalize().into_bytes()); fields.push(("oauth_signature", &sig)); fields.sort_by(|(a, _), (b, _)| a.cmp(b)); let mut oauth_header = String::new(); oauth_header.push_str("OAuth "); for (name, val) in &fields { oauth_header.push_str(&format!("{}={}, ", encode(name), encode(val))) } oauth_header.pop(); oauth_header.pop(); request .headers_mut() .insert(header::AUTHORIZATION, HeaderValue::from_str(&oauth_header)?); Ok(()) } async fn get_request<T: DeserializeOwned>( &self, url: &str, params: Option<&[(&str, &str)]>, ) -> Result<T> { let mut full_url = String::from(url); if let Some(params) = params { full_url.push('?'); for (key, val) in params { full_url.push_str(&format!("{}={}&", key, val)); } full_url.pop(); } let mut request = 
self.client.get(&full_url).build()?; self.authenticate_request(url, &mut request, params)?; let resp = self.client.execute(request).await?; let txt = resp.text().await?; debug!("Twitter response: {:?}", txt); serde_json::from_str::<T>(&txt).map_err(|err| err.into()) } async fn lookup_twitter_id( &self, twitter_ids: Option<&[&TwitterId]>, accounts: Option<&[&String]>, ) -> Result<HashMap<TwitterId, String>> { let mut params = vec![]; let mut lookup = String::new(); if let Some(twitter_ids) = twitter_ids { for twitter_id in twitter_ids { lookup.push_str(&twitter_id.as_u64().to_string()); lookup.push(','); } lookup.pop(); params.push(("user_id", lookup.as_str())) } let mut lookup = String::new(); if let Some(accounts) = accounts { for account in accounts { lookup.push_str(&account.as_str().replace('@', "")); lookup.push(','); } lookup.pop(); params.push(("screen_name", lookup.as_str())) } #[derive(Deserialize)] struct UserObject { id: TwitterId, screen_name: String, } debug!("Params: {:?}", params); let user_objects = self .get_request::<Vec<UserObject>>( "https://api.twitter.com/1.1/users/lookup.json", Some(&params), ) .await?; if user_objects.is_empty() { return Err(anyhow!("unrecognized data")); } Ok(user_objects .into_iter() .map(|obj| (obj.id, format!("@{}", obj.screen_name.to_lowercase()))) .collect()) } } #[derive(Debug, Deserialize, Serialize)] struct ApiMessageRequest { events: Vec<ApiEvent>, } #[derive(Debug, Deserialize, Serialize)] struct ApiEvent { #[serde(rename = "type")] t_type: String, id: String, created_timestamp: Option<String>, message_create: ApiMessageCreate, } #[derive(Debug, Deserialize, Serialize)] struct ApiMessageCreate { target: ApiTarget, sender_id: Option<String>, message_data: ApiMessageData, } #[derive(Debug, Deserialize, Serialize)] struct ApiTarget { recipient_id: String, } #[derive(Debug, Deserialize, Serialize)] struct ApiMessageData { text: String, } impl ApiMessageRequest { fn parse(self) -> Result<Vec<ReceivedMessageContext>> { 
let mut messages = vec![]; for event in self.events { let message = ReceivedMessageContext { sender: event .message_create .sender_id .ok_or_else(|| anyhow!("unrecognized data"))? .try_into()?, message: event.message_create.message_data.text, id: event.id.parse().map_err(|_| anyhow!("unrecognized data"))?, }; messages.push(message); } Ok(messages) } } #[async_trait] impl Adapter for TwitterClient { type MessageType = (); fn name(&self) -> &'static str { "Twitter" } async fn fetch_messages(&mut self) -> Result<Vec<ExternalMessage>> { self.request_messages().await } async fn send_message(&mut self, _to: &str, _content: Self::MessageType) -> Result<()> { unimplemented!() } }
use crate::adapters::Adapter; use crate::primitives::{ExternalMessage, ExternalMessageType, MessageId, Timestamp}; use crate::Result; use hmac::{Hmac, Mac}; use rand::{thread_rng, Rng}; use reqwest::header::{self, HeaderValue}; use reqwest::{Client, Request}; use serde::de::DeserializeOwned; use serde::Serialize; use sha1::Sha1; use std::collections::{HashMap, HashSet}; use std::convert::{TryFrom, TryInto}; use std::time::{SystemTime, UNIX_EPOCH}; use std::{cmp::Ordering, hash::Hash}; #[derive(Clone, Debug, Eq, PartialEq)] pub struct ReceivedMessageContext { sender: TwitterId, id: u64, message: String, } #[derive(Debug, Clone, Eq, Hash, PartialEq, Serialize, Deserialize)] pub struct TwitterId(u64); impl TwitterId { pub fn as_u64(&self) -> u64 { self.0 } } impl Ord for TwitterId { fn cmp(&self, other: &Self) -> Ordering { self.0.cmp(&other.0) } } impl PartialOrd for TwitterId { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl From<u64> for TwitterId { fn from(val: u64) -> Self { TwitterId(val) } } impl TryFrom<String> for TwitterId { type Error = anyhow::Error; fn try_from(val: String) -> Result<Self> { Ok(TwitterId(val.parse::<u64>()?)) } } pub struct TwitterBuilder { consumer_key: Option<String>, consumer_secret: Option<String>, token: Option<String>, token_secret: Option<String>, } impl TwitterBuilder { pub fn new() -> Self { TwitterBuilder { consumer_key: None, consumer_secret: None, token: None, token_secret: None, } } pub fn consumer_key(mut self, key: String) -> Self { self.consumer_key = Some(key); self } pub fn consumer_secret(mut self, key: String) -> Self { self.consumer_secret = Some(key); self } pub fn token(mut self, token: String) -> Self { self.token = Some(token); self } pub fn token_secret(mut self, secret: String) -> Self { self.token_secret = Some(secret); self } pub fn build(self) -> Result<TwitterClient> { Ok(TwitterClient { client: Client::new(), consumer_key: self .consumer_key .ok_or_else(|| 
anyhow!("consumer key name not specified"))?, consumer_secret: self .consumer_secret .ok_or_else(|| anyhow!("consumer secret name not specified"))?, token: self.token.ok_or_else(|| anyhow!("token not specified"))?, token_secret: self .token_secret .ok_or_else(|| anyhow!("token secret not specified"))?, twitter_ids: HashMap::new(), cache: HashSet::new(), }) } } fn gen_nonce() -> String { let random: [u8; 16] = thread_rng().gen(); hex::encode(random) } fn gen_timestamp() -> u64 { let start = SystemTime::now(); start .duration_since(UNIX_EPOCH) .expect("Time went backwards") .as_secs() } #[derive(Clone)] pub struct TwitterClient { client: Client, consumer_key: String, consumer_secret: String, token: String, token_secret: String, twitter_ids: HashMap<TwitterId, String>, cache: HashSet<MessageId>, } impl TwitterClient { async fn request_messages(&mut self) -> Result<Vec<ExternalMessage>> { debug!("Requesting Twitter messages"); let mut messages = self .get_request::<ApiMessageRequest>( "https://api.twitter.com/1.1/direct_messages/events/list.json", None, ) .await? .parse()?; messages.retain(|message| !self.cache.contains(&message.id.into())); if messages.is_empty() { debug!("No new Twitter messages found"); return Ok(vec![]); } else { debug!("Fetched {} message(-s)", messages.len()); } #[rustfmt::skip] let mut to_lookup: Vec<&TwitterId> = messages .iter() .filter(|message| { !self.twitter_ids.contains_key(&message.sender) }) .map(|message| &message.sender) .collect(); to_lookup.sort(); to_lookup.dedup(); debug!("Looking up Twitter Ids"); if !to_lookup.is_empty() { let lookup_results = self.lookup_twitter_id(Some(&to_lookup), None).await?; self.twitter_ids.extend(lookup_results); } let mut parsed_messages = vec![]; for message in messages { let sender = self .twitter_ids .get(&message.sender) .ok_or_else(|| anyhow!("Failed to find Twitter handle based on Id"))? 
.clone(); let id = message.id.into(); parsed_messages.push(ExternalMessage { origin: ExternalMessageType::Twitter(sender), id, timestamp: Timestamp::now(), values: vec![message.message.into()], }); self.cache.insert(id); } Ok(parsed_messages) } fn authenticate_request( &self, url: &str, request: &mut Request, params: Option<&[(&str, &str)]>, ) -> Result<()> { use urlencoding::encode; let nonce = gen_nonce(); let timestamp = gen_timestamp().to_string(); let mut fields = vec![ ("oauth_consumer_key", self.consumer_key.as_str()), ("oauth_nonce", nonce.as_str()), ("oauth_signature_method", "HMAC-SHA1"), ("oauth_timestamp", &timestamp), ("oauth_token", self.token.as_str()), ("oauth_version", "1.0"), ]; if let Some(params) = params { fields.append(&mut params.to_vec()); } fields.sort_by(|(a, _), (b, _)| a.cmp(b)); let mut params = String::new(); for (name, val) in &fields { params.push_str(&format!("{}={}&", encode(name), encode(val))); } params.pop(); let base = format!("GET&{}&{}", encode(url), encode(&params)); let sign_key = format!( "{}&{}", encode(&self.consumer_secret), encode(&self.token_secret) ); let mut mac: Hmac<Sha1> = Hmac::new_from_slice(sign_key.as_bytes()).unwrap(); mac.update(base.as_bytes()); let sig = base64::encode(mac.finalize().into_bytes()); fields.push(("oauth_signature", &sig)); fields.sort_by(|(a, _), (b, _)| a.cmp(b)); let mut oauth_header = String::new(); oauth_header.push_str("OAuth "); for (name, val) in &fields { oauth_header.push_str(&format!("{}={}, ", encode(name), encode(val))) } oauth_header.pop(); oauth_header.pop(); request .headers_mut() .insert(header::AUTHORIZATION, HeaderValue::from_str(&oauth_header)?); Ok(()) } async fn get_request<T: DeserializeOwned>( &self, url: &str, params: Option<&[(&str, &str)]>, ) -> Result<T> { let mut full_url = String::from(url); if let Some(params) = params { full_url.push('?'); for (key, val) in params { full_url.push_str(&format!("{}={}&", key, val)); } full_url.pop(); } let mut request = 
self.client.get(&full_url).build()?; self.authenticate_request(url, &mut request, params)?; let resp = self.client.execute(request).await?; let txt = resp.text().await?; debug!("Twitter response: {:?}", txt); serde_json::from_str::<T>(&txt).map_err(|err| err.into()) } async fn lookup_twitter_id( &self, twitter_ids: Option<&[&TwitterId]>, accounts: Option<&[&String]>, ) -> Result<HashMap<TwitterId, String>> { let mut params = vec![]; let mut lookup = String::new(); if let Some(twitter_ids) = twitter_ids { for twitter_id in twitter_ids { lookup.push_str(&twitter_id.as_u64().to_string()); lookup.push(','); } lookup.pop(); params.push(("user_id", lookup.as_str())) } let mut lookup = String::new(); if let Some(accounts) = accounts { for account in accounts { lookup.push_str(&account.as_str().replace('@', "")); lookup.push(','); } lookup.pop(); params.push(("screen_name", lookup.as_str())) } #[derive(Deserialize)] struct UserObject { id: TwitterId, screen_name: String, } debug!("Params: {:?}", params); let user_objects = self .get_request::<Vec<UserObject>>( "https://api.twitter.com/1.1/users/lookup.json", Some(&params), ) .await?; if user_objects.is_empty() { return Err(anyhow!("unrecognized data")); } Ok(user_objects .
} #[derive(Debug, Deserialize, Serialize)] struct ApiMessageRequest { events: Vec<ApiEvent>, } #[derive(Debug, Deserialize, Serialize)] struct ApiEvent { #[serde(rename = "type")] t_type: String, id: String, created_timestamp: Option<String>, message_create: ApiMessageCreate, } #[derive(Debug, Deserialize, Serialize)] struct ApiMessageCreate { target: ApiTarget, sender_id: Option<String>, message_data: ApiMessageData, } #[derive(Debug, Deserialize, Serialize)] struct ApiTarget { recipient_id: String, } #[derive(Debug, Deserialize, Serialize)] struct ApiMessageData { text: String, } impl ApiMessageRequest { fn parse(self) -> Result<Vec<ReceivedMessageContext>> { let mut messages = vec![]; for event in self.events { let message = ReceivedMessageContext { sender: event .message_create .sender_id .ok_or_else(|| anyhow!("unrecognized data"))? .try_into()?, message: event.message_create.message_data.text, id: event.id.parse().map_err(|_| anyhow!("unrecognized data"))?, }; messages.push(message); } Ok(messages) } } #[async_trait] impl Adapter for TwitterClient { type MessageType = (); fn name(&self) -> &'static str { "Twitter" } async fn fetch_messages(&mut self) -> Result<Vec<ExternalMessage>> { self.request_messages().await } async fn send_message(&mut self, _to: &str, _content: Self::MessageType) -> Result<()> { unimplemented!() } }
into_iter() .map(|obj| (obj.id, format!("@{}", obj.screen_name.to_lowercase()))) .collect()) }
function_block-function_prefix_line
[ { "content": "fn try_decode_hex(display_name: &mut String) {\n\n if display_name.starts_with(\"0x\") {\n\n // Might be a false positive. Leave it as is if it cannot be decoded.\n\n if let Ok(name) = hex::decode(&display_name[2..]) {\n\n if let Ok(name) = String::from_utf8(name) {\n\n *display_name = name;\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Message)]\n\n#[rtype(result = \"crate::Result<()>\")]\n\npub enum WatcherMessage {\n\n Ack(AckResponse),\n\n NewJudgementRequest(JudgementRequest),\n\n PendingJudgementsRequests(Vec<JudgementRequest>),\n\n ActiveDisplayNames(Vec<DisplayNameEntryRaw>),\n\n}\n\n\n\n#[derive(Debug, Clone, Message)]\n\n#[rtype(result = \"crate::Result<()>\")]\n\npub enum ClientCommand {\n\n ProvideJudgement(IdentityContext),\n\n RequestPendingJudgements,\n\n RequestDisplayNames,\n\n Ping,\n\n}\n\n\n", "file_path": "src/connector.rs", "rank": 2, "score": 146848.73220789921 }, { "content": "fn is_too_similar(existing: &str, new: &str, limit: f64) -> bool {\n\n let name_str = existing.to_lowercase();\n\n let account_str = new.to_lowercase();\n\n\n\n let similarities = [\n\n jaro(&name_str, &account_str),\n\n jaro_words(&name_str, &account_str, &[\" \", \"-\", \"_\"]),\n\n ];\n\n\n\n similarities.iter().any(|&s| s > limit)\n\n}\n\n\n", "file_path": "src/display_name.rs", "rank": 5, "score": 143159.4279016283 }, { "content": "pub fn alice_judgement_request() -> WatcherMessage {\n\n WatcherMessage::new_judgement_request(JudgementRequest::alice())\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 6, "score": 138693.98146425432 }, { "content": "pub fn bob_judgement_request() -> WatcherMessage {\n\n WatcherMessage::new_judgement_request(JudgementRequest::bob())\n\n}\n\n\n\n// async fn new_env() -> (TestServer, ConnectorMocker, MessageInjector) {\n\nasync fn new_env() -> (Database, ConnectorMocker, TestServer, MessageInjector) {\n\n // Setup MongoDb database.\n\n let random: u32 = thread_rng().gen_range(u32::MIN..u32::MAX);\n\n let db = 
Database::new(\n\n \"mongodb://localhost:27017/\",\n\n &format!(\"registrar_test_{}\", random),\n\n )\n\n .await\n\n .unwrap();\n\n\n\n // Setup API\n\n let (server, actor) = run_test_server(db.clone()).await;\n\n\n\n // Setup message verifier and injector.\n\n let injector = MessageInjector::new();\n", "file_path": "src/tests/mod.rs", "rank": 7, "score": 138693.98146425432 }, { "content": "fn jaro_words(left: &str, right: &str, delimiter: &[&str]) -> f64 {\n\n fn splitter<'a>(string: &'a str, delimiter: &[&str]) -> Vec<&'a str> {\n\n let mut all = vec![];\n\n\n\n for del in delimiter {\n\n let mut words: Vec<&str> = string\n\n .split(del)\n\n .map(|s| s.trim())\n\n .filter(|s| !s.is_empty())\n\n .collect();\n\n\n\n all.append(&mut words);\n\n }\n\n\n\n all\n\n }\n\n\n\n let left_words = splitter(left, delimiter);\n\n let right_words = splitter(right, delimiter);\n\n\n", "file_path": "src/display_name.rs", "rank": 8, "score": 116206.56728067304 }, { "content": "type Subscriber = Recipient<JsonResult<ResponseAccountState>>;\n\n\n\n#[derive(Clone, Debug, Message)]\n\n#[rtype(result = \"()\")]\n\npub struct SubscribeAccountState {\n\n pub subscriber: Subscriber,\n\n pub id_context: IdentityContext,\n\n}\n\n\n\n#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Message)]\n\n#[rtype(result = \"()\")]\n\npub struct NotifyAccountState {\n\n pub state: JudgementStateBlanked,\n\n pub notifications: Vec<NotificationMessage>,\n\n}\n\n\n\n// Identical to `NotifyAccountState`, but gets sent from the server to the\n\n// session for type-safety purposes.\n\n#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Message)]\n\n#[rtype(result = \"()\")]\n", "file_path": "src/api/judgement_state.rs", "rank": 10, "score": 90547.46222812582 }, { "content": "fn open_config() -> Result<Config> {\n\n // Open config file.\n\n let content = fs::read_to_string(\"config.yaml\")\n\n .or_else(|_| fs::read_to_string(\"/etc/registrar/config.yaml\"))\n\n .map_err(|_| {\n\n 
anyhow!(\"Failed to open config at 'config.yaml' or '/etc/registrar/config.yaml'.\")\n\n })?;\n\n\n\n // Parse config file as JSON.\n\n let config = serde_yaml::from_str::<Config>(&content)\n\n .map_err(|err| anyhow!(\"Failed to parse config: {:?}\", err))?;\n\n\n\n Ok(config)\n\n}\n\n\n\nasync fn config_adapter_listener(db: Database, config: AdapterConfig) -> Result<()> {\n\n let watchers = config.watcher.clone();\n\n let dn_config = config.display_name.clone();\n\n run_adapters(config.clone(), db.clone()).await?;\n\n run_connector(db, watchers, dn_config).await\n", "file_path": "src/lib.rs", "rank": 11, "score": 89242.58649749053 }, { "content": "fn config() -> DisplayNameConfig {\n\n DisplayNameConfig {\n\n enabled: true,\n\n limit: 0.85,\n\n }\n\n}\n\n\n\n#[actix::test]\n\nasync fn valid_display_name() {\n\n let (db, connector, mut api, _) = new_env().await;\n\n let verifier = DisplayNameVerifier::new(db.clone(), config());\n\n let mut stream = api.ws_at(\"/api/account_status\").await.unwrap();\n\n\n\n // Insert judgement request.\n\n connector.inject(alice_judgement_request()).await;\n\n let states = connector.inserted_states().await;\n\n let mut alice = states[0].clone();\n\n verifier.verify_display_name(&alice).await.unwrap();\n\n\n\n // Subscribe to endpoint.\n", "file_path": "src/tests/display_name_verification.rs", "rank": 14, "score": 81288.45685302673 }, { "content": "/// Convenience function for creating a full identity context when only the\n\n/// address itself is present. 
Only supports Kusama and Polkadot for now.\n\npub fn create_context(address: ChainAddress) -> IdentityContext {\n\n let chain = if address.as_str().starts_with('1') {\n\n ChainName::Polkadot\n\n } else {\n\n ChainName::Kusama\n\n };\n\n\n\n IdentityContext { address, chain }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::primitives::JudgementState;\n\n\n\n #[test]\n\n fn command_status() {\n\n let resp = Command::from_str(\"status Alice\").unwrap();\n\n assert_eq!(\n\n resp,\n", "file_path": "src/adapters/admin.rs", "rank": 15, "score": 72705.10150383832 }, { "content": "/// Handles incoming and outgoing websocket messages to and from the Watcher.\n\nstruct Connector {\n\n #[allow(clippy::type_complexity)]\n\n sink: Option<SinkWrite<Message, SplitSink<Framed<BoxedSocket, Codec>, Message>>>,\n\n db: Database,\n\n dn_verifier: DisplayNameVerifier,\n\n endpoint: String,\n\n network: ChainName,\n\n outgoing: UnboundedSender<ClientCommand>,\n\n inserted_states: Arc<RwLock<Vec<JudgementState>>>,\n\n // Tracks the last message received from the Watcher. 
If a certain treshold\n\n // was exceeded, the Connector attempts to reconnect.\n\n last_watcher_msg: Timestamp,\n\n}\n\n\n\nimpl Connector {\n\n async fn start(\n\n endpoint: String,\n\n network: ChainName,\n\n db: Database,\n\n dn_verifier: DisplayNameVerifier,\n", "file_path": "src/connector.rs", "rank": 16, "score": 54415.58813844443 }, { "content": "export interface FieldValue {\n\n type: string;\n\n value: string;\n", "file_path": "www/src/json.ts", "rank": 17, "score": 54379.864008569915 }, { "content": "struct Listener {\n\n client: Client,\n\n messages: Arc<Mutex<Vec<ExternalMessage>>>,\n\n db: Database,\n\n admins: Vec<MatrixHandle>,\n\n}\n\n\n\nimpl Listener {\n\n pub fn new(\n\n client: Client,\n\n messages: Arc<Mutex<Vec<ExternalMessage>>>,\n\n db: Database,\n\n admins: Vec<MatrixHandle>,\n\n ) -> Self {\n\n Self {\n\n client,\n\n messages,\n\n db,\n\n admins,\n\n }\n", "file_path": "src/adapters/matrix.rs", "rank": 18, "score": 52889.46060099318 }, { "content": "export interface Field {\n\n value: FieldValue;\n\n challenge: Challenge;\n\n failed_attempts: number;\n", "file_path": "www/src/json.ts", "rank": 19, "score": 52561.862349594136 }, { "content": "export interface CheckDisplayNameResult {\n\n type: string;\n\n value: any;\n", "file_path": "www/src/json.ts", "rank": 20, "score": 50681.69699394974 }, { "content": "#[async_trait]\n\npub trait Adapter {\n\n type MessageType;\n\n\n\n fn name(&self) -> &'static str;\n\n async fn fetch_messages(&mut self) -> Result<Vec<ExternalMessage>>;\n\n async fn send_message(&mut self, to: &str, content: Self::MessageType) -> Result<()>;\n\n}\n\n\n\n// Filler for adapters that do not send messages.\n\nimpl From<ExpectedMessage> for () {\n\n fn from(_: ExpectedMessage) -> Self {}\n\n}\n\n\n\npub struct AdapterListener {\n\n db: Database,\n\n}\n\n\n\nimpl AdapterListener {\n\n pub async fn new(db: Database) -> Self {\n\n AdapterListener { db }\n", "file_path": "src/adapters/mod.rs", "rank": 21, "score": 
48360.388082951176 }, { "content": " handleDisplayNameCheck(data: GenericMessage, display_name: string) {\n\n this.manager.wipeIntroduction();\n\n\n\n if (data.type == \"ok\") {\n\n let check: CheckDisplayNameResult = data.message;\n\n if (check.type == \"ok\") {\n\n this.manager.setDisplayNameVerification(display_name, BadgeValid);\n\n } else if (check.type = \"violations\") {\n\n let violations: Violation[] = check.value;\n\n this.manager.setDisplayNameViolation(display_name, violations, false);\n\n } else {\n\n // Should never occur.\n\n this.notifications.unexpectedError(\"pdnc#1\")\n\n }\n\n } else if (data.type == \"err\") {\n\n // Should never occur.\n\n this.notifications.unexpectedError(\"pdnc#2\")\n\n } else {\n\n // Should never occur.\n\n this.notifications.unexpectedError(\"pdnc#3\")\n\n }\n\n\n\n this.manager.resetButton();\n\n this.manager.wipeLiveUpdateInfo();\n\n this.manager.wipeVerificationOverviewContent();\n\n this.manager.wipeEmailSecondChallengeContent();\n\n this.manager.wipeUnsupportedContent();\n", "file_path": "www/src/index.ts", "rank": 22, "score": 47673.04655719074 }, { "content": "use crate::connector::DisplayNameEntry;\n\nuse crate::database::Database;\n\nuse crate::primitives::{ChainName, IdentityContext, JudgementState};\n\nuse crate::{DisplayNameConfig, Result};\n\nuse strsim::jaro;\n\n\n\nconst VIOLATIONS_CAP: usize = 5;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct DisplayNameVerifier {\n\n db: Database,\n\n config: DisplayNameConfig,\n\n}\n\n\n\nimpl DisplayNameVerifier {\n\n pub fn new(db: Database, config: DisplayNameConfig) -> Self {\n\n DisplayNameVerifier { db, config }\n\n }\n\n pub async fn check_similarities(\n\n &self,\n", "file_path": "src/display_name.rs", "rank": 23, "score": 35382.29707647094 }, { "content": " name: &str,\n\n chain: ChainName,\n\n // Skip comparison for this account, usually for the issuer itself\n\n // (required when re-requesting judgement).\n\n skip: Option<&IdentityContext>,\n\n ) -> 
Result<Vec<DisplayNameEntry>> {\n\n let current = self.db.fetch_display_names(chain).await?;\n\n\n\n let mut violations = vec![];\n\n for existing in current {\n\n if let Some(to_skip) = skip {\n\n // Skip account if specified.\n\n if &existing.context == to_skip {\n\n continue;\n\n }\n\n }\n\n\n\n if is_too_similar(name, &existing.display_name, self.config.limit) {\n\n // Only show up to `VIOLATIONS_CAP` violations.\n\n if violations.len() == VIOLATIONS_CAP {\n", "file_path": "src/display_name.rs", "rank": 24, "score": 35379.0330930086 }, { "content": " break;\n\n }\n\n\n\n violations.push(existing);\n\n }\n\n }\n\n\n\n Ok(violations)\n\n }\n\n pub async fn verify_display_name(&self, state: &JudgementState) -> Result<()> {\n\n if !self.config.enabled {\n\n return Ok(());\n\n }\n\n\n\n let name = if let Some(name) = state.display_name() {\n\n name\n\n } else {\n\n return Ok(());\n\n };\n\n\n", "file_path": "src/display_name.rs", "rank": 25, "score": 35371.8570447329 }, { "content": " let violations = self\n\n .check_similarities(name, state.context.chain, Some(&state.context))\n\n .await?;\n\n\n\n if !violations.is_empty() {\n\n self.db\n\n .insert_display_name_violations(&state.context, &violations)\n\n .await?;\n\n } else {\n\n self.db.set_display_name_valid(state).await?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/display_name.rs", "rank": 27, "score": 35364.76166565868 }, { "content": " let mut total = 0.0;\n\n\n\n for left_word in &left_words {\n\n let mut temp = 0.0;\n\n\n\n for right_word in &right_words {\n\n let sim = jaro(left_word, right_word);\n\n\n\n if sim > temp {\n\n temp = sim;\n\n }\n\n }\n\n\n\n total += temp;\n\n }\n\n\n\n total as f64 / left_words.len().max(right_words.len()) as f64\n\n}\n", "file_path": "src/display_name.rs", "rank": 31, "score": 35357.19341340155 }, { "content": "trait ToWsMessage {\n\n fn to_ws(&self) -> Message;\n\n}\n\n\n\nimpl<T: Serialize> ToWsMessage for T {\n\n fn to_ws(&self) -> Message {\n\n 
Message::Text(serde_json::to_string(&self).unwrap().into())\n\n }\n\n}\n\n\n\nimpl<T: DeserializeOwned> From<Option<Result<Frame, ProtocolError>>> for JsonResult<T> {\n\n fn from(val: Option<Result<Frame, ProtocolError>>) -> Self {\n\n match val.unwrap().unwrap() {\n\n Frame::Text(t) => serde_json::from_slice::<JsonResult<T>>(&t).unwrap(),\n\n _ => panic!(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tests/mod.rs", "rank": 46, "score": 32679.20749666303 }, { "content": " )\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\", tag = \"type\", content = \"value\")]\n\npub enum Outcome {\n\n Ok,\n\n Violations(Vec<DisplayNameEntry>),\n\n}\n\n\n\n#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Message)]\n\n#[rtype(result = \"JsonResult<Outcome>\")]\n\npub struct CheckDisplayName {\n\n pub check: String,\n\n pub chain: ChainName,\n\n}\n\n\n\npub async fn check_display_name(req: web::Json<CheckDisplayName>) -> HttpResponse {\n\n HttpResponse::Ok().json(\n\n DisplayNameChecker::from_registry()\n\n .send(req.into_inner())\n\n .await\n\n .unwrap(),\n\n )\n\n}\n", "file_path": "src/api/display_name_check.rs", "rank": 47, "score": 32504.370729281927 }, { "content": " let mut alice = states[0].clone();\n\n verifier.verify_display_name(&alice).await.unwrap();\n\n\n\n // Subscribe to endpoint.\n\n stream.send(IdentityContext::alice().to_ws()).await.unwrap();\n\n\n\n // Set expected result.\n\n let field = alice.get_field_mut(&IdentityFieldValue::DisplayName(\"Alice\".to_string()));\n\n let (passed, violations) = field.expected_display_name_check_mut();\n\n *passed = false;\n\n *violations = names;\n\n\n\n let expected = ResponseAccountState {\n\n state: alice.into(),\n\n // The UI already shows invalid display names in a specific way,\n\n // notification is not required.\n\n notifications: vec![],\n\n };\n\n\n\n // Check expected state.\n\n let resp: JsonResult<ResponseAccountState> = 
stream.next().await.into();\n\n assert_eq!(resp, JsonResult::Ok(expected));\n\n}\n", "file_path": "src/tests/display_name_verification.rs", "rank": 48, "score": 32495.539692036706 }, { "content": " stream.send(IdentityContext::alice().to_ws()).await.unwrap();\n\n\n\n // Set expected result.\n\n let field = alice.get_field_mut(&IdentityFieldValue::DisplayName(\"Alice\".to_string()));\n\n let (passed, violations) = field.expected_display_name_check_mut();\n\n *passed = true;\n\n *violations = vec![];\n\n\n\n let expected = ResponseAccountState {\n\n state: alice.into(),\n\n // The UI already shows invalid display names in a specific way,\n\n // notification is not required.\n\n notifications: vec![],\n\n };\n\n\n\n // Check current state.\n\n let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n assert_eq!(resp, JsonResult::Ok(expected));\n\n}\n\n\n", "file_path": "src/tests/display_name_verification.rs", "rank": 49, "score": 32495.010657605282 }, { "content": "use super::*;\n\nuse crate::api::{JsonResult, ResponseAccountState};\n\nuse crate::connector::DisplayNameEntry;\n\nuse crate::display_name::DisplayNameVerifier;\n\nuse crate::primitives::{IdentityContext, IdentityFieldValue};\n\nuse crate::DisplayNameConfig;\n\nuse futures::{SinkExt, StreamExt};\n\n\n\nimpl From<&str> for DisplayNameEntry {\n\n fn from(val: &str) -> Self {\n\n DisplayNameEntry {\n\n display_name: val.to_string(),\n\n // Filler value.\n\n context: IdentityContext::bob(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tests/display_name_verification.rs", "rank": 50, "score": 32493.968547824592 }, { "content": " DisplayNameChecker {\n\n verifier: DisplayNameVerifier::new(db, config),\n\n }\n\n }\n\n}\n\n\n\nimpl SystemService for DisplayNameChecker {}\n\nimpl Supervised for DisplayNameChecker {}\n\n\n\nimpl Actor for DisplayNameChecker {\n\n type Context = Context<Self>;\n\n}\n\n\n\nimpl Handler<CheckDisplayName> for DisplayNameChecker {\n\n type Result = 
ResponseActFuture<Self, JsonResult<Outcome>>;\n\n\n\n fn handle(&mut self, msg: CheckDisplayName, _ctx: &mut Self::Context) -> Self::Result {\n\n let verifier = self.verifier.clone();\n\n\n\n Box::pin(\n", "file_path": "src/api/display_name_check.rs", "rank": 51, "score": 32487.44431416183 }, { "content": "#[actix::test]\n\nasync fn invalid_display_name() {\n\n let (db, connector, mut api, _) = new_env().await;\n\n let verifier = DisplayNameVerifier::new(db.clone(), config());\n\n let mut stream = api.ws_at(\"/api/account_status\").await.unwrap();\n\n\n\n // Pre-fill database with active display names\n\n let names = vec![\n\n DisplayNameEntry::from(\"Alice\"),\n\n DisplayNameEntry::from(\"alice\"),\n\n DisplayNameEntry::from(\"Alicee\"),\n\n ];\n\n\n\n for name in &names {\n\n db.insert_display_name(name).await.unwrap();\n\n }\n\n\n\n // Insert judgement request.\n\n connector.inject(alice_judgement_request()).await;\n\n let states = connector.inserted_states().await;\n", "file_path": "src/tests/display_name_verification.rs", "rank": 52, "score": 32487.17783436413 }, { "content": " async move {\n\n trace!(\"Received a similarities check: {:?}\", msg);\n\n verifier\n\n .check_similarities(msg.check.as_str(), msg.chain, None)\n\n .await\n\n .map(|violations| {\n\n let outcome = if violations.is_empty() {\n\n Outcome::Ok\n\n } else {\n\n Outcome::Violations(violations)\n\n };\n\n\n\n JsonResult::Ok(outcome)\n\n })\n\n .map_err(|err| {\n\n error!(\"Failed to check for display name similarities: {:?}\", err)\n\n })\n\n .unwrap_or_else(|_| JsonResult::Err(\"Backend error, contact admin\".to_string()))\n\n }\n\n .into_actor(self),\n", "file_path": "src/api/display_name_check.rs", "rank": 53, "score": 32485.974629390475 }, { "content": "use super::JsonResult;\n\nuse crate::connector::DisplayNameEntry;\n\nuse crate::database::Database;\n\nuse crate::primitives::ChainName;\n\nuse crate::{display_name::DisplayNameVerifier, DisplayNameConfig};\n\nuse actix::prelude::*;\n\nuse 
actix_web::{web, HttpResponse};\n\n\n\npub struct DisplayNameChecker {\n\n verifier: DisplayNameVerifier,\n\n}\n\n\n\nimpl Default for DisplayNameChecker {\n\n fn default() -> Self {\n\n panic!(\"DisplayNameChecker is not initialized\");\n\n }\n\n}\n\n\n\nimpl DisplayNameChecker {\n\n pub fn new(db: Database, config: DisplayNameConfig) -> Self {\n", "file_path": "src/api/display_name_check.rs", "rank": 54, "score": 32483.83571673231 }, { "content": "trait ExtractSender<T> {\n\n type Error;\n\n\n\n fn extract_sender(self) -> std::result::Result<T, Self::Error>;\n\n}\n\n\n\nimpl ExtractSender<String> for String {\n\n type Error = anyhow::Error;\n\n\n\n fn extract_sender(self) -> Result<String> {\n\n if self.contains('<') {\n\n let parts = self.split('<');\n\n if let Some(email) = parts.into_iter().nth(1) {\n\n Ok(email.replace('>', \"\"))\n\n } else {\n\n Err(anyhow!(\"unrecognized data\"))\n\n }\n\n } else {\n\n Ok(self)\n\n }\n", "file_path": "src/adapters/email.rs", "rank": 55, "score": 31479.800501569178 }, { "content": "export interface GenericMessage {\n\n type: string;\n\n message: any;\n", "file_path": "www/src/json.ts", "rank": 56, "score": 27129.158212353166 }, { "content": "function notificationTypeResolver(notification: Notification): [string, string] {\n\n switch (notification.type) {\n\n case \"identity_inserted\": {\n\n return [\n\n `The judgement request has been discovered by the registrar service.`,\n\n \"bg-info text-dark\"\n\n ]\n\n }\n\n case \"identity_updated\": {\n\n return [\n\n `On-chain identity information has been modified.`,\n\n \"bg-info text-dark\"\n\n ]\n\n }\n\n case \"field_verified\": {\n\n let data = notification.value as NotificationFieldContext;\n\n return [\n\n `${capitalizeFirstLetter(data.field.type)} account \"${data.field.value}\" is verified. 
Challenge is valid.`,\n\n \"bg-success text-light\",\n\n ]\n\n }\n\n case \"field_verification_failed\": {\n\n let data = notification.value as NotificationFieldContext;\n\n return [\n\n `${capitalizeFirstLetter(data.field.type)} account \"${data.field.value}\" failed to get verified. Invalid challenge.`,\n\n \"bg-danger text-light\"\n\n ]\n\n }\n\n case \"second_field_verified\": {\n\n let data = notification.value as NotificationFieldContext;\n\n return [\n\n `${capitalizeFirstLetter(data.field.type)} account \"${data.field.value}\" is fully verified. Additional challenge is valid.`,\n\n \"bg-success text-light\"\n\n ]\n\n }\n\n case \"second_field_verification_failed\": {\n\n let data = notification.value as NotificationFieldContext;\n\n return [\n\n `${capitalizeFirstLetter(data.field.type)} account \"${data.field.value}\" failed to get verified. The additional challenge is invalid.`,\n\n \"bg-danger text-light\"\n\n ]\n\n }\n\n case \"awaiting_second_challenge\": {\n\n let data = notification.value as NotificationFieldContext;\n\n return [\n\n `A second challenge was sent to ${capitalizeFirstLetter(data.field.type)} account \"${data.field.value}\". Please also check the spam folder.`,\n\n \"bg-info text-dark\"\n\n ]\n\n }\n\n case \"identity_fully_verified\": {\n\n return [\n\n `<strong>Verification process completed!</strong> Judgement will be issued in a couple of minutes.`,\n\n \"bg-success text-light\"\n\n ]\n\n }\n\n case \"judgement_provided\": {\n\n return [\n\n `Judgement has been submitted!`,\n\n \"bg-success text-light\"\n\n ]\n\n }\n\n case \"manually_verified\": {\n\n let data = notification.value as ManuallyVerified;\n\n return [\n\n `Manually verified ${capitalizeFirstLetter(data.field)}`,\n\n \"bg-info text-light\"\n\n ]\n\n }\n\n case \"full_manual_verification\": {\n\n return [\n\n `Manually verified the identity. 
Judgement will be issued in a couple of minutes.`,\n\n \"bg-info text-light\"\n\n ]\n\n }\n\n default: {\n\n throw new Error(\"unrecognized notification\");\n\n }\n\n }\n", "file_path": "www/src/notifications.ts", "rank": 57, "score": 26296.302427632854 }, { "content": "export interface NotificationFieldContext {\n\n context: Context;\n\n field: FieldValue;\n", "file_path": "www/src/json.ts", "rank": 58, "score": 26284.346072151526 }, { "content": "export interface DisplayNameChallenge {\n\n passed: boolean;\n\n violations: Violation[];\n", "file_path": "www/src/json.ts", "rank": 59, "score": 26062.29576610095 }, { "content": " unexpectedError(id: string) {\n\n this.displayError(`Unexpected internal error, please contact admin. Code: ${id}`);\n", "file_path": "www/src/notifications.ts", "rank": 60, "score": 25459.918437874818 }, { "content": " displayError(message: string) {\n\n this.displayNotification(message, \"bg-danger text-light\", false);\n", "file_path": "www/src/notifications.ts", "rank": 61, "score": 25459.918437874818 }, { "content": " handleJudgementState(msg: MessageEvent) {\n\n const parsed: GenericMessage = JSON.parse(msg.data);\n\n if (parsed.type == \"ok\") {\n\n let message: StateNotification = parsed.message;\n\n this.manager.wipeIntroduction();\n\n this.manager.setButtonLiveAnimation();\n\n this.manager.setLiveUpdateInfo();\n\n this.manager.processVerificationOverviewTable(message.state);\n\n this.manager.processUnsupportedOverview(message.state);\n\n\n\n this.notifications.processNotifications(message.notifications);\n\n\n\n // This notification should only be displayed if no other notifications are available.\n\n if (message.state.is_fully_verified && message.notifications.length == 0) {\n\n this.notifications.displayNotification(\"The identity has been fully verified!\", \"bg-success text-light\", true)\n\n }\n\n } else if (parsed.type == \"err\") {\n\n let message: string = parsed.message;\n\n 
this.notifications.displayError(message);\n\n\n\n this.manager.resetButton();\n\n this.manager.wipeLiveUpdateInfo();\n\n } else {\n\n // Should never occur.\n\n this.notifications.unexpectedError(\"pas#1\")\n\n }\n", "file_path": "www/src/index.ts", "rank": 62, "score": 24679.24054690605 }, { "content": "#### Adapter Listener\n\n\n\n```yaml\n\ndb:\n\n uri: mongodb://localhost:27017/\n\n name: registrar_db\n\ninstance:\n\n role: adapter_listener\n\n config:\n\n watcher:\n\n - network: kusama\n\n endpoint: ws://localhost:8000\n\n - network: polkadot\n\n endpoint: ws://localhost:8001\n\n matrix:\n\n enabled: false\n\n homeserver: homeserver\n\n username: username\n\n password: password\n\n db_path: db_path\n\n admins: null\n\n twitter:\n\n enabled: false\n\n api_key: key\n\n api_secret: secret\n\n token: token\n\n token_secret: secret\n\n request_interval: 300\n\n email:\n\n enabled: false\n\n smtp_server: server\n\n imap_server: server\n\n inbox: inbox\n\n user: user\n\n password: password\n\n request_interval: 5\n\n display_name:\n\n enabled: true\n\n limit: 0.85\n\n```\n\n\n\n#### Session Notifier\n\n\n\n```yaml\n\ndb:\n\n uri: mongodb://localhost:27017/\n\n name: registrar_db\n\ninstance:\n\n role: session_notifier\n\n config:\n\n api_address: 0.0.0.0:8000\n\n display_name:\n\n enabled: true\n\n limit: 0.85\n\n\n\n```\n\n\n\n### Building\n\n\n\nTo build the binary:\n\n\n\n```console\n\n$ apt-get update\n\n$ apt-get -y install --no-install-recommends \\\n\n\tlld pkg-config openssl libssl-dev gcc g++ clang cmake\n\n```\n\n\n\nAnd to run the service:\n\n\n\n```console\n\n$ cargo run --release --bin registrar\n\n```\n\n\n\nTo build the UI (adjust any values in the config):\n\n\n\n```console\n\n$ cd www/\n\n$ cat config.json\n\n{\n\n \"http_url\": \"https://registrar-backend.web3.foundation/api/check_display_name\",\n\n \"ws_url\": \"wss://registrar-backend.web3.foundation/api/account_status\"\n\n}\n\n$ yarn build # output in dist/\n\n```\n", "file_path": "README.md", 
"rank": 63, "score": 23904.583214594957 }, { "content": "## Manual Judgements\n\n\n\nIn order to submit manual judgements, admins can join a room with the Matrix account of the registrar service as specified in [the config](#adapter-listener). Admins are specified as:\n\n\n\n```yaml\n\nadmins:\n\n - '@admin1:matrix.org'\n\n - '@admin2:matrix.org'\n\n - '@admin3:matrix.org'\n\n```\n\n\n\nIf there should not be any admins, then just set the value to `admins: null`. Those specified admins have the permission to send Matrix messages to the bot in order to perform an action.\n\n\n\n### Identity Status\n\n\n\n* `status <ADDR>` - Gets the (verbose) verification state.\n\n\n\nE.g.\n\n\n\n```\n\nstatus 1a2YiGNu1UUhJtihq8961c7FZtWGQuWDVMWTNBKJdmpGhZP\n\n```\n\n\n\n### Account Verification\n\n\n\n* `verify <ADDR> [FIELD]...` - Manually verifies the provided field(s).\n\n * Supported fields: `legalname`, `displayname`, `email`, `web`, `twitter`, `matrix`, `all`.\n\n\n\nE.g.\n\n\n\n```\n\nverify 1a2YiGNu1UUhJtihq8961c7FZtWGQuWDVMWTNBKJdmpGhZP displayname email\n\n```\n\n\n\n**NOTE**: The `all` field, as the name implies, verifies the full identity and (re-)issues a judgement extrinsic.\n\n\n\n### Help\n\n\n\n* `help` - Displays a help message.\n\n\n\n## Setup\n\n\n\n### Config\n\n\n\nBoth types of configuration, respectively the _adapter listener_ and _session notifier_ can be seen in the [`config/`](./config) directory.\n\n\n", "file_path": "README.md", "rank": 64, "score": 23903.93574590454 }, { "content": "[![CircleCI](https://circleci.com/gh/w3f/polkadot-registrar-challenger.svg?style=svg)](https://circleci.com/gh/w3f/polkadot-registrar-challenger)\n\n\n\n# Polkadot Registrar Service (beta)\n\n\n\nAn automated registrar service for [Polkadot on-chain identities](https://wiki.polkadot.network/docs/learn-identity).\n\n\n\n* App: https://registrar.web3.foundation/\n\n\n\n![Registrar 
preview](https://raw.githubusercontent.com/w3f/polkadot-registrar-challenger/master/registrar_preview.png)\n\n\n\n## About\n\n\n\nThis service (\"the challenger\") is responsible for veryifing accounts and providing a HTTP and websocket API to the UI. The full list of features includes:\n\n\n\n* Verification\n\n * Display name\n\n * Email\n\n * Twitter\n\n * Matrix\n\n* API\n\n * Websocket API for live notifications and state changes.\n\n * Rest API for display name checks.\n\n* Communication with [the watcher](#watcher-service)\n\n * Request pending judgement.\n\n * Request active display names of other identities.\n\n * Send judgement to the watcher to issue a judgement extrinsic.\n\n* [Manual judgements](#manual-judgements)\n\n * The registrar supports manual judgements via a Matrix bot.\n\n\n\nOn judgement request, the challenger generates challenges for each specified account (email, etc.) of the identity and expects those challenges to be sent to the registrar service by the user for verification. Display names are verified by matching those with the display names of already verified identities and deciding on a judgement based on a [similarity ranking](https://en.wikipedia.org/wiki/Jaro%E2%80%93Winkler_distance).\n\n\n\n## Watcher Service\n\n\n\nThis service only verifies identities, but does not interact with the Kusama/Polkadot blockchain directly. 
Rather, it communicates with [the watcher](https://github.com/w3f/polkadot-registrar-watcher) which is responsible for any blockchain interaction.\n\n\n\n## Web App / UI\n\n\n\nThe UI can be found in the [`www/`](./www) directory, which is automatically built and deployed via [Github Actions](./.github/workflows/gh-pages.yml).\n\n\n", "file_path": "README.md", "rank": 65, "score": 23896.70320464018 }, { "content": " setDisplayNameVerification(name: string, validity: string) {\n\n this.div_display_name_overview.innerHTML = `\n\n <div class=\"col-10 \">\n\n <h2>Display name check</h2>\n\n <p>The display name <strong>${name}</strong> is ${validity}</p>\n\n </div>\n\n `;\n", "file_path": "www/src/content.ts", "rank": 66, "score": 23724.203731100664 }, { "content": " setDisplayNameViolation(name: string, violations: Violation[], show_hint: boolean) {\n\n let listed = \"\";\n\n for (let v of violations) {\n\n listed += `<li>\"${v.display_name}\" (by account <em>${v.context.address}</em>)</li>`\n\n }\n\n\n\n let hint = \"\";\n\n if (show_hint) {\n\n hint = `<p><strong>Hint:</strong> You can check for valid display names by selecting <em>\"Validate Display Name\"</em> in the search bar.</p>`\n\n }\n\n\n\n this.div_display_name_overview.innerHTML = `\n\n <div class=\"col-10 \">\n\n <h2>Display name check</h2>\n\n <p>The display name <strong>${name}</strong> is ${BadgeInvalid}. 
It's too similar to (an) existing display name(s):</p>\n\n <ul>\n\n ${listed}\n\n </ul>\n\n ${hint}\n\n </div>\n\n `;\n", "file_path": "www/src/content.ts", "rank": 67, "score": 23724.203731100664 }, { "content": "import { StateNotification, GenericMessage, Notification, CheckDisplayNameResult, Violation } from \"./json\";\n\nimport { ContentManager, capitalizeFirstLetter, BadgeValid } from './content';\n\nimport { NotificationHandler } from \"./notifications\";\n\n\n\ninterface Config {\n\n http_url: string;\n\n ws_url: string;\n\n}\n\n\n\nconst config: Config = require(\"../config.json\");\n\n\n\n// The primary manager of all actions/events, for both UI and server messages.\n\nclass ActionListerner {\n\n specify_network: HTMLInputElement;\n\n specify_action: HTMLInputElement;\n\n search_bar: HTMLInputElement;\n\n btn_execute_action: HTMLButtonElement;\n\n manager: ContentManager;\n\n notifications: NotificationHandler;\n\n\n\n constructor() {\n\n // Register relevant elements.\n\n this.btn_execute_action =\n\n document\n\n .getElementById(\"execute-action\")! as HTMLButtonElement;\n\n\n\n this.specify_action =\n\n document\n\n .getElementById(\"specify-action\")! as HTMLInputElement;\n\n\n\n this.specify_network =\n\n document\n\n .getElementById(\"specify-network\")! as HTMLInputElement;\n\n\n\n this.search_bar =\n\n document\n\n .getElementById(\"search-bar\")! as HTMLInputElement;\n\n\n\n const handler = new NotificationHandler;\n\n this.manager = new ContentManager(handler);\n\n this.notifications = handler;\n\n\n\n // Handler for choosing network, e.g. \"Kusama\" or \"Polkadot\".\n\n document\n\n .getElementById(\"network-options\")!\n\n .addEventListener(\"click\", (e: Event) => {\n\n this.specify_network\n\n .innerText = (e.target as HTMLAnchorElement).innerText;\n\n this.manager.resetButton();\n\n });\n\n\n\n // Handler for choosing action, e.g. 
\"Check Judgement\".\n\n document\n\n .getElementById(\"action-options\")!\n\n .addEventListener(\"click\", (e: Event) => {\n\n let target = (e.target as HTMLAnchorElement).innerText;\n\n if (target == \"Check Judgement\") {\n\n this.search_bar.placeholder = \"Account address...\"\n\n this.specify_action.innerText = target;\n\n } else if (target == \"Validate Display Name\") {\n\n this.search_bar.placeholder = \"Display Name...\"\n\n this.specify_action.innerText = target;\n\n }\n\n });\n\n\n\n // Handler for executing action and communicating with the backend API.\n\n this.btn_execute_action\n\n .addEventListener(\"click\", (_: Event) => {\n\n let action = this.specify_action.innerHTML;\n\n if (action == \"Check Judgement\") {\n\n window.location.href = \"?network=\"\n\n + this.specify_network.innerHTML.toLowerCase()\n\n + \"&address=\"\n\n + this.search_bar.value;\n\n } else if (action == \"Validate Display Name\") {\n\n this.executeAction();\n\n }\n\n });\n\n\n\n this.search_bar\n\n .addEventListener(\"input\", (_: Event) => {\n\n this.manager.resetButton();\n\n\n\n if (this.search_bar.value.startsWith(\"1\")) {\n\n this.specify_network.innerHTML = \"Polkadot\";\n\n } else {\n\n this.specify_network.innerHTML = \"Kusama\";\n\n }\n\n });\n\n\n\n // Bind 'Enter' key to action button.\n\n this.search_bar\n\n .addEventListener(\"keyup\", (event: Event) => {\n\n // Number 13 is the \"Enter\" key on the keyboard\n\n if ((event as KeyboardEvent).keyCode === 13) {\n\n // Cancel the default action, if needed\n\n event.preventDefault();\n\n this.btn_execute_action.click();\n\n }\n\n });\n\n\n\n // Add a listener for every notification. 
Required for closing.\n\n Array.from(document\n\n .getElementsByClassName(\"toast\")!)\n\n .forEach(element => {\n\n element\n\n .addEventListener(\"click\", (_: Event) => {\n\n });\n\n });\n\n\n\n // Get params from the webbrowser search bar, load data from server if\n\n // specified.\n\n let params = new URLSearchParams(window.location.search);\n\n let network = params.get(\"network\");\n\n let address = params.get(\"address\");\n\n\n\n if (network != null && address != null) {\n\n this.specify_network.innerHTML = capitalizeFirstLetter(network);\n\n this.search_bar.value = address;\n\n this.executeAction();\n\n }\n\n }\n\n // Executes the main logic, either the judgement state or display name check.\n\n executeAction() {\n\n this.manager.setButtonLoadingSpinner();\n\n\n\n const action = this.specify_action.innerHTML;\n\n const user_input = this.search_bar.value;\n\n const network = this.specify_network.innerHTML.toLowerCase();\n\n\n\n if (action == \"Check Judgement\") {\n\n const socket = new WebSocket(config.ws_url);\n\n\n\n window.setInterval(() => {\n\n socket.send(\"heartbeat\");\n\n }, 30000);\n\n\n\n // Send request to the server\n\n socket.onopen = () => {\n\n let msg = JSON.stringify({ address: user_input, chain: network });\n\n socket.send(msg);\n\n };\n\n\n\n // Parse received judgement state.\n\n socket.onmessage = (event: Event) => {\n\n let msg = (event as MessageEvent);\n\n this.handleJudgementState(msg);\n\n };\n\n } else if (action == \"Validate Display Name\") {\n\n let display_name = user_input;\n\n\n\n (async () => {\n\n let body = JSON.stringify({\n\n check: display_name,\n\n chain: network,\n\n });\n\n\n\n let response = await fetch(config.http_url,\n\n {\n\n method: \"POST\",\n\n headers: {\n\n \"Content-Type\": \"application/json\",\n\n },\n\n body: body,\n\n });\n\n\n\n let result: GenericMessage = JSON.parse(await response.text());\n\n this.handleDisplayNameCheck(result, display_name);\n\n })();\n\n }\n\n }\n\n // Handles the display 
name result received from the server.\n\n handleDisplayNameCheck(data: GenericMessage, display_name: string) {\n\n this.manager.wipeIntroduction();\n\n\n\n if (data.type == \"ok\") {\n\n let check: CheckDisplayNameResult = data.message;\n\n if (check.type == \"ok\") {\n\n this.manager.setDisplayNameVerification(display_name, BadgeValid);\n\n } else if (check.type = \"violations\") {\n\n let violations: Violation[] = check.value;\n\n this.manager.setDisplayNameViolation(display_name, violations, false);\n\n } else {\n\n // Should never occur.\n\n this.notifications.unexpectedError(\"pdnc#1\")\n\n }\n\n } else if (data.type == \"err\") {\n\n // Should never occur.\n\n this.notifications.unexpectedError(\"pdnc#2\")\n\n } else {\n\n // Should never occur.\n\n this.notifications.unexpectedError(\"pdnc#3\")\n\n }\n\n\n\n this.manager.resetButton();\n\n this.manager.wipeLiveUpdateInfo();\n\n this.manager.wipeVerificationOverviewContent();\n\n this.manager.wipeEmailSecondChallengeContent();\n\n this.manager.wipeUnsupportedContent();\n\n }\n\n // Handles the judgement state received from the server.\n\n handleJudgementState(msg: MessageEvent) {\n\n const parsed: GenericMessage = JSON.parse(msg.data);\n\n if (parsed.type == \"ok\") {\n\n let message: StateNotification = parsed.message;\n\n this.manager.wipeIntroduction();\n\n this.manager.setButtonLiveAnimation();\n\n this.manager.setLiveUpdateInfo();\n\n this.manager.processVerificationOverviewTable(message.state);\n\n this.manager.processUnsupportedOverview(message.state);\n\n\n\n this.notifications.processNotifications(message.notifications);\n\n\n\n // This notification should only be displayed if no other notifications are available.\n\n if (message.state.is_fully_verified && message.notifications.length == 0) {\n\n this.notifications.displayNotification(\"The identity has been fully verified!\", \"bg-success text-light\", true)\n\n }\n\n } else if (parsed.type == \"err\") {\n\n let message: string = parsed.message;\n\n 
this.notifications.displayError(message);\n\n\n\n this.manager.resetButton();\n\n this.manager.wipeLiveUpdateInfo();\n\n } else {\n\n // Should never occur.\n\n this.notifications.unexpectedError(\"pas#1\")\n\n }\n\n }\n\n}\n\n\n\nnew ActionListerner();\n", "file_path": "www/src/index.ts", "rank": 68, "score": 22536.24600054524 }, { "content": "import { capitalizeFirstLetter } from \"./content.js\";\n\nimport { Notification, NotificationFieldContext, ManuallyVerified } from \"./json\";\n\n\n\nexport class NotificationHandler {\n\n notify_idx: number\n\n div_notifications: HTMLElement;\n\n\n\n constructor() {\n\n this.notify_idx = 0;\n\n\n\n this.div_notifications =\n\n document\n\n .getElementById(\"div-notifications\")!;\n\n }\n\n\n\n processNotifications(notifications: Notification[]) {\n\n for (let notify of notifications) {\n\n try {\n\n const [message, color] = notificationTypeResolver(notify);\n\n this.displayNotification(message, color, false);\n\n } catch (error) {\n\n this.unexpectedError(\"pnntr#1\");\n\n }\n\n }\n\n }\n\n displayNotification(message: string, color: string, show_final: boolean) {\n\n this.div_notifications.insertAdjacentHTML(\n\n \"beforeend\",\n\n `<div id=\"toast-${this.notify_idx}\" class=\"toast show align-items-center ${color} border-0\" role=\"alert\" aria-live=\"assertive\"\n\n aria-atomic=\"true\">\n\n <div class=\"d-flex\">\n\n <div class=\"toast-body\">\n\n ${message}\n\n </div>\n\n <button id=\"toast-${this.notify_idx}-close-btn\" type=\"button\" class=\"btn-close btn-close-white me-2 m-auto\" data-bs-dismiss=\"toast\"\n\n aria-label=\"Close\"></button>\n\n </div>\n\n </div>\n\n `\n\n );\n\n\n\n // Add handler for close button.\n\n let idx = this.notify_idx;\n\n document\n\n .getElementById(`toast-${idx}-close-btn`)!\n\n .addEventListener(\"click\", (e: Event) => {\n\n let toast: HTMLElement = document\n\n .getElementById(`toast-${idx}`)!;\n\n\n\n toast.classList.remove(\"show\");\n\n toast.classList.add(\"hide\");\n\n 
});\n\n\n\n // Cleanup old toast, limit to eight max.\n\n let max = 8;\n\n if (show_final) {\n\n max = 1;\n\n }\n\n\n\n let old = this.notify_idx - max;\n\n if (old >= 0) {\n\n let toast: HTMLElement | null = document\n\n .getElementById(`toast-${old}`);\n\n\n\n if (toast) {\n\n toast.classList.remove(\"show\");\n\n toast.classList.add(\"hide\");\n\n }\n\n }\n\n\n\n this.notify_idx += 1;\n\n }\n\n displayError(message: string) {\n\n this.displayNotification(message, \"bg-danger text-light\", false);\n\n }\n\n unexpectedError(id: string) {\n\n this.displayError(`Unexpected internal error, please contact admin. Code: ${id}`);\n\n }\n\n}\n\n\n\nfunction notificationTypeResolver(notification: Notification): [string, string] {\n\n switch (notification.type) {\n\n case \"identity_inserted\": {\n\n return [\n\n `The judgement request has been discovered by the registrar service.`,\n\n \"bg-info text-dark\"\n\n ]\n\n }\n\n case \"identity_updated\": {\n\n return [\n\n `On-chain identity information has been modified.`,\n\n \"bg-info text-dark\"\n\n ]\n\n }\n\n case \"field_verified\": {\n\n let data = notification.value as NotificationFieldContext;\n\n return [\n\n `${capitalizeFirstLetter(data.field.type)} account \"${data.field.value}\" is verified. Challenge is valid.`,\n\n \"bg-success text-light\",\n\n ]\n\n }\n\n case \"field_verification_failed\": {\n\n let data = notification.value as NotificationFieldContext;\n\n return [\n\n `${capitalizeFirstLetter(data.field.type)} account \"${data.field.value}\" failed to get verified. Invalid challenge.`,\n\n \"bg-danger text-light\"\n\n ]\n\n }\n\n case \"second_field_verified\": {\n\n let data = notification.value as NotificationFieldContext;\n\n return [\n\n `${capitalizeFirstLetter(data.field.type)} account \"${data.field.value}\" is fully verified. 
Additional challenge is valid.`,\n\n \"bg-success text-light\"\n\n ]\n\n }\n\n case \"second_field_verification_failed\": {\n\n let data = notification.value as NotificationFieldContext;\n\n return [\n\n `${capitalizeFirstLetter(data.field.type)} account \"${data.field.value}\" failed to get verified. The additional challenge is invalid.`,\n\n \"bg-danger text-light\"\n\n ]\n\n }\n\n case \"awaiting_second_challenge\": {\n\n let data = notification.value as NotificationFieldContext;\n\n return [\n\n `A second challenge was sent to ${capitalizeFirstLetter(data.field.type)} account \"${data.field.value}\". Please also check the spam folder.`,\n\n \"bg-info text-dark\"\n\n ]\n\n }\n\n case \"identity_fully_verified\": {\n\n return [\n\n `<strong>Verification process completed!</strong> Judgement will be issued in a couple of minutes.`,\n\n \"bg-success text-light\"\n\n ]\n\n }\n\n case \"judgement_provided\": {\n\n return [\n\n `Judgement has been submitted!`,\n\n \"bg-success text-light\"\n\n ]\n\n }\n\n case \"manually_verified\": {\n\n let data = notification.value as ManuallyVerified;\n\n return [\n\n `Manually verified ${capitalizeFirstLetter(data.field)}`,\n\n \"bg-info text-light\"\n\n ]\n\n }\n\n case \"full_manual_verification\": {\n\n return [\n\n `Manually verified the identity. 
Judgement will be issued in a couple of minutes.`,\n\n \"bg-info text-light\"\n\n ]\n\n }\n\n default: {\n\n throw new Error(\"unrecognized notification\");\n\n }\n\n }\n", "file_path": "www/src/notifications.ts", "rank": 69, "score": 22536.24600054524 }, { "content": "export interface GenericMessage {\n\n type: string;\n\n message: any;\n\n}\n\n\n\nexport interface StateNotification {\n\n state: State;\n\n notifications: Notification[];\n\n}\n\n\n\nexport interface State {\n\n context: Context;\n\n is_fully_verified: boolean;\n\n completion_timestamp?: any;\n\n fields: Field[];\n\n}\n\n\n\nexport interface Context {\n\n address: string;\n\n chain: string;\n\n}\n\n\n\nexport interface Field {\n\n value: FieldValue;\n\n challenge: Challenge;\n\n failed_attempts: number;\n\n}\n\n\n\nexport interface FieldValue {\n\n type: string;\n\n value: string;\n\n}\n\n\n\nexport interface Challenge {\n\n type: string;\n\n content: any;\n\n}\n\n\n\nexport interface Content {\n\n expected: Expected;\n\n second?: Expected;\n\n}\n\n\n\nexport interface DisplayNameChallenge {\n\n passed: boolean;\n\n violations: Violation[];\n\n}\n\n\n\nexport interface Expected {\n\n value: string;\n\n is_verified: boolean;\n\n}\n\n\n\nexport interface Notification {\n\n type: string;\n\n value: any;\n\n}\n\n\n\nexport interface NotificationFieldContext {\n\n context: Context;\n\n field: FieldValue;\n\n}\n\n\n\nexport interface ManuallyVerified {\n\n context: Context;\n\n field: string;\n\n}\n\n\n\nexport interface CheckDisplayNameResult {\n\n type: string;\n\n value: any;\n\n}\n\n\n\nexport interface Violation {\n\n context: Context,\n\n display_name: string,\n\n}\n", "file_path": "www/src/json.ts", "rank": 70, "score": 22536.24600054524 }, { "content": "import { CheckDisplayNameResult, DisplayNameChallenge, GenericMessage, State, Violation } from './json';\n\nimport { NotificationHandler } from './notifications.js';\n\n\n\nconst BadgeVerified = `\n\n <span class=\"badge 
bg-success\">verified</span>\n\n`;\n\n\n\nconst BadgeVerifiedHalf = `\n\n <span class=\"badge bg-info\">verified (1/2)</span>\n\n`;\n\n\n\nconst BadgeUnverified = `\n\n <span class=\"badge bg-warning text-dark\">unverified</span>\n\n`;\n\n\n\nexport const BadgeValid = `\n\n <span class=\"badge bg-success\">valid</span>\n\n`;\n\n\n\nconst BadgeInvalid = `\n\n <span class=\"badge bg-danger\">invalid</span>\n\n`;\n\n\n\n// Manages the content in the UI. Mostly called within the `ActionListener`.\n\nexport class ContentManager {\n\n btn_execute_action: HTMLButtonElement;\n\n div_live_updates_info: HTMLElement;\n\n div_display_name_overview: HTMLElement;\n\n div_fully_verified_info: HTMLElement;\n\n div_verification_overview: HTMLElement;\n\n div_email_second_challenge: HTMLElement;\n\n div_unsupported_overview: HTMLElement;\n\n notifications: NotificationHandler;\n\n\n\n constructor(handler: NotificationHandler) {\n\n // Register relevant elements.\n\n this.btn_execute_action =\n\n document\n\n .getElementById(\"execute-action\")! as HTMLButtonElement;\n\n\n\n this.div_live_updates_info =\n\n document\n\n .getElementById(\"div-live-updates-info\")!;\n\n\n\n this.div_display_name_overview =\n\n document\n\n .getElementById(\"div-display-name-overview\")!;\n\n\n\n this.div_fully_verified_info =\n\n document\n\n .getElementById(\"div-fully-verified-info\")! 
as HTMLButtonElement;\n\n\n\n this.div_verification_overview =\n\n document\n\n .getElementById(\"div-verification-overview\")!;\n\n\n\n this.div_email_second_challenge =\n\n document\n\n .getElementById(\"div-email-second-challenge\")!;\n\n\n\n this.div_unsupported_overview =\n\n document\n\n .getElementById(\"div-unsupported-overview\")!;\n\n\n\n this.notifications = handler;\n\n }\n\n\n\n setButtonLoadingSpinner() {\n\n this.btn_execute_action.disabled = true;\n\n this.btn_execute_action\n\n .innerHTML = `\n\n <span class=\"spinner-border spinner-border-sm\" role=\"status\" aria-hidden=\"true\"></span>\n\n <span class=\"visually-hidden\"></span>\n\n `;\n\n }\n\n setButtonLiveAnimation() {\n\n this.btn_execute_action.innerHTML = `\n\n <div class=\"spinner-grow spinner-grow-sm\" role=\"status\">\n\n <span class=\"visually-hidden\"></span>\n\n </div>\n\n `;\n\n }\n\n resetButton() {\n\n this.btn_execute_action.innerHTML = `Go!`;\n\n this.btn_execute_action.disabled = false;\n\n }\n\n wipeIntroduction() {\n\n document.getElementById(\"introduction\")!.innerHTML = \"\";\n\n }\n\n processVerificationOverviewTable(state: State) {\n\n let table = \"\";\n\n\n\n let counter = 1;\n\n for (let field of state.fields) {\n\n if (field.challenge.type == \"expected_message\") {\n\n let validity;\n\n if (field.challenge.content.expected.is_verified) {\n\n if (field.challenge.content.second && !field.challenge.content.second!.is_verified) {\n\n validity = BadgeVerifiedHalf;\n\n\n\n this.setEmailSecondChallengeContent(field.value.value);\n\n } else {\n\n validity = BadgeVerified;\n\n\n\n if (field.value.type == \"email\") {\n\n this.wipeEmailSecondChallengeContent();\n\n }\n\n }\n\n } else {\n\n validity = BadgeUnverified;\n\n }\n\n\n\n // Specify the destination address.\n\n let to = \"N/A\";\n\n if (field.value.type == \"email\") {\n\n to = \"[email protected]\";\n\n } else if (field.value.type == \"twitter\") {\n\n to = \"@w3f_registrar\";\n\n } else if (field.value.type == 
\"matrix\") {\n\n to = \"@registrar-v2:web3.foundation\";\n\n }\n\n\n\n table += `\n\n <tr>\n\n <th scope=\"row\">${counter}</th>\n\n <td>${capitalizeFirstLetter(field.value.type)}</td>\n\n <td>${field.challenge.content.expected.value}</td>\n\n <td>${field.value.value}</td>\n\n <td>${to}</td>\n\n <td>${validity}</td>\n\n </tr>\n\n `;\n\n\n\n counter += 1;\n\n } else if (field.challenge.type == \"display_name_check\") {\n\n let validity;\n\n\n\n let challenge: DisplayNameChallenge = field.challenge.content;\n\n if (challenge.passed) {\n\n this.setDisplayNameVerification(field.value.value, BadgeValid);\n\n } else {\n\n validity = BadgeInvalid;\n\n this.setDisplayNameViolation(field.value.value, challenge.violations, true);\n\n }\n\n }\n\n }\n\n\n\n // Apply table to the page.\n\n this.setVerificationOverviewContent(table);\n\n }\n\n processUnsupportedOverview(state: State) {\n\n let unsupported = \"\";\n\n for (let field of state.fields) {\n\n if (field.challenge.type == \"unsupported\") {\n\n unsupported += `<li>${capitalizeFirstLetter(field.value.type)} (\"${field.value.value}\")</li>`;\n\n }\n\n }\n\n\n\n if (unsupported.length != 0) {\n\n this.setUnsupportedContent(unsupported);\n\n } else {\n\n this.wipeUnsupportedContent;\n\n }\n\n }\n\n setLiveUpdateInfo() {\n\n this.div_live_updates_info.innerHTML = `\n\n <div class=\"col-10\">\n\n <p class=\"text-center\"><em>Displaying live updates...</em></p>\n\n </div>\n\n `;\n\n }\n\n wipeLiveUpdateInfo() {\n\n this.div_live_updates_info.innerHTML = \"\";\n\n }\n\n setDisplayNameVerification(name: string, validity: string) {\n\n this.div_display_name_overview.innerHTML = `\n\n <div class=\"col-10 \">\n\n <h2>Display name check</h2>\n\n <p>The display name <strong>${name}</strong> is ${validity}</p>\n\n </div>\n\n `;\n\n }\n\n setDisplayNameViolation(name: string, violations: Violation[], show_hint: boolean) {\n\n let listed = \"\";\n\n for (let v of violations) {\n\n listed += `<li>\"${v.display_name}\" (by account 
<em>${v.context.address}</em>)</li>`\n\n }\n\n\n\n let hint = \"\";\n\n if (show_hint) {\n\n hint = `<p><strong>Hint:</strong> You can check for valid display names by selecting <em>\"Validate Display Name\"</em> in the search bar.</p>`\n\n }\n\n\n\n this.div_display_name_overview.innerHTML = `\n\n <div class=\"col-10 \">\n\n <h2>Display name check</h2>\n\n <p>The display name <strong>${name}</strong> is ${BadgeInvalid}. It's too similar to (an) existing display name(s):</p>\n\n <ul>\n\n ${listed}\n\n </ul>\n\n ${hint}\n\n </div>\n\n `;\n\n }\n\n setVerificationOverviewContent(table: string) {\n\n this.div_verification_overview.innerHTML = `\n\n <div class=\"col-10 table-responsive \">\n\n <h2>Account verification</h2>\n\n <p>Send each provided challenge <strong>from</strong> your account <strong>to</strong> the corresponding W3F account.\n\n You can just copy and paste the challenge directly.</p>\n\n <p><em>Note:</em> Twitter verification can take about 5 minutes.</p>\n\n <table id=\"verification-overview\" class=\"table table-striped table-dark\">\n\n <thead>\n\n <tr>\n\n <th scope=\"col\">#</th>\n\n <th scope=\"col\">Type</th>\n\n <th scope=\"col\">Challenge</th>\n\n <th scope=\"col\">From</th>\n\n <th scope=\"col\">To</th>\n\n <th scope=\"col\">Status</th>\n\n </tr>\n\n </thead>\n\n <tbody>\n\n ${table}\n\n </tbody>\n\n </table>\n\n </div>\n\n `;\n\n }\n\n wipeVerificationOverviewContent() {\n\n this.div_verification_overview.innerHTML = \"\";\n\n }\n\n setEmailSecondChallengeContent(address: string) {\n\n this.div_email_second_challenge.innerHTML = `\n\n <div class=\"col-10\">\n\n <h2>⚠️️ Additional Challenge</h2>\n\n <p>A message was sent from <em>[email protected]</em> to <strong>${address}</strong> containing an additional challenge\n\n (make sure to check the spam folder). 
Please insert that challenge into the following field:\n\n </p>\n\n <div class=\"input-group\">\n\n <input id=\"specify-second-challenge\" type=\"text\" class=\"form-control\"\n\n aria-label=\"Second challenge verification\" placeholder=\"Challenge...\">\n\n <button id=\"execute-second-challenge\" class=\"col-1 btn btn-primary\"\n\n type=\"button\">Verify</button>\n\n </div>\n\n </div>`;\n\n\n\n let second_challenge = document\n\n .getElementById(\"specify-second-challenge\")! as HTMLInputElement;\n\n\n\n let button = document\n\n .getElementById(\"execute-second-challenge\")! as HTMLButtonElement;\n\n\n\n second_challenge\n\n .addEventListener(\"input\", (_: Event) => {\n\n button.innerHTML = `Go!`;\n\n button.disabled = false;\n\n });\n\n\n\n button\n\n .addEventListener(\"click\", async (e: Event) => {\n\n button.disabled = true;\n\n button\n\n .innerHTML = `\n\n <span class=\"spinner-border spinner-border-sm\" role=\"status\" aria-hidden=\"true\"></span>\n\n <span class=\"visually-hidden\"></span>\n\n `;\n\n\n\n let body = JSON.stringify({\n\n entry: {\n\n type: \"email\",\n\n value: address,\n\n },\n\n challenge: second_challenge.value,\n\n });\n\n\n\n let _resp = await fetch(\"https://registrar-backend.web3.foundation/api/verify_second_challenge\",\n\n {\n\n method: \"POST\",\n\n headers: {\n\n \"Content-Type\": \"application/json\",\n\n },\n\n body: body,\n\n });\n\n\n\n // No need to check the result, since an appropriate event is\n\n // generated in the backend and submitted over the websocket\n\n // stream.\n\n\n\n // Reset elements.\n\n button.disabled = false;\n\n button.innerHTML = \"Verify\";\n\n second_challenge.value = \"Challenge...\";\n\n });\n\n }\n\n wipeEmailSecondChallengeContent() {\n\n this.div_email_second_challenge.innerHTML = \"\";\n\n }\n\n setUnsupportedContent(list: string) {\n\n this.div_unsupported_overview.innerHTML = `\n\n <div class=\"col-10\">\n\n <h2>🚨 Unsupported entries</h2>\n\n <ul>\n\n ${list}\n\n </ul>\n\n <p>The identity 
on-chain info contains fields that are not supported by the W3F registrar service in\n\n an automated manner and <em>must</em> be removed. If you really want to have those fields\n\n included, contact the appropriate authorities as described in the <em>\"Need help?\"</em> section below. Please prepare\n\n the necessary information so the manual verification can be completed as quickly as possible. For\n\n example, if you want to add a web address, make sure that the website somehow references\n\n your Kusama/Polkadot address.</p>\n\n </div>\n\n `;\n\n\n\n }\n\n wipeUnsupportedContent() {\n\n this.div_unsupported_overview.innerHTML = \"\";\n\n }\n\n}\n\n\n\nexport function capitalizeFirstLetter(word: string) {\n\n return (word.charAt(0).toUpperCase() + word.slice(1))\n\n .replace(\"_\", \" \");\n\n}\n", "file_path": "www/src/content.ts", "rank": 71, "score": 22536.24600054524 }, { "content": " IdentityFieldValue::DisplayName(name) => name.as_str(),\n\n _ => panic!(\"Failed to get display name. 
This is a bug.\"),\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize, Message)]\n\n#[serde(rename_all = \"snake_case\")]\n\n#[rtype(result = \"()\")]\n\npub struct ExternalMessage {\n\n pub origin: ExternalMessageType,\n\n pub id: MessageId,\n\n pub timestamp: Timestamp,\n\n pub values: Vec<MessagePart>,\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\", tag = \"type\", content = \"value\")]\n\npub enum ExternalMessageType {\n\n Email(String),\n", "file_path": "src/primitives.rs", "rank": 72, "score": 56.36115648985099 }, { "content": " Twitter(String),\n\n Matrix(String),\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub struct MessageId(u64);\n\n\n\nimpl From<u64> for MessageId {\n\n fn from(val: u64) -> Self {\n\n MessageId(val)\n\n }\n\n}\n\n\n\nimpl From<u32> for MessageId {\n\n fn from(val: u32) -> Self {\n\n MessageId::from(val as u64)\n\n }\n\n}\n\n\n", "file_path": "src/primitives.rs", "rank": 73, "score": 52.42047177208945 }, { "content": " message\n\n ))\n\n .build()?;\n\n\n\n let _ = smtp.send(email.into())?;\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl Adapter for EmailClient {\n\n type MessageType = ExpectedMessage;\n\n\n\n fn name(&self) -> &'static str {\n\n \"email\"\n\n }\n\n async fn fetch_messages(&mut self) -> Result<Vec<ExternalMessage>> {\n\n self.request_messages()\n\n }\n\n async fn send_message(&mut self, to: &str, content: Self::MessageType) -> Result<()> {\n\n Self::send_message(self, to, content.value.as_str()).await\n\n }\n\n}\n", "file_path": "src/adapters/email.rs", "rank": 74, "score": 48.041643007804296 }, { "content": " };\n\n\n\n let resp: JsonResult<ResponseAccountState> = stream_alice.next().await.into();\n\n assert_eq!(resp, JsonResult::Ok(exp_resp));\n\n\n\n // Verify Twitter.\n\n let msg = ExternalMessage {\n\n origin: 
ExternalMessageType::Twitter(\"@alice\".to_string()),\n\n id: MessageId::from(0u32),\n\n timestamp: Timestamp::now(),\n\n values: alice\n\n .get_field(&F::ALICE_TWITTER())\n\n .expected_message()\n\n .to_message_parts(),\n\n };\n\n\n\n let changed = alice\n\n .get_field_mut(&F::ALICE_TWITTER())\n\n .expected_message_mut()\n\n .verify_message(&msg);\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 75, "score": 47.80272808586206 }, { "content": " pub fn raw(&self) -> u64 {\n\n self.0\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub struct MessagePart(String);\n\n\n\nimpl From<String> for MessagePart {\n\n fn from(val: String) -> Self {\n\n MessagePart(val)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub struct Event {\n\n pub timestamp: Timestamp,\n\n pub event: NotificationMessage,\n", "file_path": "src/primitives.rs", "rank": 76, "score": 46.27591580074517 }, { "content": "}\n\n\n\nimpl Event {\n\n pub fn new(event: NotificationMessage) -> Self {\n\n Event {\n\n timestamp: Timestamp::now(),\n\n event,\n\n }\n\n }\n\n}\n\n\n\nimpl From<NotificationMessage> for Event {\n\n fn from(val: NotificationMessage) -> Self {\n\n Event::new(val)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize, Message)]\n\n#[serde(rename_all = \"snake_case\", tag = \"type\", content = \"value\")]\n\n#[rtype(result = \"()\")]\n", "file_path": "src/primitives.rs", "rank": 77, "score": 45.913206945466314 }, { "content": " false\n\n }\n\n pub fn set_verified(&mut self) {\n\n self.is_verified = true;\n\n }\n\n #[cfg(test)]\n\n pub fn to_message_parts(&self) -> Vec<MessagePart> {\n\n vec![self.value.clone().into()]\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\", tag = \"type\", content = \"value\")]\n\npub enum 
IdentityFieldValue {\n\n LegalName(String),\n\n DisplayName(String),\n\n Email(String),\n\n Web(String),\n\n Twitter(String),\n\n Matrix(String),\n", "file_path": "src/primitives.rs", "rank": 78, "score": 45.21410246706688 }, { "content": "impl Adapter for MatrixClient {\n\n type MessageType = ();\n\n\n\n fn name(&self) -> &'static str {\n\n \"Matrix\"\n\n }\n\n async fn fetch_messages(&mut self) -> Result<Vec<ExternalMessage>> {\n\n let mut lock = self.messages.lock().await;\n\n // Return messages and wipe inner field.\n\n Ok(std::mem::take(&mut *lock))\n\n }\n\n async fn send_message(&mut self, _to: &str, _content: Self::MessageType) -> Result<()> {\n\n unimplemented!()\n\n }\n\n}\n", "file_path": "src/adapters/matrix.rs", "rank": 79, "score": 44.734349685882414 }, { "content": " let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n assert_eq!(\n\n resp,\n\n JsonResult::Ok(ResponseAccountState::with_no_notifications(alice.clone()))\n\n );\n\n\n\n // Send valid message.\n\n injector\n\n .send(ExternalMessage {\n\n origin: ExternalMessageType::Matrix(\"@alice:matrix.org\".to_string()),\n\n id: MessageId::from(0u32),\n\n timestamp: Timestamp::now(),\n\n values: alice\n\n .get_field(&F::ALICE_MATRIX())\n\n .expected_message()\n\n .to_message_parts(),\n\n })\n\n .await;\n\n\n\n // Email account of Alice is now verified\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 80, "score": 43.63442726584792 }, { "content": " .await;\n\n\n\n // Start backend syncing service\n\n info!(\"Executing background sync\");\n\n let settings = SyncSettings::default().token(\n\n client\n\n .sync_token()\n\n .await\n\n .ok_or_else(|| anyhow!(\"Failed to acquire sync token\"))?,\n\n );\n\n\n\n actix::spawn(async move {\n\n client.clone().sync(settings).await;\n\n });\n\n\n\n Ok(MatrixClient { messages })\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Deserialize)]\n\npub struct MatrixHandle(String);\n\n\n", "file_path": "src/adapters/matrix.rs", 
"rank": 81, "score": 43.54345308974641 }, { "content": " }\n\n pub async fn verify_message(&self, message: &ExternalMessage) -> Result<()> {\n\n let coll = self.db.collection(IDENTITY_COLLECTION);\n\n\n\n // Fetch the current field state based on the message origin.\n\n let mut cursor = coll\n\n .find(\n\n doc! {\n\n \"fields.value\": message.origin.to_bson()?,\n\n },\n\n None,\n\n )\n\n .await?;\n\n\n\n // If a field was found, update it.\n\n while let Some(doc) = cursor.next().await {\n\n let mut id_state: JudgementState = from_document(doc?)?;\n\n let field_state = id_state\n\n .fields\n\n .iter_mut()\n", "file_path": "src/database.rs", "rank": 82, "score": 43.366647546908055 }, { "content": "\n\n#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub struct ChainAddress(String);\n\n\n\nimpl ChainAddress {\n\n pub fn as_str(&self) -> &str {\n\n self.0.as_str()\n\n }\n\n}\n\n\n\nimpl From<String> for ChainAddress {\n\n fn from(v: String) -> Self {\n\n ChainAddress(v)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum ChainName {\n", "file_path": "src/primitives.rs", "rank": 83, "score": 43.264727952029226 }, { "content": " Twitter(_) => ChallengeType::ExpectedMessage {\n\n expected: ExpectedMessage::random(),\n\n second: None,\n\n },\n\n Matrix(_) => ChallengeType::ExpectedMessage {\n\n expected: ExpectedMessage::random(),\n\n second: None,\n\n },\n\n }\n\n };\n\n\n\n IdentityField {\n\n value: val,\n\n challenge,\n\n failed_attempts: 0,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n", "file_path": "src/primitives.rs", "rank": 84, "score": 42.95719134189176 }, { "content": "\n\nimpl IdentityField {\n\n pub fn new(val: IdentityFieldValue) -> Self {\n\n use IdentityFieldValue::*;\n\n\n\n let challenge = {\n\n match val {\n\n LegalName(_) => ChallengeType::Unsupported { 
is_verified: None },\n\n Web(_) => ChallengeType::Unsupported { is_verified: None },\n\n PGPFingerprint(_) => ChallengeType::Unsupported { is_verified: None },\n\n Image(_) => ChallengeType::Unsupported { is_verified: None },\n\n Additional(_) => ChallengeType::Unsupported { is_verified: None },\n\n DisplayName(_) => ChallengeType::DisplayNameCheck {\n\n passed: false,\n\n violations: vec![],\n\n },\n\n Email(_) => ChallengeType::ExpectedMessage {\n\n expected: ExpectedMessage::random(),\n\n second: Some(ExpectedMessage::random()),\n\n },\n", "file_path": "src/primitives.rs", "rank": 85, "score": 42.62614728582401 }, { "content": " // Start Connector.\n\n let dn_verifier = DisplayNameVerifier::new(db.clone(), dn_config.clone());\n\n let conn =\n\n Connector::start(config.endpoint, config.network, db.clone(), dn_verifier).await?;\n\n\n\n info!(\"Connection initiated\");\n\n info!(\"Sending pending judgements request to Watcher\");\n\n let _ = conn.send(ClientCommand::RequestPendingJudgements).await?;\n\n\n\n Result::Ok(())\n\n }\n\n .instrument(span)\n\n .await?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\npub struct ResponseMessage<T> {\n", "file_path": "src/connector.rs", "rank": 86, "score": 42.62372147089172 }, { "content": " field: F::ALICE_EMAIL(),\n\n }],\n\n };\n\n\n\n let resp: JsonResult<ResponseAccountState> = stream_alice.next().await.into();\n\n assert_eq!(resp, JsonResult::Ok(exp_resp));\n\n\n\n // Verify Matrix.\n\n let msg = ExternalMessage {\n\n origin: ExternalMessageType::Matrix(\"@alice:matrix.org\".to_string()),\n\n id: MessageId::from(0u32),\n\n timestamp: Timestamp::now(),\n\n values: alice\n\n .get_field(&F::ALICE_MATRIX())\n\n .expected_message()\n\n .to_message_parts(),\n\n };\n\n\n\n let changed = alice\n\n .get_field_mut(&F::ALICE_MATRIX())\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 87, "score": 42.394891109636106 }, { "content": " state: alice.clone().into(),\n\n 
notifications: vec![NotificationMessage::ManuallyVerified {\n\n context: alice.context.clone(),\n\n field: RawFieldName::Twitter,\n\n }],\n\n };\n\n\n\n let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n assert_eq!(resp, JsonResult::Ok(expected));\n\n\n\n // Empty stream.\n\n assert!(stream.next().now_or_never().is_none());\n\n}\n\n\n\n#[actix::test]\n\nasync fn command_verify_unsupported_field() {\n\n let (db, connector, mut api, _) = new_env().await;\n\n let mut stream = api.ws_at(\"/api/account_status\").await.unwrap();\n\n\n\n // Insert judgement state with unsupported entry.\n", "file_path": "src/tests/process_admin_cmds.rs", "rank": 88, "score": 42.28558488723262 }, { "content": " stream.send(IdentityContext::alice().to_ws()).await.unwrap();\n\n\n\n // Check current state.\n\n let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n assert_eq!(\n\n resp,\n\n JsonResult::Ok(ResponseAccountState::with_no_notifications(alice.clone()))\n\n );\n\n\n\n // Send valid message.\n\n injector\n\n .send(ExternalMessage {\n\n origin: ExternalMessageType::Matrix(\"@alice:matrix.org\".to_string()),\n\n id: MessageId::from(0u32),\n\n timestamp: Timestamp::now(),\n\n values: alice\n\n .get_field(&F::ALICE_MATRIX())\n\n .expected_message()\n\n .to_message_parts(),\n\n })\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 89, "score": 41.958140486954626 }, { "content": "\n\n // Subscribe to endpoint.\n\n stream.send(IdentityContext::alice().to_ws()).await.unwrap();\n\n\n\n // Check current state.\n\n let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n assert_eq!(\n\n resp,\n\n JsonResult::Ok(ResponseAccountState::with_no_notifications(alice.clone()))\n\n );\n\n\n\n // Send valid message.\n\n injector\n\n .send(ExternalMessage {\n\n origin: ExternalMessageType::Email(\"[email protected]\".to_string()),\n\n id: MessageId::from(0u32),\n\n timestamp: Timestamp::now(),\n\n values: alice\n\n 
.get_field(&F::ALICE_EMAIL())\n\n .expected_message()\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 90, "score": 41.80944829416457 }, { "content": " assert!(changed);\n\n\n\n db.verify_message(&msg).await.unwrap();\n\n\n\n // Check updated state with notification.\n\n let exp_resp = ResponseAccountState {\n\n state: alice.clone().into(),\n\n notifications: vec![NotificationMessage::FieldVerified {\n\n context: alice.context.clone(),\n\n field: F::ALICE_TWITTER(),\n\n }],\n\n };\n\n\n\n let resp: JsonResult<ResponseAccountState> = stream_alice.next().await.into();\n\n assert_eq!(resp, JsonResult::Ok(exp_resp));\n\n\n\n // Verify Email (first challenge).\n\n let msg = ExternalMessage {\n\n origin: ExternalMessageType::Email(\"[email protected]\".to_string()),\n\n id: MessageId::from(0u32),\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 91, "score": 41.29033352398759 }, { "content": " #[serde(rename = \"erroneous\")]\n\n Erroneous,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct JudgementRequest {\n\n pub address: ChainAddress,\n\n pub accounts: HashMap<AccountType, String>,\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\npub struct DisplayNameEntry {\n\n pub context: IdentityContext,\n\n pub display_name: String,\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\n/// The entry as sent by the Watcher. 
Then converted into `DisplayNameEntry`.\n\npub struct DisplayNameEntryRaw {\n\n pub address: ChainAddress,\n", "file_path": "src/connector.rs", "rank": 92, "score": 41.277373860199404 }, { "content": " })\n\n .collect(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub struct JudgementState {\n\n pub context: IdentityContext,\n\n pub is_fully_verified: bool,\n\n pub inserted_timestamp: Timestamp,\n\n pub completion_timestamp: Option<Timestamp>,\n\n pub judgement_submitted: bool,\n\n pub issue_judgement_at: Option<Timestamp>,\n\n pub fields: Vec<IdentityField>,\n\n}\n\n\n\nimpl JudgementState {\n\n pub fn new(context: IdentityContext, fields: Vec<IdentityFieldValue>) -> Self {\n", "file_path": "src/primitives.rs", "rank": 93, "score": 41.0330661246407 }, { "content": " .map(JsonResult::Ok)\n\n .unwrap_or_else(|_| JsonResult::Err(\"Backend error, contact admin\".to_string()))\n\n }\n\n .into_actor(self),\n\n )\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Message)]\n\n#[rtype(result = \"JsonResult<bool>\")]\n\npub struct VerifyChallenge {\n\n pub entry: IdentityFieldValue,\n\n pub challenge: String,\n\n}\n\n\n\npub async fn verify_second_challenge(req: web::Json<VerifyChallenge>) -> HttpResponse {\n\n HttpResponse::Ok().json(\n\n SecondChallengeVerifier::from_registry()\n\n .send(req.into_inner())\n\n .await\n\n .unwrap(),\n\n )\n\n}\n", "file_path": "src/api/second_challenge.rs", "rank": 94, "score": 40.7845094524221 }, { "content": " request\n\n .accounts\n\n .entry(AccountType::Email)\n\n .and_modify(|entry| *entry = \"[email protected]\".to_string());\n\n request.accounts.remove(&AccountType::Matrix);\n\n\n\n // Send request\n\n connector\n\n .inject(WatcherMessage::new_judgement_request(request))\n\n .await;\n\n\n\n // The expected message (identity updated).\n\n let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n\n\n 
match resp {\n\n JsonResult::Ok(resp) => {\n\n assert!(resp.state.fields.iter().any(|field| {\n\n field.value == IdentityFieldValue::Email(\"[email protected]\".to_string())\n\n }));\n\n assert!(!resp\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 95, "score": 40.71245023403719 }, { "content": "use crate::api::{LookupServer, NotifyAccountState};\n\nuse crate::database::Database;\n\nuse crate::primitives::{IdentityContext, JudgementState, Timestamp};\n\nuse crate::Result;\n\nuse actix::prelude::*;\n\nuse std::collections::HashMap;\n\nuse tokio::time::{sleep, Duration};\n\n\n\npub async fn run_session_notifier(mut db: Database, server: Addr<LookupServer>) {\n\n async fn local(\n\n db: &mut Database,\n\n server: &Addr<LookupServer>,\n\n event_counter: &mut u64,\n\n ) -> Result<()> {\n\n let (events, new_counter) = db.fetch_events(*event_counter).await?;\n\n let mut cache: HashMap<IdentityContext, JudgementState> = HashMap::new();\n\n\n\n for event in events {\n\n let state = match cache.get(event.context()) {\n\n Some(state) => state.clone(),\n", "file_path": "src/notifier.rs", "rank": 96, "score": 40.49402991067988 }, { "content": " .send(ExternalMessage {\n\n origin: ExternalMessageType::Email(\"[email protected]\".to_string()),\n\n id: MessageId::from(0u32),\n\n timestamp: Timestamp::now(),\n\n values: alice\n\n .get_field(&F::ALICE_EMAIL())\n\n .expected_message()\n\n .to_message_parts(),\n\n })\n\n .await;\n\n\n\n // No response is sent. 
The service ignores unknown senders.\n\n assert!(stream.next().now_or_never().is_none());\n\n\n\n // Other judgement states must be unaffected (Bob).\n\n stream.send(IdentityContext::bob().to_ws()).await.unwrap();\n\n\n\n let resp: JsonResult<ResponseAccountState> = stream.next().await.into();\n\n assert_eq!(\n\n resp,\n", "file_path": "src/tests/api_judgement_state.rs", "rank": 97, "score": 40.32574605061261 }, { "content": "\n\n debug!(\"Received message from {}\", event.sender);\n\n\n\n // Add external message to inner field. That field is then\n\n // fetched by the `Adapter` implementation.\n\n let mut lock = self.messages.lock().await;\n\n (*lock).push(ExternalMessage {\n\n origin: ExternalMessageType::Matrix(event.sender.to_string()),\n\n // A message UID is not relevant regarding a live\n\n // message listener. The Matrix SDK handles\n\n // synchronization.\n\n id: 0u32.into(),\n\n timestamp: Timestamp::now(),\n\n values: vec![msg_body.to_string().into()],\n\n });\n\n }\n\n }\n\n}\n\n\n\n#[async_trait]\n", "file_path": "src/adapters/matrix.rs", "rank": 98, "score": 40.30804262752392 }, { "content": " if twitter_config.enabled {\n\n let config = twitter_config;\n\n\n\n let span = info_span!(\"twitter_adapter\");\n\n info!(api_key = config.api_key.as_str());\n\n\n\n async {\n\n info!(\"Configuring client\");\n\n let twitter_client = twitter::TwitterBuilder::new()\n\n .consumer_key(config.api_key)\n\n .consumer_secret(config.api_secret)\n\n .token(config.token)\n\n .token_secret(config.token_secret)\n\n .build()?;\n\n\n\n info!(\"Starting message adapter\");\n\n listener\n\n .start_message_adapter(twitter_client, config.request_interval)\n\n .await;\n\n\n", "file_path": "src/adapters/mod.rs", "rank": 99, "score": 40.27528017457577 } ]
Rust
simulator/src/memory.rs
Laegluin/mikrorechner
7e5e878072c941e422889465c43dea838b83e5fd
use crate::vm; use byteorder::{ByteOrder, LittleEndian}; use rand; use std::cmp::min; use std::mem; use std::ptr; pub type Word = u32; pub const WORD_BYTES: Word = mem::size_of::<Word>() as Word; pub const WORD_BITS: Word = WORD_BYTES * 8; pub const OP_CODE_BITS: Word = 5; pub const REG_REF_BITS: Word = 6; const WRITABLE_MEM_START_ADDR: Word = 0x80000000; const PAGE_LEN: usize = 4096; const LEVEL_1_TABLE_LEN: usize = 1024; const LEVEL_2_TABLE_LEN: usize = 1024; pub struct Memory { init_value: u8, page_table: Level2Table, } pub enum Access { All, Protected, } impl Memory { pub fn new() -> Memory { Memory { init_value: rand::random(), page_table: Level2Table::empty(), } } pub fn store_word(&mut self, addr: Word, value: Word) -> Result<(), vm::ErrorKind> { let mut bytes = [0; WORD_BYTES as usize]; LittleEndian::write_u32(&mut bytes, value); self.store(addr, &bytes, Access::Protected) } pub fn store(&mut self, addr: Word, buf: &[u8], access: Access) -> Result<(), vm::ErrorKind> { if buf.len() > (Word::max_value() - addr) as usize { return Err(vm::ErrorKind::OutOfBoundsMemoryAccess(addr, buf.len())); } match access { Access::All => (), Access::Protected => { if addr < WRITABLE_MEM_START_ADDR { return Err(vm::ErrorKind::ReadOnlyMemoryWriteAccess(addr)); } } } let mut ptr = addr; let mut remaining = buf; while !remaining.is_empty() { let (memory, _) = self.mem_ref(ptr, remaining.len() as Word); let memory_len = memory.len(); memory[..].copy_from_slice(&remaining[..memory_len]); ptr += memory_len as Word; remaining = &remaining[memory_len..]; } Ok(()) } pub fn load_word(&mut self, addr: Word) -> Result<Word, vm::ErrorKind> { let mut bytes = [0; WORD_BYTES as usize]; self.load(addr, &mut bytes)?; Ok(LittleEndian::read_u32(&bytes)) } pub fn load(&mut self, addr: Word, buf: &mut [u8]) -> Result<(), vm::ErrorKind> { if buf.len() > (Word::max_value() - addr) as usize { return Err(vm::ErrorKind::OutOfBoundsMemoryAccess(addr, buf.len())); } let buf_len = buf.len() as Word; let 
mut ptr = addr; let mut bytes_read = 0; while bytes_read < buf_len as usize { match self.mem_ref(ptr, buf_len - bytes_read as Word) { (_, true) => return Err(vm::ErrorKind::UninitializedMemoryAccess(ptr)), (memory, _) => { let memory_len = memory.len(); buf[bytes_read..bytes_read + memory_len].copy_from_slice(&memory); ptr += memory_len as Word; bytes_read += memory_len; } } } Ok(()) } fn mem_ref(&mut self, addr: Word, len: Word) -> (&mut [u8], bool) { let lvl_2_idx = addr >> 22; let lvl_1_idx = (addr >> 12) & (Word::max_value() >> 22); let page_idx = addr & (Word::max_value() >> 20); let init_value = self.init_value; let mut is_page_fault = false; let page = self.page_table.tables[lvl_2_idx as usize] .get_or_insert_with(|| Box::new(Level1Table::empty())) .pages[lvl_1_idx as usize] .get_or_insert_with(|| { is_page_fault = true; Box::new([init_value; PAGE_LEN]) }); let start = page_idx as usize; let end = min(page.len(), (page_idx + len) as usize); (&mut page[start..end], is_page_fault) } } struct Level2Table { tables: [Option<Box<Level1Table>>; LEVEL_2_TABLE_LEN], } impl Level2Table { fn empty() -> Level2Table { unsafe { let mut tables: [Option<Box<Level1Table>>; LEVEL_2_TABLE_LEN] = mem::uninitialized(); for table in &mut tables[..] { ptr::write(table, None); } Level2Table { tables } } } } struct Level1Table { pages: [Option<Box<[u8; PAGE_LEN]>>; LEVEL_1_TABLE_LEN], } impl Level1Table { fn empty() -> Level1Table { unsafe { let mut pages: [Option<Box<[u8; PAGE_LEN]>>; LEVEL_1_TABLE_LEN] = mem::uninitialized(); for page in &mut pages[..] 
{ ptr::write(page, None); } Level1Table { pages } } } } #[cfg(test)] mod test { use super::*; #[test] fn mem_ref() { let mut mem = Memory::new(); assert!(mem.mem_ref(0, 100).1); let mut mem = Memory::new(); mem.store(0, &[0; 10], Access::All).unwrap(); mem.store(10, &[1; 10], Access::All).unwrap(); assert_eq!(mem.mem_ref(0, 10), (&mut *vec![0; 10], false)); assert_eq!(mem.mem_ref(10, 10), (&mut *vec![1; 10], false)); assert_eq!( mem.mem_ref(0, 20), ( &mut *vec![0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], false ) ); } #[test] #[should_panic] fn store_panic_on_overflow() { let mut mem = Memory::new(); mem.store(Word::max_value(), &[1, 2, 3, 4], Access::All) .unwrap(); } #[test] #[should_panic] fn load_panic_on_overflow() { let mut mem = Memory::new(); mem.load(Word::max_value(), &mut vec![0; 11]).unwrap(); } #[test] fn load_and_store() { let mut mem = Memory::new(); mem.load(0, &mut vec![0; 42]).unwrap_err(); let mut mem = Memory::new(); mem.store(0, &[1, 2, 3, 4], Access::All).unwrap(); let mut buf = vec![0; 4]; mem.load(0, &mut buf).unwrap(); assert_eq!(vec![1, 2, 3, 4], buf); let mut buf = vec![0; 2]; mem.load(2, &mut buf).unwrap(); assert_eq!(vec![3, 4], buf); let mut mem = Memory::new(); mem.store(PAGE_LEN as Word - 3, &[1, 2, 3, 4, 5, 6], Access::All) .unwrap(); let mut buf = vec![0; 6]; mem.load(PAGE_LEN as Word - 3, &mut buf).unwrap(); assert_eq!(vec![1, 2, 3, 4, 5, 6], buf); } #[test] fn out_of_bounds_mem_access() { let mut mem = Memory::new(); assert_eq!( mem.load_word(0xffffffff).unwrap_err(), vm::ErrorKind::OutOfBoundsMemoryAccess(0xffffffff, 4) ); assert_eq!( mem.store_word(0xffffffff, 0xdeaddead).unwrap_err(), vm::ErrorKind::OutOfBoundsMemoryAccess(0xffffffff, 4) ); } }
use crate::vm; use byteorder::{ByteOrder, LittleEndian}; use rand; use std::cmp::min; use std::mem; use std::ptr; pub type Word = u32; pub const WORD_BYTES: Word = mem::size_of::<Word>() as Word; pub const WORD_BITS: Word = WORD_BYTES * 8; pub const OP_CODE_BITS: Word = 5; pub const REG_REF_BITS: Word = 6; const WRITABLE_MEM_START_ADDR: Word = 0x80000000; const PAGE_LEN: usize = 4096; const LEVEL_1_TABLE_LEN: usize = 1024; const LEVEL_2_TABLE_LEN: usize = 1024; pub struct Memory { init_value: u8, page_table: Level2Table, } pub enum Access { All, Protected, } impl Memory {
pub fn store_word(&mut self, addr: Word, value: Word) -> Result<(), vm::ErrorKind> { let mut bytes = [0; WORD_BYTES as usize]; LittleEndian::write_u32(&mut bytes, value); self.store(addr, &bytes, Access::Protected) } pub fn store(&mut self, addr: Word, buf: &[u8], access: Access) -> Result<(), vm::ErrorKind> { if buf.len() > (Word::max_value() - addr) as usize { return Err(vm::ErrorKind::OutOfBoundsMemoryAccess(addr, buf.len())); } match access { Access::All => (), Access::Protected => { if addr < WRITABLE_MEM_START_ADDR { return Err(vm::ErrorKind::ReadOnlyMemoryWriteAccess(addr)); } } } let mut ptr = addr; let mut remaining = buf; while !remaining.is_empty() { let (memory, _) = self.mem_ref(ptr, remaining.len() as Word); let memory_len = memory.len(); memory[..].copy_from_slice(&remaining[..memory_len]); ptr += memory_len as Word; remaining = &remaining[memory_len..]; } Ok(()) } pub fn load_word(&mut self, addr: Word) -> Result<Word, vm::ErrorKind> { let mut bytes = [0; WORD_BYTES as usize]; self.load(addr, &mut bytes)?; Ok(LittleEndian::read_u32(&bytes)) } pub fn load(&mut self, addr: Word, buf: &mut [u8]) -> Result<(), vm::ErrorKind> { if buf.len() > (Word::max_value() - addr) as usize { return Err(vm::ErrorKind::OutOfBoundsMemoryAccess(addr, buf.len())); } let buf_len = buf.len() as Word; let mut ptr = addr; let mut bytes_read = 0; while bytes_read < buf_len as usize { match self.mem_ref(ptr, buf_len - bytes_read as Word) { (_, true) => return Err(vm::ErrorKind::UninitializedMemoryAccess(ptr)), (memory, _) => { let memory_len = memory.len(); buf[bytes_read..bytes_read + memory_len].copy_from_slice(&memory); ptr += memory_len as Word; bytes_read += memory_len; } } } Ok(()) } fn mem_ref(&mut self, addr: Word, len: Word) -> (&mut [u8], bool) { let lvl_2_idx = addr >> 22; let lvl_1_idx = (addr >> 12) & (Word::max_value() >> 22); let page_idx = addr & (Word::max_value() >> 20); let init_value = self.init_value; let mut is_page_fault = false; let page = 
self.page_table.tables[lvl_2_idx as usize] .get_or_insert_with(|| Box::new(Level1Table::empty())) .pages[lvl_1_idx as usize] .get_or_insert_with(|| { is_page_fault = true; Box::new([init_value; PAGE_LEN]) }); let start = page_idx as usize; let end = min(page.len(), (page_idx + len) as usize); (&mut page[start..end], is_page_fault) } } struct Level2Table { tables: [Option<Box<Level1Table>>; LEVEL_2_TABLE_LEN], } impl Level2Table { fn empty() -> Level2Table { unsafe { let mut tables: [Option<Box<Level1Table>>; LEVEL_2_TABLE_LEN] = mem::uninitialized(); for table in &mut tables[..] { ptr::write(table, None); } Level2Table { tables } } } } struct Level1Table { pages: [Option<Box<[u8; PAGE_LEN]>>; LEVEL_1_TABLE_LEN], } impl Level1Table { fn empty() -> Level1Table { unsafe { let mut pages: [Option<Box<[u8; PAGE_LEN]>>; LEVEL_1_TABLE_LEN] = mem::uninitialized(); for page in &mut pages[..] { ptr::write(page, None); } Level1Table { pages } } } } #[cfg(test)] mod test { use super::*; #[test] fn mem_ref() { let mut mem = Memory::new(); assert!(mem.mem_ref(0, 100).1); let mut mem = Memory::new(); mem.store(0, &[0; 10], Access::All).unwrap(); mem.store(10, &[1; 10], Access::All).unwrap(); assert_eq!(mem.mem_ref(0, 10), (&mut *vec![0; 10], false)); assert_eq!(mem.mem_ref(10, 10), (&mut *vec![1; 10], false)); assert_eq!( mem.mem_ref(0, 20), ( &mut *vec![0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], false ) ); } #[test] #[should_panic] fn store_panic_on_overflow() { let mut mem = Memory::new(); mem.store(Word::max_value(), &[1, 2, 3, 4], Access::All) .unwrap(); } #[test] #[should_panic] fn load_panic_on_overflow() { let mut mem = Memory::new(); mem.load(Word::max_value(), &mut vec![0; 11]).unwrap(); } #[test] fn load_and_store() { let mut mem = Memory::new(); mem.load(0, &mut vec![0; 42]).unwrap_err(); let mut mem = Memory::new(); mem.store(0, &[1, 2, 3, 4], Access::All).unwrap(); let mut buf = vec![0; 4]; mem.load(0, &mut buf).unwrap(); assert_eq!(vec![1, 2, 3, 4], 
buf); let mut buf = vec![0; 2]; mem.load(2, &mut buf).unwrap(); assert_eq!(vec![3, 4], buf); let mut mem = Memory::new(); mem.store(PAGE_LEN as Word - 3, &[1, 2, 3, 4, 5, 6], Access::All) .unwrap(); let mut buf = vec![0; 6]; mem.load(PAGE_LEN as Word - 3, &mut buf).unwrap(); assert_eq!(vec![1, 2, 3, 4, 5, 6], buf); } #[test] fn out_of_bounds_mem_access() { let mut mem = Memory::new(); assert_eq!( mem.load_word(0xffffffff).unwrap_err(), vm::ErrorKind::OutOfBoundsMemoryAccess(0xffffffff, 4) ); assert_eq!( mem.store_word(0xffffffff, 0xdeaddead).unwrap_err(), vm::ErrorKind::OutOfBoundsMemoryAccess(0xffffffff, 4) ); } }
pub fn new() -> Memory { Memory { init_value: rand::random(), page_table: Level2Table::empty(), } }
function_block-full_function
[ { "content": "pub fn to_hex(word: Word) -> String {\n\n let width = WORD_BYTES as usize * 2 + 2;\n\n format!(\"{:#0width$x}\", word, width = width)\n\n}\n\n\n", "file_path": "simulator/src/support.rs", "rank": 0, "score": 217301.66889780847 }, { "content": "struct Instruction(u32);\n\n\n\nimpl Instruction {\n\n fn op(op: Op) -> Instruction {\n\n Instruction(set_op_code(0, op))\n\n }\n\n\n\n fn dst(self, reg: Reg) -> Instruction {\n\n Instruction(set_reg(self.0, reg, RegPos::Dst))\n\n }\n\n\n\n fn arg1(self, reg: Reg) -> Instruction {\n\n Instruction(set_reg(self.0, reg, RegPos::Arg1))\n\n }\n\n\n\n fn arg2(self, reg: Reg) -> Instruction {\n\n Instruction(set_reg(self.0, reg, RegPos::Arg2))\n\n }\n\n\n\n fn immediate(self, imm: u32, num_bits: u32) -> Instruction {\n", "file_path": "cmlc/src/emit.rs", "rank": 1, "score": 216780.53215535378 }, { "content": "pub fn to_hex_octets(bytes: &[u8]) -> String {\n\n let mut string: String = bytes.iter().map(|byte| format!(\"{:02x} \", byte)).collect();\n\n string.pop();\n\n string\n\n}\n", "file_path": "simulator/src/support.rs", "rank": 2, "score": 190412.10760680557 }, { "content": "pub fn emit_object(asm: Asm) -> Vec<u8> {\n\n let mut labels = HashMap::new();\n\n let mut idx = 0;\n\n\n\n for cmd in asm.commands() {\n\n if let Command::Label(ref label) = &cmd {\n\n labels.insert(label.clone(), idx);\n\n }\n\n\n\n idx += cmd.len();\n\n }\n\n\n\n let mut img = Vec::new();\n\n let mut idx = 0;\n\n\n\n for cmd in asm.commands() {\n\n cmd.write_img(&mut img, idx, &labels);\n\n idx += cmd.len();\n\n }\n\n\n", "file_path": "cmlc/src/emit.rs", "rank": 3, "score": 184139.22433945548 }, { "content": "pub fn instr_to_string(instr: Word) -> Option<String> {\n\n use self::Op::*;\n\n\n\n let op = Op::from_word(instr).ok()?;\n\n\n\n let string = match op {\n\n Add | Sub | Mul | Div | And | Or | Xor | ShiftL | ShiftR | SignedShiftR => {\n\n let dst = Reg::from_word(instr, RegPos::Dst).ok()?;\n\n let lhs = Reg::from_word(instr, 
RegPos::Arg1).ok()?;\n\n let rhs = Reg::from_word(instr, RegPos::Arg2).ok()?;\n\n format!(\"{}: dst = {}, lhs = {}, rhs = {}\", op, dst, lhs, rhs)\n\n }\n\n Not => {\n\n let dst = Reg::from_word(instr, RegPos::Dst).ok()?;\n\n let rhs = Reg::from_word(instr, RegPos::Arg1).ok()?;\n\n format!(\"{}: dst = {}, rhs = {}\", op, dst, rhs)\n\n }\n\n Copy => {\n\n let dst = Reg::from_word(instr, RegPos::Dst).ok()?;\n\n let src = Reg::from_word(instr, RegPos::Arg1).ok()?;\n", "file_path": "simulator/src/vm.rs", "rank": 4, "score": 184108.30992576169 }, { "content": "#[derive(Clone, Copy)]\n\nstruct StreamPos(usize);\n\n\n", "file_path": "cmlc/src/parser.rs", "rank": 7, "score": 169132.64678832772 }, { "content": "fn tracer(printer: Arc<Printer>) -> impl 'static + Send + Sync + FnMut(Word) {\n\n move |instr| {\n\n if IS_TRACE_ENABLED.load(Ordering::SeqCst) {\n\n if let Some(instr) = vm::instr_to_string(instr) {\n\n displayln!(printer, \"> {}\", instr);\n\n }\n\n }\n\n }\n\n}\n\n\n\nconst HELP: &str = r#\"Inspect and interact with the simulator.\n\n\n\nAll commands interacting with the simulators state require the simulation to be paused.\n\nNumeric literals are interpreted as decimal, but can be prefixed with `0x` for hexadecimal\n\nor `0b` for binary.\n\n\n\nc | continue Continue execution if paused\n\np | pause Pause execution if currently running\n\nenable_trace Log each executed instruction\n\ndisable_trace Disable logging of instructions\n", "file_path": "simulator/src/main.rs", "rank": 8, "score": 164904.6794392923 }, { "content": "pub fn parse(tokens: impl AsRef<[Spanned<Token>]>) -> Result<Ast, Spanned<ParseError>> {\n\n let tokens = TokenStream::new(tokens.as_ref());\n\n let mut items = Vec::new();\n\n\n\n while tokens.peek() != None {\n\n items.push(item(&tokens)?);\n\n }\n\n\n\n Ok(Ast::new(items))\n\n}\n\n\n", "file_path": "cmlc/src/parser.rs", "rank": 9, "score": 161140.0451456962 }, { "content": "pub fn verify_types(mut ast: Ast) -> Result<TypedAst, 
Spanned<TypeError>> {\n\n let mut types = FnvHashMap::default();\n\n\n\n for item in &mut ast.items {\n\n let Spanned { value: item, .. } = item;\n\n\n\n match *item {\n\n Item::FnDef(ref mut def) => {\n\n for Spanned { value: param, span } in def.params.iter_mut() {\n\n canonicalize_type_ref(&mut param.ty, &mut ast.type_env, &mut types, *span)?;\n\n }\n\n\n\n canonicalize_type_ref(\n\n &mut def.ret_ty,\n\n &mut ast.type_env,\n\n &mut types,\n\n def.body.span,\n\n )?;\n\n verify_expr(def.body.as_mut(), &mut ast.type_env, &mut types)?;\n\n\n", "file_path": "cmlc/src/typecheck/lint.rs", "rank": 10, "score": 156120.2510002278 }, { "content": "pub fn lex<'a>(stream: impl Into<StrStream<'a>>) -> Result<Vec<Spanned<Token>>, Spanned<LexError>> {\n\n let mut stream = stream.into();\n\n let mut char_buf = String::new();\n\n let mut tokens = Vec::new();\n\n\n\n loop {\n\n stream.set_span_start();\n\n\n\n let c = match stream.next() {\n\n Some(c) => c,\n\n None => return Ok(tokens),\n\n };\n\n\n\n match c {\n\n c if c.is_whitespace() => (),\n\n '\"' => {\n\n while let Some(c) = stream.next_if(|c| c != '\"') {\n\n if c == '\\\\' {\n\n let maybe_char = stream.next();\n\n\n", "file_path": "cmlc/src/lexer.rs", "rank": 11, "score": 153442.52367286268 }, { "content": "pub fn typecheck(mut ast: Ast) -> Result<TypedAst, Spanned<TypeError>> {\n\n let mut type_bindings = ScopeMap::new();\n\n bind_primitives(&mut ast.type_env, &mut type_bindings);\n\n let mut value_bindings = ScopeMap::new();\n\n\n\n check_items(\n\n &mut ast.items,\n\n &mut ast.type_env,\n\n &mut type_bindings,\n\n &mut value_bindings,\n\n )?;\n\n\n\n lint::verify_types(ast)\n\n}\n\n\n", "file_path": "cmlc/src/typecheck.rs", "rank": 12, "score": 152609.1485976844 }, { "content": "#[allow(unused)]\n\npub fn run(mem: Memory, breakpoints: Breakpoints) -> Result<(RegBank, Memory), SimError> {\n\n let handle = start(mem, breakpoints, false, |_| {})?;\n\n let mut c = 0;\n\n\n\n loop {\n\n match handle.recv() {\n\n 
Response::Exception(err) => {\n\n handle.send(Request::Exit);\n\n return Err(SimError::Vm(err));\n\n }\n\n Response::Pause(Status::Halt) => handle.send(Request::Exit),\n\n Response::Pause(Status::Break) => handle.send(Request::Continue),\n\n Response::Exit => break,\n\n // we never generate any requests that could cause other responses\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n handle.join()\n\n}\n\n\n", "file_path": "simulator/src/simulation.rs", "rank": 13, "score": 150186.98711498152 }, { "content": "/// An iterator over offsets required to copy a value of the given `layout` word by word.\n\n///\n\n/// If the value's size is not divisible by 4, part of the offsets will overlap to compensate.\n\n///\n\n/// ## Panics\n\n/// Panics if the size of the layout is less than 4 bytes.\n\nfn word_copy_offsets(layout: &Layout) -> impl '_ + Iterator<Item = StackOffset> {\n\n // need at least four bytes for a copy\n\n assert!(layout.stack_size() >= StackOffset(4));\n\n\n\n (0..layout.stack_size().0).step_by(4).map(move |offset| {\n\n // if the last value is not exactly four bytes, simply copy\n\n // some bytes again\n\n if offset <= (layout.stack_size().0 - 4) {\n\n StackOffset(offset)\n\n } else {\n\n layout.stack_size() - StackOffset(4)\n\n }\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::lexer;\n\n use crate::parser;\n", "file_path": "cmlc/src/codegen.rs", "rank": 14, "score": 143488.64167414067 }, { "content": "pub fn gen_asm(ast: TypedAst) -> Result<Asm, Spanned<CodegenError>> {\n\n let mut asm = Asm::new();\n\n let mut layouts = LayoutCache::new();\n\n let mut bindings = ScopeMap::new();\n\n\n\n gen_items(&ast.items, &mut bindings, &mut layouts, &ast, &mut asm)?;\n\n gen_rt_start(&mut asm, &bindings)?;\n\n\n\n Ok(asm)\n\n}\n\n\n", "file_path": "cmlc/src/codegen.rs", "rank": 15, "score": 142415.79577981879 }, { "content": "fn set_immediate(instr: u32, imm: u32, num_bits: u32) -> u32 {\n\n assert!(num_bits <= 32);\n\n let imm_max = 
u32::max_value() >> (32 - num_bits);\n\n assert!(imm <= imm_max);\n\n instr | imm\n\n}\n\n\n", "file_path": "cmlc/src/emit.rs", "rank": 16, "score": 134661.47499046638 }, { "content": "fn store(instr: Word, regs: &mut RegBank, mem: &mut Memory) -> Result<Status, ErrorKind> {\n\n let dst_addr_reg = Reg::from_word(instr, RegPos::Dst)?;\n\n let src = Reg::from_word(instr, RegPos::Arg1)?;\n\n let offset = immediate_from_instr(instr, 2);\n\n\n\n let addr = regs[dst_addr_reg]\n\n .wrapping_add(offset)\n\n .wrapping_add(regs[Reg::AddrOffset]);\n\n\n\n mem.store_word(addr, regs[src])?;\n\n\n\n Ok(Status::Pause)\n\n}\n\n\n", "file_path": "simulator/src/vm.rs", "rank": 17, "score": 133497.80547864182 }, { "content": "fn load(instr: Word, regs: &mut RegBank, mem: &mut Memory) -> Result<Status, ErrorKind> {\n\n let dst = Reg::from_word(instr, RegPos::Dst)?;\n\n let src_addr_reg = Reg::from_word(instr, RegPos::Arg1)?;\n\n let offset = immediate_from_instr(instr, 2);\n\n\n\n let addr = regs[src_addr_reg]\n\n .wrapping_add(offset)\n\n .wrapping_add(regs[Reg::AddrOffset]);\n\n\n\n let value = mem.load_word(addr)?;\n\n regs.set(dst, value);\n\n Ok(Status::Pause)\n\n}\n\n\n", "file_path": "simulator/src/vm.rs", "rank": 18, "score": 133497.80547864182 }, { "content": "/// Extracts an unsigned immediate from an `instr` with `num_reg_refs` number of\n\n/// register references in the instruction (including don't care registers like in\n\n/// `Op::Cmp`).\n\nfn immediate_from_instr(instr: Word, num_reg_refs: Word) -> Word {\n\n let ignored_bits = OP_CODE_BITS + (num_reg_refs * REG_REF_BITS);\n\n let mask = Word::max_value() >> ignored_bits;\n\n instr & mask\n\n}\n\n\n", "file_path": "simulator/src/vm.rs", "rank": 19, "score": 130579.64675716069 }, { "content": "/// Like `immediate_from_word`, but the immediate is interpreted as a signed two's\n\n/// complement number.\n\nfn immediate_from_instr_signed(instr: Word, num_reg_refs: Word) -> Word {\n\n let ignored_bits = OP_CODE_BITS + 
(num_reg_refs * REG_REF_BITS);\n\n let mask = Word::max_value() >> ignored_bits;\n\n\n\n let imm_bits = WORD_BITS - ignored_bits;\n\n let sign = instr & (1 << (imm_bits - 1));\n\n\n\n if sign == 0 {\n\n instr & mask\n\n } else {\n\n let abs = ((!instr) + 1) & mask;\n\n (-(abs as i32)) as Word\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "simulator/src/vm.rs", "rank": 20, "score": 130279.28854776415 }, { "content": "fn set_signed_immediate(instr: u32, imm: i32, num_bits: u32) -> u32 {\n\n assert!(num_bits <= 32);\n\n let imm_max = u32::max_value() >> (32 - num_bits);\n\n let imm = (imm as u32) & imm_max;\n\n\n\n instr | imm\n\n}\n", "file_path": "cmlc/src/emit.rs", "rank": 21, "score": 125796.4852607525 }, { "content": "fn parse_word(word: &str) -> Result<Word, CliError> {\n\n let result = if word.starts_with(\"0x\") {\n\n Word::from_str_radix(&word[2..], 16)\n\n } else if word.starts_with(\"0b\") {\n\n Word::from_str_radix(&word[2..], 2)\n\n } else {\n\n Word::from_str_radix(word, 10)\n\n };\n\n\n\n result.map_err(|_| CliError::CannotParseWord(word.to_owned()))\n\n}\n\n\n", "file_path": "simulator/src/main.rs", "rank": 22, "score": 122545.86768269337 }, { "content": "// some of these constants should probably be shared with the simulator\n\nfn set_op_code(instr: u32, op: Op) -> u32 {\n\n let op = op as u32;\n\n instr | (op << 27)\n\n}\n\n\n", "file_path": "cmlc/src/emit.rs", "rank": 23, "score": 117698.44419298344 }, { "content": "class Word(Token):\n\n \"\"\"\n\n Token for matching words composed of allowed character sets.\n\n Defined with string containing all allowed initial characters,\n\n an optional string containing allowed body characters (if omitted,\n\n defaults to the initial character set), and an optional minimum,\n\n maximum, and/or exact length. 
The default value for C{min} is 1 (a\n\n minimum value < 1 is not valid); the default values for C{max} and C{exact}\n\n are 0, meaning no maximum or exact length restriction. An optional\n\n C{excludeChars} parameter can list characters that might be found in \n\n the input C{bodyChars} string; useful to define a word of all printables\n\n except for one or two characters, for instance.\n\n \n\n L{srange} is useful for defining custom character set strings for defining \n\n C{Word} expressions, using range notation from regular expression character sets.\n\n \n\n A common mistake is to use C{Word} to match a specific literal string, as in \n\n C{Word(\"Address\")}. Remember that C{Word} uses the string argument to define\n\n I{sets} of matchable characters. This expression would match \"Add\", \"AAA\",\n\n \"dAred\", or any other word made up of the characters 'A', 'd', 'r', 'e', and 's'.\n\n To match an exact literal string, use L{Literal} or L{Keyword}.\n\n\n\n pyparsing includes helper strings for building Words:\n\n - L{alphas}\n\n - L{nums}\n\n - L{alphanums}\n\n - L{hexnums}\n\n - L{alphas8bit} (alphabetic characters in ASCII range 128-255 - accented, tilded, umlauted, etc.)\n\n - L{punc8bit} (non-alphabetic characters in ASCII range 128-255 - currency, symbols, superscripts, diacriticals, etc.)\n\n - L{printables} (any non-whitespace character)\n\n\n\n Example::\n\n # a word composed of digits\n\n integer = Word(nums) # equivalent to Word(\"0123456789\") or Word(srange(\"0-9\"))\n\n \n\n # a word with a leading capital, and zero or more lowercase\n\n capital_word = Word(alphas.upper(), alphas.lower())\n\n\n\n # hostnames are alphanumeric, with leading alpha, and '-'\n\n hostname = Word(alphas, alphanums+'-')\n\n \n\n # roman numeral (not a strict parser, accepts invalid mix of characters)\n\n roman = Word(\"IVXLCDM\")\n\n \n\n # any string of non-whitespace characters, except for ','\n\n csv_value = Word(printables, excludeChars=\",\")\n\n \"\"\"\n\n def 
__init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ):\n\n super(Word,self).__init__()\n\n if excludeChars:\n\n initChars = ''.join(c for c in initChars if c not in excludeChars)\n\n if bodyChars:\n\n bodyChars = ''.join(c for c in bodyChars if c not in excludeChars)\n\n self.initCharsOrig = initChars\n\n self.initChars = set(initChars)\n\n if bodyChars :\n\n self.bodyCharsOrig = bodyChars\n\n self.bodyChars = set(bodyChars)\n\n else:\n\n self.bodyCharsOrig = initChars\n\n self.bodyChars = set(initChars)\n\n\n\n self.maxSpecified = max > 0\n\n\n\n if min < 1:\n\n raise ValueError(\"cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted\")\n\n\n\n self.minLen = min\n\n\n\n if max > 0:\n\n self.maxLen = max\n\n else:\n\n self.maxLen = _MAX_INT\n\n\n\n if exact > 0:\n\n self.maxLen = exact\n\n self.minLen = exact\n\n\n\n self.name = _ustr(self)\n\n self.errmsg = \"Expected \" + self.name\n\n self.mayIndexError = False\n\n self.asKeyword = asKeyword\n\n\n\n if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0):\n\n if self.bodyCharsOrig == self.initCharsOrig:\n\n self.reString = \"[%s]+\" % _escapeRegexRangeChars(self.initCharsOrig)\n\n elif len(self.initCharsOrig) == 1:\n\n self.reString = \"%s[%s]*\" % \\\n\n (re.escape(self.initCharsOrig),\n\n _escapeRegexRangeChars(self.bodyCharsOrig),)\n\n else:\n\n self.reString = \"[%s][%s]*\" % \\\n\n (_escapeRegexRangeChars(self.initCharsOrig),\n\n _escapeRegexRangeChars(self.bodyCharsOrig),)\n\n if self.asKeyword:\n\n self.reString = r\"\\b\"+self.reString+r\"\\b\"\n\n try:\n\n self.re = re.compile( self.reString )\n\n except Exception:\n\n self.re = None\n\n\n\n def parseImpl( self, instring, loc, doActions=True ):\n\n if self.re:\n\n result = self.re.match(instring,loc)\n\n if not result:\n\n raise ParseException(instring, loc, self.errmsg, self)\n\n\n\n loc = result.end()\n\n return loc, 
result.group()\n\n\n\n if not(instring[ loc ] in self.initChars):\n\n raise ParseException(instring, loc, self.errmsg, self)\n\n\n\n start = loc\n\n loc += 1\n\n instrlen = len(instring)\n\n bodychars = self.bodyChars\n\n maxloc = start + self.maxLen\n\n maxloc = min( maxloc, instrlen )\n\n while loc < maxloc and instring[loc] in bodychars:\n\n loc += 1\n\n\n\n throwException = False\n\n if loc - start < self.minLen:\n\n throwException = True\n\n if self.maxSpecified and loc < instrlen and instring[loc] in bodychars:\n\n throwException = True\n\n if self.asKeyword:\n\n if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars):\n\n throwException = True\n\n\n\n if throwException:\n\n raise ParseException(instring, loc, self.errmsg, self)\n\n\n\n return loc, instring[start:loc]\n\n\n\n def __str__( self ):\n\n try:\n\n return super(Word,self).__str__()\n\n except Exception:\n\n pass\n\n\n\n\n\n if self.strRepr is None:\n\n\n\n def charsAsStr(s):\n\n if len(s)>4:\n\n return s[:4]+\"...\"\n\n else:\n\n return s\n\n\n\n if ( self.initCharsOrig != self.bodyCharsOrig ):\n\n self.strRepr = \"W:(%s,%s)\" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) )\n\n else:\n\n self.strRepr = \"W:(%s)\" % charsAsStr(self.initCharsOrig)\n\n\n", "file_path": "Assembler/venv/Lib/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pyparsing.py", "rank": 24, "score": 114237.05375017192 }, { "content": "#[derive(Debug, PartialEq, Eq)]\n\n#[repr(u8)]\n\n#[rustfmt::skip]\n\n#[allow(unused)]\n\nenum Op {\n\n Add = 0b_00000,\n\n Sub = 0b_10111,\n\n Mul = 0b_01110,\n\n Div = 0b_01111,\n\n And = 0b_10000,\n\n Or = 0b_10001,\n\n Not = 0b_10010,\n\n Xor = 0b_10011,\n\n ShiftL = 0b_10100,\n\n ShiftR = 0b_10101,\n\n SignedShiftR = 0b_10110,\n\n Copy = 0b_00001,\n\n Set = 0b_00010,\n\n CmpEq = 0b_00011,\n\n CmpGt = 0b_00100,\n\n CmpGe = 0b_00101,\n\n Jmp = 0b_00110,\n\n JmpRel = 0b_00111,\n\n JmpIf = 0b_01000,\n\n JmpRelIf = 0b_01001,\n\n Load = 
0b_01010,\n\n Store = 0b_01011,\n\n Noop = 0b_01100,\n\n Halt = 0b_01101,\n\n}\n\n\n", "file_path": "cmlc/src/emit.rs", "rank": 25, "score": 114212.93804127525 }, { "content": "#[derive(CustomTryInto, Debug, PartialEq, Eq, Display)]\n\n#[strum(serialize_all = \"snake_case\")]\n\n#[repr(u8)]\n\n#[rustfmt::skip]\n\nenum Op {\n\n Add = 0b_00000,\n\n Sub = 0b_10111,\n\n Mul = 0b_01110,\n\n Div = 0b_01111,\n\n And = 0b_10000,\n\n Or = 0b_10001,\n\n Not = 0b_10010,\n\n Xor = 0b_10011,\n\n ShiftL = 0b_10100,\n\n ShiftR = 0b_10101,\n\n SignedShiftR = 0b_10110,\n\n Copy = 0b_00001,\n\n Set = 0b_00010,\n\n CmpEq = 0b_00011,\n\n CmpGt = 0b_00100,\n\n CmpGe = 0b_00101,\n\n Jmp = 0b_00110,\n\n JmpRel = 0b_00111,\n\n JmpIf = 0b_01000,\n", "file_path": "simulator/src/vm.rs", "rank": 26, "score": 114212.56127920756 }, { "content": "#[derive(Debug)]\n\nenum Error {\n\n Lex(lexer::LexError),\n\n Parse(parser::ParseError),\n\n Type(typecheck::TypeError),\n\n Codegen(codegen::CodegenError),\n\n}\n\n\n\nimpl Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n use Error::*;\n\n\n\n match *self {\n\n Lex(ref why) => write!(f, \"{}\", why),\n\n Parse(ref why) => write!(f, \"{}\", why),\n\n Type(ref why) => write!(f, \"{}\", why),\n\n Codegen(ref why) => write!(f, \"{}\", why),\n\n }\n\n }\n\n}\n\n\n\n/// Compiles cml programs\n", "file_path": "cmlc/src/main.rs", "rank": 27, "score": 114208.0712570608 }, { "content": "#[derive(StructOpt)]\n\nstruct Args {\n\n /// The source file to compile\n\n #[structopt(parse(from_os_str))]\n\n src: PathBuf,\n\n\n\n /// A file the compiler will use for the output\n\n #[structopt(short = \"o\", long = \"--output\", parse(from_os_str))]\n\n output: PathBuf,\n\n\n\n /// Only emit assembler code, do not compile to an image\n\n #[structopt(long = \"--emit-asm\")]\n\n emit_asm: bool,\n\n}\n\n\n", "file_path": "cmlc/src/main.rs", "rank": 28, "score": 114202.79826906929 }, { "content": "#[derive(StructOpt)]\n\nstruct Args {\n\n /// 
The image that is loaded into memory before startup. Addressing\n\n /// starts at 0x00000000\n\n #[structopt(parse(from_os_str))]\n\n image: PathBuf,\n\n\n\n /// Load the image from a text instead of a binary file. The file\n\n /// must contain the binary representation of words, separated by newlines\n\n #[structopt(short = \"c\", long = \"convert-from-text\")]\n\n convert_from_text: bool,\n\n\n\n /// Dump the entire image to a file and exit\n\n #[structopt(short = \"d\", long = \"dump-image\", name = \"path\", parse(from_os_str))]\n\n dump_image: Option<PathBuf>,\n\n\n\n /// Start the simulation but immediately pause it before executing the first\n\n /// instruction.\n\n #[structopt(long = \"start-paused\")]\n\n start_paused: bool,\n\n}\n\n\n", "file_path": "simulator/src/main.rs", "rank": 29, "score": 114202.79826906929 }, { "content": "struct Binding {\n\n ty: TypeRef,\n\n is_mut: bool,\n\n param_names: Option<Vec<Option<Ident>>>,\n\n}\n\n\n\nimpl Binding {\n\n fn new(ty: TypeRef) -> Binding {\n\n Binding {\n\n ty,\n\n is_mut: false,\n\n param_names: None,\n\n }\n\n }\n\n\n\n fn new_mut(ty: TypeRef) -> Binding {\n\n Binding {\n\n ty,\n\n is_mut: true,\n\n param_names: None,\n", "file_path": "cmlc/src/typecheck.rs", "rank": 30, "score": 114197.1742771987 }, { "content": "def enum(*sequential, **named):\n\n enums = dict(zip(sequential, range(len(sequential))), **named)\n\n reverse = {value: key for key, value in enums.items()}\n\n enums['reverse_mapping'] = reverse\n", "file_path": "Assembler/venv/Lib/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/misc.py", "rank": 31, "score": 113726.0679783232 }, { "content": "#[derive(Clone, Copy)]\n\n#[repr(u8)]\n\nenum RegPos {\n\n Dst = 0,\n\n Arg1 = 1,\n\n Arg2 = 2,\n\n}\n\n\n\nimpl Reg {\n\n #[inline]\n\n fn from_word(word: Word, pos: RegPos) -> Result<Reg, ErrorKind> {\n\n // shift to right so the register is in the lowest bits\n\n let shift_by = WORD_BITS - (OP_CODE_BITS + ((pos as Word + 1) * 
REG_REF_BITS));\n\n let rest = word >> shift_by;\n\n\n\n let mask = Word::max_value() >> (WORD_BITS - REG_REF_BITS);\n\n let reg = (rest & mask) as u8;\n\n\n\n match reg.try_into_Reg() {\n\n Ok(reg) => Ok(reg),\n\n Err(_) => Err(ErrorKind::IllegalRegister(reg)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "simulator/src/vm.rs", "rank": 32, "score": 113568.13613918194 }, { "content": "enum RegPos {\n\n Dst,\n\n Arg1,\n\n Arg2,\n\n}\n\n\n", "file_path": "cmlc/src/emit.rs", "rank": 33, "score": 113562.72443324003 }, { "content": "#[derive(Debug)]\n\nenum Node {\n\n Root(Root),\n\n Next(usize),\n\n}\n\n\n", "file_path": "cmlc/src/typecheck/unify.rs", "rank": 34, "score": 113562.72443324003 }, { "content": "#[derive(Debug)]\n\nstruct Root {\n\n var: Type,\n\n ty_ref: TypeRef,\n\n rank: u8,\n\n}\n\n\n\nimpl TypeEnv {\n\n pub fn new() -> TypeEnv {\n\n TypeEnv { nodes: Vec::new() }\n\n }\n\n\n\n pub fn insert(&mut self, var: Type) -> TypeRef {\n\n let ty_ref = TypeRef::new(self.nodes.len(), &var);\n\n\n\n self.nodes.push(Node::Root(Root {\n\n var,\n\n ty_ref: ty_ref.clone(),\n\n rank: 0,\n\n }));\n\n\n", "file_path": "cmlc/src/typecheck/unify.rs", "rank": 35, "score": 113551.84002189481 }, { "content": "struct SimSignals {\n\n pause: AtomicBool,\n\n is_running: AtomicBool,\n\n stop: AtomicBool,\n\n cont: Condvar,\n\n}\n\n\n\nimpl SimSignals {\n\n /// If set to true, the simulation will pause on the next instruction. 
While paused, the lock\n\n /// to the state is yielded.\n\n fn set_pause(&self, pause: bool) {\n\n self.pause.store(pause, Ordering::Release);\n\n }\n\n\n\n fn set_is_running(&self, is_running: bool) {\n\n self.is_running.store(is_running, Ordering::SeqCst);\n\n }\n\n\n\n fn is_running(&self) -> bool {\n\n self.is_running.load(Ordering::SeqCst)\n", "file_path": "simulator/src/simulation.rs", "rank": 36, "score": 113551.84002189481 }, { "content": "struct CtrlThread {\n\n handle: JoinHandle<Result<(), VmError>>,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct CtrlHandle {\n\n req_sender: Sender<Request>,\n\n resp_receiver: Receiver<Response>,\n\n resp_sender: Sender<Response>,\n\n req_receiver: Receiver<Request>,\n\n}\n\n\n\nimpl CtrlHandle {\n\n fn new() -> CtrlHandle {\n\n let (req_sender, req_receiver) = crossbeam_channel::unbounded();\n\n let (resp_sender, resp_receiver) = crossbeam_channel::unbounded();\n\n\n\n CtrlHandle {\n\n req_sender,\n\n resp_receiver,\n", "file_path": "simulator/src/simulation.rs", "rank": 37, "score": 113551.84002189481 }, { "content": "struct SimThread {\n\n handle: JoinHandle<Arc<Mutex<State>>>,\n\n}\n\n\n", "file_path": "simulator/src/simulation.rs", "rank": 38, "score": 113551.84002189481 }, { "content": "struct ExprResult {\n\n value: Option<Value>,\n\n layout: Rc<Layout>,\n\n}\n\n\n\nimpl ExprResult {\n\n fn copy_to(value: Value, layout: impl Into<Rc<Layout>>) -> ExprResult {\n\n ExprResult {\n\n value: Some(value),\n\n layout: layout.into(),\n\n }\n\n }\n\n\n\n fn by_ref(layout: impl Into<Rc<Layout>>) -> ExprResult {\n\n ExprResult {\n\n value: None,\n\n layout: layout.into(),\n\n }\n\n }\n\n\n", "file_path": "cmlc/src/codegen.rs", "rank": 39, "score": 113551.84002189481 }, { "content": "fn set_reg(instr: u32, reg: Reg, pos: RegPos) -> u32 {\n\n let shift_by = match pos {\n\n RegPos::Dst => 21,\n\n RegPos::Arg1 => 15,\n\n RegPos::Arg2 => 9,\n\n };\n\n\n\n instr | ((reg as u32) << shift_by)\n\n}\n\n\n", "file_path": "cmlc/src/emit.rs", 
"rank": 40, "score": 112409.1728003571 }, { "content": "struct FnContext<'a> {\n\n ret_addr: &'a Value,\n\n ret_value: &'a Value,\n\n ret_layout: Rc<Layout>,\n\n regs: RegAllocator,\n\n stack: StackAllocator,\n\n bindings: &'a mut ScopeMap<Ident, Value>,\n\n layouts: &'a mut LayoutCache,\n\n ast: &'a TypedAst,\n\n}\n\n\n\nimpl FnContext<'_> {\n\n /// Allocates space for a value with the given `layout`. Allocates registers if possible and\n\n /// falls back to stack memory if there are not enough free registers.\n\n fn alloc(&mut self, layout: &Layout) -> Value {\n\n self.regs\n\n .alloc(&layout)\n\n .map(Value::Reg)\n\n .unwrap_or_else(|| Value::Stack(self.stack.alloc(&layout)))\n\n }\n\n\n\n fn layout(&mut self, ty: TypeRef, at: Span) -> Result<Rc<Layout>, Spanned<CodegenError>> {\n\n self.layouts\n\n .get_or_gen(ty, self.ast)\n\n .map_err(|err| Spanned::new(err, at))\n\n }\n\n}\n\n\n", "file_path": "cmlc/src/codegen.rs", "rank": 41, "score": 108378.27981232354 }, { "content": "struct TokenStream<'t> {\n\n tokens: &'t [Spanned<Token>],\n\n consumed: Cell<usize>,\n\n}\n\n\n\nimpl<'t> TokenStream<'t> {\n\n fn new(tokens: &[Spanned<Token>]) -> TokenStream<'_> {\n\n TokenStream {\n\n tokens,\n\n consumed: Cell::new(0),\n\n }\n\n }\n\n\n\n fn next(&self) -> Option<Token> {\n\n let consumed = self.consumed.get();\n\n\n\n if consumed < self.tokens.len() {\n\n self.consumed.set(consumed + 1);\n\n\n\n // tokens are cheap to clone\n", "file_path": "cmlc/src/parser.rs", "rank": 42, "score": 108378.27981232354 }, { "content": "struct SpanStart<'s, 't> {\n\n start: usize,\n\n stream: &'s TokenStream<'t>,\n\n}\n\n\n\nimpl SpanStart<'_, '_> {\n\n fn end(&self) -> Span {\n\n let consumed = self.stream.consumed.get();\n\n assert!(consumed > 0);\n\n let last_token = consumed - 1;\n\n assert!(self.start <= last_token);\n\n\n\n self.stream.tokens[self.start]\n\n .span\n\n .to(self.stream.tokens[last_token].span)\n\n }\n\n}\n\n\n", "file_path": "cmlc/src/parser.rs", "rank": 43, 
"score": 104148.42217557649 }, { "content": "pub fn run<F>(\n\n regs: &mut RegBank,\n\n mem: &mut Memory,\n\n breakpoints: &Breakpoints,\n\n pause: &AtomicBool,\n\n trace: &mut F,\n\n) -> Result<Status, VmError>\n\nwhere\n\n F: Send + Sync + FnMut(Word),\n\n{\n\n while !pause.load(Ordering::Acquire) {\n\n if run_next(regs, mem, trace)? == Status::Halt {\n\n return Ok(Status::Halt);\n\n }\n\n\n\n if breakpoints.is_breakpoint(regs.next_instr_addr()) {\n\n return Ok(Status::Break);\n\n }\n\n }\n\n\n\n Ok(Status::Pause)\n\n}\n\n\n", "file_path": "simulator/src/vm.rs", "rank": 44, "score": 103518.32103314693 }, { "content": "pub fn start<F>(\n\n mem: Memory,\n\n breakpoints: Breakpoints,\n\n start_paused: bool,\n\n trace: F,\n\n) -> Result<SimHandle, SimError>\n\nwhere\n\n F: 'static + Send + Sync + FnMut(Word),\n\n{\n\n let signals = Arc::new(SimSignals {\n\n pause: AtomicBool::new(start_paused),\n\n is_running: AtomicBool::new(false),\n\n stop: AtomicBool::new(false),\n\n cont: Condvar::new(),\n\n });\n\n\n\n let state = Arc::new(Mutex::new(State {\n\n regs: RegBank::new(),\n\n mem,\n\n breakpoints,\n", "file_path": "simulator/src/simulation.rs", "rank": 45, "score": 103518.32103314693 }, { "content": "#[derive(Debug)]\n\nstruct Level<K, V>\n\nwhere\n\n K: Hash + Eq,\n\n{\n\n value: Option<V>,\n\n next: Option<ScopeMap<K, V>>,\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn path_insert_and_get() {\n\n // empty paths do nothing\n\n let mut map = ScopeMap::<&str, bool>::new();\n\n\n\n assert!(map.path_insert(vec![], true).is_none());\n\n assert!(map.path_get(vec![]).is_none());\n\n\n", "file_path": "cmlc/src/scope_map.rs", "rank": 46, "score": 102896.83522822124 }, { "content": "pub fn emit_asm(asm: Asm) -> String {\n\n let mut buf = String::new();\n\n let mut label = None;\n\n\n\n for cmd in asm.commands() {\n\n if cmd.is_label() {\n\n label = Some(cmd);\n\n } else {\n\n match label.take() {\n\n Some(label) => buf.push_str(&format!(\"{} 
{}\\n\", cmd, label)),\n\n None => buf.push_str(&format!(\"{}\\n\", cmd.to_string())),\n\n }\n\n }\n\n }\n\n\n\n buf\n\n}\n\n\n", "file_path": "cmlc/src/emit.rs", "rank": 47, "score": 95194.57095931964 }, { "content": "type Printer = linefeed::Interface<linefeed::DefaultTerminal>;\n\n\n", "file_path": "simulator/src/main.rs", "rank": 48, "score": 95190.07422638962 }, { "content": "fn not(instr: Word, regs: &mut RegBank) -> Result<Status, ErrorKind> {\n\n let dst = Reg::from_word(instr, RegPos::Dst)?;\n\n let src = Reg::from_word(instr, RegPos::Arg1)?;\n\n\n\n let value = !regs[src];\n\n regs.set(dst, value);\n\n Ok(Status::Pause)\n\n}\n\n\n", "file_path": "simulator/src/vm.rs", "rank": 49, "score": 84514.28236582363 }, { "content": "fn jmp(instr: Word, regs: &mut RegBank) -> Result<Status, ErrorKind> {\n\n let addr = regs[Reg::from_word(instr, RegPos::Arg1)?];\n\n regs.next_instr_addr = addr.wrapping_add(regs[Reg::AddrOffset]);\n\n Ok(Status::Pause)\n\n}\n\n\n", "file_path": "simulator/src/vm.rs", "rank": 50, "score": 83974.35006043964 }, { "content": "fn set(instr: Word, regs: &mut RegBank) -> Result<Status, ErrorKind> {\n\n let dst = Reg::from_word(instr, RegPos::Dst)?;\n\n\n\n let value = immediate_from_instr(instr, 1);\n\n regs.set(dst, value);\n\n Ok(Status::Pause)\n\n}\n\n\n", "file_path": "simulator/src/vm.rs", "rank": 51, "score": 83974.35006043964 }, { "content": "fn copy(instr: Word, regs: &mut RegBank) -> Result<Status, ErrorKind> {\n\n let dst = Reg::from_word(instr, RegPos::Dst)?;\n\n let src = Reg::from_word(instr, RegPos::Arg1)?;\n\n\n\n let value = regs[src];\n\n regs.set(dst, value);\n\n Ok(Status::Pause)\n\n}\n\n\n", "file_path": "simulator/src/vm.rs", "rank": 52, "score": 83974.35006043964 }, { "content": "fn jmp_rel(instr: Word, regs: &mut RegBank) -> Result<Status, ErrorKind> {\n\n let offset = immediate_from_instr_signed(instr, 0);\n\n // subtract the word len, because the instr_addr already points at the next instruction\n\n 
regs.next_instr_addr = regs.next_instr_addr.wrapping_add(offset) - WORD_BYTES;\n\n Ok(Status::Pause)\n\n}\n\n\n", "file_path": "simulator/src/vm.rs", "rank": 53, "score": 83444.50287950507 }, { "content": "fn compile(file_map: &FileMap, emit_asm: bool) -> Result<Vec<u8>, Spanned<Error>> {\n\n let tokens = lexer::lex(file_map.src()).map_err(|spanned| spanned.map(Error::Lex))?;\n\n let ast = parser::parse(&tokens).map_err(|spanned| spanned.map(Error::Parse))?;\n\n let typed_ast = typecheck::typecheck(ast).map_err(|spanned| spanned.map(Error::Type))?;\n\n let asm = codegen::gen_asm(typed_ast).map_err(|spanned| spanned.map(Error::Codegen))?;\n\n\n\n if emit_asm {\n\n Ok(emit::emit_asm(asm).into_bytes())\n\n } else {\n\n Ok(emit::emit_object(asm))\n\n }\n\n}\n\n\n", "file_path": "cmlc/src/main.rs", "rank": 54, "score": 81435.76561555172 }, { "content": "/// Converts a simple text file into a binary image that can be loaded by the vm.\n\n/// Each line is either empty, start with a `#` and is ignored, or contains a word\n\n/// formatted in binary. 
Whitespace is ignored.\n\npub fn assemble<R, W>(src: R, mut dst: W) -> Result<(), AsmError>\n\nwhere\n\n R: Read + BufRead,\n\n W: Write,\n\n{\n\n for line in src.lines() {\n\n let line = line.map_err(AsmError::Io)?;\n\n\n\n // strip line comments\n\n let line = &line[..line.find(\"#\").unwrap_or(line.len())];\n\n let line = line.trim().replace(char::is_whitespace, \"\");\n\n\n\n if line.is_empty() {\n\n continue;\n\n }\n\n\n\n let word = Word::from_str_radix(&line, 2).map_err(AsmError::InvalidWord)?;\n\n let mut bytes = [0; 4];\n\n LittleEndian::write_u32(&mut bytes, word);\n\n dst.write_all(&bytes).map_err(AsmError::Io)?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "simulator/src/asm.rs", "rank": 55, "score": 80584.02240877989 }, { "content": "fn cmp<F>(instr: Word, op: F, regs: &mut RegBank) -> Result<Status, ErrorKind>\n\nwhere\n\n F: FnOnce(Word, Word) -> bool,\n\n{\n\n let lhs = Reg::from_word(instr, RegPos::Arg1)?;\n\n let rhs = Reg::from_word(instr, RegPos::Arg2)?;\n\n\n\n regs.cmp_flag = op(regs[lhs], regs[rhs]);\n\n Ok(Status::Pause)\n\n}\n\n\n", "file_path": "simulator/src/vm.rs", "rank": 56, "score": 78320.52025404491 }, { "content": "pub fn find_remove<T, F>(vec: &mut Vec<T>, mut pred: F) -> Option<T>\n\nwhere\n\n F: FnMut(&T) -> bool,\n\n{\n\n vec.iter()\n\n .enumerate()\n\n .find(|(_, elem)| pred(elem))\n\n .map(|(i, _)| i)\n\n .map(|i| vec.remove(i))\n\n}\n", "file_path": "cmlc/src/support.rs", "rank": 57, "score": 77224.36604900852 }, { "content": "fn ensure_is_castable(ty: &TypeRef, type_env: &TypeEnv) -> Result<(), TypeError> {\n\n let (ty_ref, ty) = type_env.find_type(ty);\n\n\n\n match *ty {\n\n Type::Int | Type::I32 | Type::U32 | Type::Ptr(_) | Type::ConstPtr(_) | Type::MutPtr(_) => {\n\n Ok(())\n\n }\n\n _ => Err(TypeError::TypeIsNotCastable(ty_ref.desc())),\n\n }\n\n}\n\n\n", "file_path": "cmlc/src/typecheck.rs", "rank": 58, "score": 76424.49481791606 }, { "content": "fn binary_op<F, R>(instr: Word, op: F, regs: &mut RegBank) -> 
Result<Status, ErrorKind>\n\nwhere\n\n F: FnOnce(Word, Word) -> R,\n\n R: Lift<Word, ErrorKind>,\n\n{\n\n let dst = Reg::from_word(instr, RegPos::Dst)?;\n\n let lhs = Reg::from_word(instr, RegPos::Arg1)?;\n\n let rhs = Reg::from_word(instr, RegPos::Arg2)?;\n\n\n\n let value = op(regs[lhs], regs[rhs]);\n\n regs.set(dst, R::lift(value)?);\n\n Ok(Status::Pause)\n\n}\n\n\n", "file_path": "simulator/src/vm.rs", "rank": 59, "score": 76222.9475925102 }, { "content": "fn bind_primitives(type_env: &mut TypeEnv, type_bindings: &mut ScopeMap<Ident, TypeRef>) {\n\n type_bindings.insert(Ident::new(\"bool\"), type_env.insert(Type::Bool));\n\n type_bindings.insert(Ident::new(\"i32\"), type_env.insert(Type::I32));\n\n type_bindings.insert(Ident::new(\"u32\"), type_env.insert(Type::U32));\n\n type_bindings.insert(Ident::new(\"str\"), type_env.insert(Type::Str));\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::lexer;\n\n use crate::parser;\n\n\n\n #[test]\n\n fn check_syntax_example() {\n\n let tokens = lexer::lex(include_str!(\"../tests/syntax.cml\")).unwrap();\n\n let ast = parser::parse(&tokens).unwrap();\n\n typecheck(ast).unwrap();\n\n }\n\n\n", "file_path": "cmlc/src/typecheck.rs", "rank": 60, "score": 75703.37376267459 }, { "content": "fn verify_entry_point(def: &FnDef, type_env: &TypeEnv) -> Result<(), Spanned<TypeError>> {\n\n let ret_ty = type_env.find_type(&def.ret_ty).1;\n\n\n\n if !def.params.is_empty() || !ret_ty.is_unit() {\n\n Err(Spanned::new(\n\n TypeError::EntryPointTypeMismatch(Rc::new(def.desc())),\n\n def.name.span(),\n\n ))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "cmlc/src/typecheck/lint.rs", "rank": 61, "score": 72598.95509143875 }, { "content": "fn function_type_decl(tokens: &TokenStream<'_>) -> Result<TypeDecl, Spanned<ParseError>> {\n\n token(tokens, Token::Keyword(Keyword::Fn), \"fn\")?;\n\n let mut param_tys = Vec::new();\n\n\n\n while tokens.peek() != Some(Token::Arrow) {\n\n 
param_tys.push(type_decl(tokens)?);\n\n\n\n match tokens.peek() {\n\n Some(Token::Comma) => {\n\n tokens.next();\n\n }\n\n _ => break,\n\n }\n\n }\n\n\n\n token(tokens, Token::Arrow, \"->\")?;\n\n let ret_ty = type_decl(tokens)?.map(Box::new);\n\n\n\n Ok(TypeDecl::Function(FunctionDecl { param_tys, ret_ty }))\n\n}\n\n\n", "file_path": "cmlc/src/parser.rs", "rank": 62, "score": 68237.37604512899 }, { "content": "fn array_type_decl(tokens: &TokenStream<'_>) -> Result<TypeDecl, Spanned<ParseError>> {\n\n token(tokens, Token::OpenBracket, \"[\")?;\n\n let ty = type_decl(tokens)?.map(Box::new);\n\n token(tokens, Token::Semicolon, \";\")?;\n\n\n\n let len = match tokens.next() {\n\n Some(Token::Lit(Lit::Int(len))) => Spanned::new(len, tokens.last_token_span()),\n\n Some(_) => {\n\n return Err(Spanned::new(\n\n ParseError::unexpected_token().expected(\"array length\"),\n\n tokens.last_token_span(),\n\n ));\n\n }\n\n None => {\n\n return Err(Spanned::new(\n\n ParseError::eof().expected(\"array length\"),\n\n tokens.eof_span(),\n\n ));\n\n }\n\n };\n\n\n\n token(tokens, Token::CloseBracket, \"]\")?;\n\n Ok(TypeDecl::Array(ArrayDecl { ty, len }))\n\n}\n\n\n", "file_path": "cmlc/src/parser.rs", "rank": 63, "score": 68237.37604512899 }, { "content": "fn tuple_type_decl(tokens: &TokenStream<'_>) -> Result<TypeDecl, Spanned<ParseError>> {\n\n token(tokens, Token::OpenParen, \"(\")?;\n\n let mut tys = Vec::new();\n\n\n\n loop {\n\n match tokens.peek() {\n\n Some(Token::CloseParen) => {\n\n tokens.next();\n\n return Ok(TypeDecl::Tuple(tys));\n\n }\n\n Some(_) => (),\n\n None => {\n\n return Err(Spanned::new(\n\n ParseError::eof().expected(\")\"),\n\n tokens.eof_span(),\n\n ));\n\n }\n\n }\n\n\n\n tys.push(type_decl(tokens)?);\n", "file_path": "cmlc/src/parser.rs", "rank": 64, "score": 68237.37604512899 }, { "content": "fn name_type_decl(tokens: &TokenStream<'_>) -> Result<TypeDecl, Spanned<ParseError>> {\n\n ident(tokens)\n\n .map(|ident| TypeDecl::Name(ident.value))\n\n 
.map_err(|spanned| spanned.map(|err| err.set_expected(\"type name\")))\n\n}\n\n\n", "file_path": "cmlc/src/parser.rs", "rank": 65, "score": 68237.37604512899 }, { "content": "fn hole_type_decl(tokens: &TokenStream<'_>) -> Result<TypeDecl, Spanned<ParseError>> {\n\n token(tokens, Token::Underscore, \"_\")?;\n\n Ok(TypeDecl::Hole)\n\n}\n\n\n", "file_path": "cmlc/src/parser.rs", "rank": 66, "score": 68237.37604512899 }, { "content": "fn type_decl(tokens: &TokenStream<'_>) -> Result<Spanned<TypeDecl>, Spanned<ParseError>> {\n\n one_of(\n\n tokens,\n\n &[\n\n hole_type_decl,\n\n name_type_decl,\n\n ptr_type_decl,\n\n array_type_decl,\n\n function_type_decl,\n\n tuple_type_decl,\n\n ],\n\n )\n\n}\n\n\n", "file_path": "cmlc/src/parser.rs", "rank": 67, "score": 68237.37604512899 }, { "content": "fn ptr_type_decl(tokens: &TokenStream<'_>) -> Result<TypeDecl, Spanned<ParseError>> {\n\n token(tokens, Token::Star, \"*\")?;\n\n\n\n match tokens.peek() {\n\n Some(Token::Keyword(Keyword::Mut)) => {\n\n tokens.next();\n\n Ok(TypeDecl::MutPtr(type_decl(tokens)?.map(Box::new)))\n\n }\n\n _ => Ok(TypeDecl::ConstPtr(type_decl(tokens)?.map(Box::new))),\n\n }\n\n}\n\n\n", "file_path": "cmlc/src/parser.rs", "rank": 68, "score": 68237.37604512899 }, { "content": "fn type_from_decl(\n\n desc: Spanned<&TypeDecl>,\n\n is_type_def: bool,\n\n type_env: &mut TypeEnv,\n\n type_bindings: &ScopeMap<Ident, TypeRef>,\n\n) -> Result<TypeRef, Spanned<TypeError>> {\n\n let Spanned { value: desc, span } = desc;\n\n\n\n let ty = match *desc {\n\n TypeDecl::Hole => {\n\n if is_type_def {\n\n return Err(Spanned::new(TypeError::HoleInTypeDef, span));\n\n } else {\n\n type_env.insert(Type::Var)\n\n }\n\n }\n\n TypeDecl::Name(ref ident) => type_bindings\n\n .get(ident)\n\n .ok_or_else(|| {\n\n Spanned::new(\n", "file_path": "cmlc/src/typecheck.rs", "rank": 81, "score": 61985.98956714611 }, { "content": "fn canonicalize_type_ref(\n\n ty_ref: &mut TypeRef,\n\n type_env: &TypeEnv,\n\n types: &mut 
FnvHashMap<TypeRef, Type>,\n\n span: Span,\n\n) -> Result<(), Spanned<TypeError>> {\n\n let (canonical_ref, ty) = type_env.find_type(ty_ref);\n\n let mut ty = ty.clone();\n\n\n\n match &mut ty {\n\n Type::Var | Type::PartialRecord(_) | Type::Ptr(_) => Err(Spanned::new(\n\n TypeError::CannotInfer(canonical_ref.desc()),\n\n span,\n\n )),\n\n // if the int type does not matter, default to i32\n\n Type::Int => {\n\n let canonical_ref = canonical_ref.with_desc(&Type::I32);\n\n\n\n types\n\n .entry(canonical_ref.clone())\n", "file_path": "cmlc/src/typecheck/lint.rs", "rank": 82, "score": 60734.69519159379 }, { "content": "def trim_word_gap(commands):\n", "file_path": "Assembler/trim.py", "rank": 83, "score": 60134.83433299328 }, { "content": "def test_trim_word_gap():\n", "file_path": "Assembler/tests/test_trim.py", "rank": 84, "score": 58384.608589041294 }, { "content": "fn type_def(tokens: &TokenStream<'_>) -> Result<Item, Spanned<ParseError>> {\n\n token(tokens, Token::Keyword(Keyword::Type), \"type\")?;\n\n let name = ident(tokens)?;\n\n token(tokens, Token::Equal, \"=\")?;\n\n\n\n match tokens.peek() {\n\n Some(Token::OpenBrace) => {\n\n let span_start = tokens.start_span();\n\n let record_def = record_def(name, tokens)?;\n\n let span = span_start.end();\n\n\n\n Ok(Item::TypeDef(TypeDef::RecordDef(Spanned::new(\n\n record_def, span,\n\n ))))\n\n }\n\n Some(_) => {\n\n let span_start = tokens.start_span();\n\n let alias = type_decl(tokens)?;\n\n let span = span_start.end();\n\n token(tokens, Token::Semicolon, \";\")?;\n", "file_path": "cmlc/src/parser.rs", "rank": 85, "score": 57266.618084960326 }, { "content": "fn member_access(tokens: &TokenStream<'_>) -> Result<Spanned<Expr>, Spanned<ParseError>> {\n\n let span_start = tokens.start_span();\n\n let mut value = atom_or_group(tokens)?;\n\n\n\n while tokens.peek() == Some(Token::Dot) || tokens.peek() == Some(Token::Arrow) {\n\n let is_deref = match tokens.next() {\n\n Some(Token::Dot) => false,\n\n Some(Token::Arrow) 
=> true,\n\n _ => unreachable!(),\n\n };\n\n\n\n // desugar the `->` member access after deref\n\n if is_deref {\n\n value = Spanned::new(\n\n Expr::un_op(UnOp {\n\n op: UnOpKind::Deref,\n\n operand: value.map(Box::new),\n\n }),\n\n span_start.end(),\n\n )\n", "file_path": "cmlc/src/parser.rs", "rank": 86, "score": 56755.623061712824 }, { "content": " def __rand__(self, other ):\n\n \"\"\"\n\n Implementation of & operator when left operand is not a C{L{ParserElement}}\n\n \"\"\"\n\n if isinstance( other, basestring ):\n\n other = ParserElement._literalStringClass( other )\n\n if not isinstance( other, ParserElement ):\n\n warnings.warn(\"Cannot combine element of type %s with ParserElement\" % type(other),\n\n SyntaxWarning, stacklevel=2)\n\n return None\n", "file_path": "Assembler/venv/Lib/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pyparsing.py", "rank": 87, "score": 56226.80349068171 }, { "content": "\"\"\"\n\nAll of the Enums that are used throughout the chardet package.\n\n\n\n:author: Dan Blanchard ([email protected])\n\n\"\"\"\n\n\n\n\n\nclass InputState(object):\n\n \"\"\"\n\n This enum represents the different states a universal detector can be in.\n\n \"\"\"\n\n PURE_ASCII = 0\n\n ESC_ASCII = 1\n\n HIGH_BYTE = 2\n\n\n\n\n\nclass LanguageFilter(object):\n\n \"\"\"\n\n This enum represents the different language filters we can apply to a\n\n ``UniversalDetector``.\n\n \"\"\"\n\n CHINESE_SIMPLIFIED = 0x01\n\n CHINESE_TRADITIONAL = 0x02\n\n JAPANESE = 0x04\n\n KOREAN = 0x08\n\n NON_CJK = 0x10\n\n ALL = 0x1F\n\n CHINESE = CHINESE_SIMPLIFIED | CHINESE_TRADITIONAL\n\n CJK = CHINESE | JAPANESE | KOREAN\n\n\n\n\n\nclass ProbingState(object):\n\n \"\"\"\n\n This enum represents the different states a prober can be in.\n\n \"\"\"\n\n DETECTING = 0\n\n FOUND_IT = 1\n\n NOT_ME = 2\n\n\n\n\n\nclass MachineState(object):\n\n \"\"\"\n\n This enum represents the different states a state machine can be in.\n\n \"\"\"\n\n START = 0\n\n ERROR = 1\n\n ITS_ME = 
2\n\n\n\n\n\nclass SequenceLikelihood(object):\n\n \"\"\"\n\n This enum represents the likelihood of a character following the previous one.\n\n \"\"\"\n\n NEGATIVE = 0\n\n UNLIKELY = 1\n\n LIKELY = 2\n\n POSITIVE = 3\n\n\n\n @classmethod\n\n def get_num_categories(cls):\n\n \"\"\":returns: The number of likelihood categories in the enum.\"\"\"\n\n return 4\n\n\n\n\n\nclass CharacterCategory(object):\n\n \"\"\"\n\n This enum represents the different categories language models for\n\n ``SingleByteCharsetProber`` put characters into.\n\n\n\n Anything less than CONTROL is considered a letter.\n\n \"\"\"\n\n UNDEFINED = 255\n\n LINE_BREAK = 254\n\n SYMBOL = 253\n\n DIGIT = 252\n\n CONTROL = 251\n", "file_path": "Assembler/venv/Lib/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/chardet/enums.py", "rank": 88, "score": 56220.658837022245 }, { "content": "\"\"\"For neatly implementing static typing in pip.\n\n\n\n`mypy` - the static type analysis tool we use - uses the `typing` module, which\n\nprovides core functionality fundamental to mypy's functioning.\n\n\n\nGenerally, `typing` would be imported at runtime and used in that fashion -\n\nit acts as a no-op at runtime and does not have any run-time overhead by\n\ndesign.\n\n\n\nAs it turns out, `typing` is not vendorable - it uses separate sources for\n\nPython 2/Python 3. 
Thus, this codebase can not expect it to be present.\n\nTo work around this, mypy allows the typing import to be behind a False-y\n\noptional to prevent it from running at runtime and type-comments can be used\n\nto remove the need for the types to be accessible directly during runtime.\n\n\n\nThis module provides the False-y guard in a nicely named fashion so that a\n\ncurious maintainer can reach here to read this.\n\n\n\nIn pip, all static-typing related imports should be guarded as follows:\n\n\n\n from pip.utils.typing import MYPY_CHECK_RUNNING\n\n\n\n if MYPY_CHECK_RUNNING:\n\n from typing import ...\n\n\n\nRef: https://github.com/python/mypy/issues/3216\n\n\"\"\"\n\n\n\nMYPY_CHECK_RUNNING = False\n", "file_path": "Assembler/venv/Lib/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/utils/typing.py", "rank": 89, "score": 56209.462422129895 }, { "content": "class WordStart(_PositionToken):\n\n \"\"\"\n\n Matches if the current position is at the beginning of a Word, and\n\n is not preceded by any character in a given set of C{wordChars}\n\n (default=C{printables}). To emulate the C{\\b} behavior of regular expressions,\n\n use C{WordStart(alphanums)}. 
C{WordStart} will also match at the beginning of\n\n the string being parsed, or at the beginning of a line.\n\n \"\"\"\n\n def __init__(self, wordChars = printables):\n\n super(WordStart,self).__init__()\n\n self.wordChars = set(wordChars)\n\n self.errmsg = \"Not at the start of a word\"\n\n\n\n def parseImpl(self, instring, loc, doActions=True ):\n\n if loc != 0:\n\n if (instring[loc-1] in self.wordChars or\n\n instring[loc] not in self.wordChars):\n\n raise ParseException(instring, loc, self.errmsg, self)\n", "file_path": "Assembler/venv/Lib/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pyparsing.py", "rank": 90, "score": 55692.703346893744 }, { "content": "class WordEnd(_PositionToken):\n\n \"\"\"\n\n Matches if the current position is at the end of a Word, and\n\n is not followed by any character in a given set of C{wordChars}\n\n (default=C{printables}). To emulate the C{\\b} behavior of regular expressions,\n\n use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of\n\n the string being parsed, or at the end of a line.\n\n \"\"\"\n\n def __init__(self, wordChars = printables):\n\n super(WordEnd,self).__init__()\n\n self.wordChars = set(wordChars)\n\n self.skipWhitespace = False\n\n self.errmsg = \"Not at the end of a word\"\n\n\n\n def parseImpl(self, instring, loc, doActions=True ):\n\n instrlen = len(instring)\n\n if instrlen>0 and loc<instrlen:\n\n if (instring[loc] in self.wordChars or\n\n instring[loc-1] not in self.wordChars):\n\n raise ParseException(instring, loc, self.errmsg, self)\n", "file_path": "Assembler/venv/Lib/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pyparsing.py", "rank": 91, "score": 55692.703346893744 }, { "content": " def parseImpl( self, instring, loc, doActions=True ):\n\n if self.re:\n\n result = self.re.match(instring,loc)\n\n if not result:\n\n raise ParseException(instring, loc, self.errmsg, self)\n\n\n\n loc = result.end()\n\n return loc, result.group()\n\n\n\n if not(instring[ loc ] in self.initChars):\n\n 
raise ParseException(instring, loc, self.errmsg, self)\n\n\n\n start = loc\n\n loc += 1\n\n instrlen = len(instring)\n\n bodychars = self.bodyChars\n\n maxloc = start + self.maxLen\n\n maxloc = min( maxloc, instrlen )\n\n while loc < maxloc and instring[loc] in bodychars:\n\n loc += 1\n\n\n\n throwException = False\n\n if loc - start < self.minLen:\n\n throwException = True\n\n if self.maxSpecified and loc < instrlen and instring[loc] in bodychars:\n\n throwException = True\n\n if self.asKeyword:\n\n if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars):\n\n throwException = True\n\n\n\n if throwException:\n\n raise ParseException(instring, loc, self.errmsg, self)\n\n\n", "file_path": "Assembler/venv/Lib/site-packages/pip-10.0.1-py3.7.egg/pip/_vendor/pyparsing.py", "rank": 92, "score": 55666.12040634908 }, { "content": "use crate::memory::{Memory, Word, OP_CODE_BITS, REG_REF_BITS, WORD_BITS, WORD_BYTES};\n\nuse crate::support::to_hex;\n\nuse log::trace;\n\nuse num_enum::CustomTryInto;\n\nuse rand;\n\nuse std::fmt::{self, Display};\n\nuse std::ops::{BitAnd, BitOr, BitXor, Index};\n\nuse std::sync::atomic::{AtomicBool, Ordering};\n\nuse strum::IntoEnumIterator;\n\nuse strum_macros::{Display, EnumIter};\n\n\n\n#[derive(Debug)]\n\npub struct VmError {\n\n pub at: Option<Word>,\n\n pub kind: ErrorKind,\n\n}\n\n\n\nimpl VmError {\n\n pub fn at(at: Word, kind: ErrorKind) -> VmError {\n\n VmError { at: Some(at), kind }\n", "file_path": "simulator/src/vm.rs", "rank": 93, "score": 27.964716953538158 }, { "content": " }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum ErrorKind {\n\n IllegalInstruction(Word),\n\n IllegalRegister(u8),\n\n UninitializedMemoryAccess(Word),\n\n ReadOnlyMemoryWriteAccess(Word),\n\n OutOfBoundsMemoryAccess(Word, usize),\n\n DivideByZero,\n\n}\n\n\n\npub struct RegBank {\n\n next_instr_addr: Word,\n\n regs: [Word; 34],\n\n cmp_flag: bool,\n", "file_path": "simulator/src/vm.rs", 
"rank": 94, "score": 27.652284779216604 }, { "content": "use crate::codegen::Asm;\n\nuse byteorder::{ByteOrder, LittleEndian};\n\nuse std::collections::HashMap;\n\nuse std::fmt::{self, Display};\n\nuse std::rc::Rc;\n\nuse strum_macros::EnumIter;\n\n\n\npub const JMP_REL_IMMEDIATE_MAX: u32 = 0b_111_1111_1111_1111_1111_1111_1111;\n\npub const JMP_REL_IF_IMMEDIATE_MAX: u32 = 0b_111_1111_1111_1111_1111_1111_1111;\n\npub const SET_IMMEDIATE_MAX: u32 = 0b_1_1111_1111_1111_1111_1111;\n\npub const LOAD_IMMEDIATE_MAX: u32 = 0b_111_1111_1111_1111;\n\npub const STORE_IMMEDIATE_MAX: u32 = 0b_111_1111_1111_1111;\n\n\n", "file_path": "cmlc/src/emit.rs", "rank": 95, "score": 20.073681379165894 }, { "content": "use crate::span::{Span, Spanned};\n\nuse crate::typecheck::{unify::TypeEnv, Type, TypeDesc, TypeRef};\n\nuse fnv::FnvHashMap;\n\nuse std::fmt::{self, Display, Write};\n\nuse std::rc::Rc;\n\n\n\n#[derive(Debug)]\n\npub struct TypedAst {\n\n pub items: Vec<Spanned<Item>>,\n\n pub types: FnvHashMap<TypeRef, Type>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Ast {\n\n pub items: Vec<Spanned<Item>>,\n\n pub type_env: TypeEnv,\n\n}\n\n\n\nimpl Ast {\n\n pub fn new(items: Vec<Spanned<Item>>) -> Ast {\n", "file_path": "cmlc/src/ast.rs", "rank": 96, "score": 19.794071184277968 }, { "content": "}\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone)]\n\npub struct TypeId(usize);\n\n\n\nimpl TypeId {\n\n pub fn new() -> TypeId {\n\n static NEXT_ID: AtomicUsize = AtomicUsize::new(0);\n\n\n\n let id = NEXT_ID.fetch_add(1, Ordering::AcqRel);\n\n\n\n if id == usize::max_value() {\n\n panic!(\"overflow for type ids\");\n\n }\n\n\n\n TypeId(id)\n\n }\n\n}\n\n\n", "file_path": "cmlc/src/typecheck.rs", "rank": 97, "score": 18.954626305703925 }, { "content": "#[derive(Debug, Clone, Copy, Sub, Add, AddAssign, Ord, PartialOrd, Eq, PartialEq)]\n\npub struct RegOffset(pub u32);\n\n\n\nimpl Mul<u32> for RegOffset {\n\n type Output = RegOffset;\n\n\n\n fn mul(self, rhs: u32) -> RegOffset {\n\n 
RegOffset(self.0 * rhs)\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Eq)]\n\npub enum FieldIdent<'a> {\n\n Idx(usize),\n\n Name(&'a Ident),\n\n}\n\n\n\nimpl<'a> From<usize> for FieldIdent<'a> {\n\n fn from(idx: usize) -> FieldIdent<'a> {\n\n FieldIdent::Idx(idx)\n", "file_path": "cmlc/src/codegen/layout.rs", "rank": 98, "score": 18.377042157477135 }, { "content": " }\n\n\n\n pub fn desc(&self) -> Rc<TypeDesc> {\n\n Rc::clone(&self.1)\n\n }\n\n\n\n pub fn invalid() -> TypeRef {\n\n TypeRef(\n\n usize::max_value(),\n\n Rc::new(TypeDesc::Name(Ident::new(\"{invalid}\"))),\n\n )\n\n }\n\n}\n\n\n\nimpl PartialEq for TypeRef {\n\n fn eq(&self, rhs: &TypeRef) -> bool {\n\n self.0 == rhs.0\n\n }\n\n}\n\n\n", "file_path": "cmlc/src/typecheck.rs", "rank": 99, "score": 18.060208672313596 } ]
Rust
examples/minijava/src/enumabsyn.rs
chuckcscccl/rustlr
0e9794cca27589c25115325f69fb5b417a77400d
/* Abstract syntax for minijava (adopted from 2014 java program) Using internally generated RetTypeEnum */ #![allow(dead_code)] use rustlr::LBox; use crate::Expr::*; use crate::Stat::*; use crate::Declaration::*; #[derive(Debug)] pub enum Expr<'t> { Int(i32), Strlit(&'t str), Bool(bool), Var(&'t str), Thisptr, Binop(&'static str,LBox<Expr<'t>>,LBox<Expr<'t>>), Notexp(LBox<Expr<'t>>), Field(&'t str,LBox<Expr<'t>>), Newarray(LBox<Expr<'t>>), Newobj(&'t str), Callexp(LBox<Expr<'t>>,&'t str,Vec<LBox<Expr<'t>>>), Nothing, } impl<'t> Default for Expr<'t> { fn default()->Self {Nothing} } #[derive(Debug)] pub enum Stat<'t> { Whilest(LBox<Expr<'t>>,LBox<Stat<'t>>), Ifstat(LBox<Expr<'t>>,LBox<Stat<'t>>,LBox<Stat<'t>>), Vardecst(&'t str,&'t str,LBox<Expr<'t>>), Returnst(LBox<Expr<'t>>), Assignst(&'t str,LBox<Expr<'t>>), ArAssignst(LBox<Expr<'t>>,LBox<Expr<'t>>,LBox<Expr<'t>>), Callstat(LBox<Expr<'t>>,&'t str,Vec<LBox<Expr<'t>>>), Nopst, Blockst(Vec<LBox<Stat<'t>>>), } impl<'t> Default for Stat<'t> {fn default()->Self {Nopst} } #[derive(Debug)] pub struct VarDec<'t> { pub dname:&'t str, pub dtype:&'t str, pub initval:Expr<'t>, } impl<'t> Default for VarDec<'t> { fn default() -> Self { VarDec{dname:"",dtype:"",initval:Nothing} } } #[derive(Debug)] pub struct MethodDec<'t> { pub formals:Vec<LBox<VarDec<'t>>>, pub body: Vec<LBox<Stat<'t>>>, pub classname: &'t str, pub methodname: &'t str, } impl<'t> Default for MethodDec<'t> { fn default() -> Self { MethodDec{formals:Vec::new(),classname:"",methodname:"",body:Vec::new()} } } #[derive(Debug)] pub struct ClassDec<'t> { pub superclass:&'t str, pub classname:&'t str, pub vars: Vec<LBox<VarDec<'t>>>, pub methods: Vec<LBox<MethodDec<'t>>>, } impl<'t> Default for ClassDec<'t> { fn default()->Self { ClassDec{superclass:"Object",classname:"",vars:Vec::new(),methods:Vec::new()}} } #[derive(Debug)] pub enum Declaration<'t> { Mdec(MethodDec<'t>), Vdec(VarDec<'t>), Cdec(ClassDec<'t>), } #[derive(Debug)] pub struct Mainclass<'t> { pub 
classname:&'t str, pub argvname: &'t str, pub body : Stat<'t>, } impl<'t> Default for Mainclass<'t> { fn default()->Self { Mainclass {classname:"",argvname:"",body:Stat::default(),}} } #[derive(Debug)] pub struct Program<'t> { pub mainclass:LBox<Mainclass<'t>>, pub otherclasses: Vec<LBox<ClassDec<'t>>>, } impl<'t> Default for Program<'t> { fn default()->Self { Program {mainclass:LBox::default(), otherclasses:Vec::new()}} } pub fn separatedecs<'t>(mut ds:Vec<LBox<Declaration<'t>>>,vars:&mut Vec<LBox<VarDec<'t>>>,mths:&mut Vec<LBox<MethodDec<'t>>>) { while ds.len()>0 { let mut dec = ds.pop().unwrap(); match &mut *dec { Vdec(vd) => { let vdec = std::mem::replace(vd,VarDec::default()); vars.push(dec.transfer(vdec)); }, Mdec(md) => { let mdec = std::mem::replace(md,MethodDec::default()); mths.push(dec.transfer(mdec)); }, _ => {}, } } }
/* Abstract syntax for minijava (adopted from 2014 java program) Using internally generated RetTypeEnum */ #![allow(dead_code)] use rustlr::LBox; use crate::Expr::*; use crate::Stat::*; use crate::Declaration::*; #[derive(Debug)] pub enum Expr<'t> { Int(i32), Strlit(&'t str), Bool(bool), Var(&'t str), Thisptr, Binop(&'static str,LBox<Expr<'t>>,LBox<Expr<'t>>), Notexp(LBox<Expr<'t>>), Field(&'t str,LBox<Expr<'t>>), Newarray(LBox<Expr<'t>>), Newobj(&'t str), Callexp(LBox<Expr<'t>>,&'t str,Vec<LBox<Expr<'t>>>), Nothing, } impl<'t> Default for Expr<'t> { fn default()->Self {Nothing} } #[derive(Debug)] pub enum Stat<'t> { Whilest(LBox<Expr<'t>>,LBox<Stat<'t>>), Ifstat(LBox<Expr<'t>>,LBox<Stat<'t>>,LBox<Stat<'t>>), Vardecst(&'t str,&'t str,LBox<Expr<'t>>), Returnst(LBox<Expr<'t>>), Assignst(&'t str,LBox<Expr<'t>>), ArAssignst(LBox<Expr<'t>>,LBox<Expr<'t>>,LBox<Expr<'t>>), Callstat(LBox<Expr<'t>>,&'t str,Vec<LBox<Expr<'t>>>), Nopst, Blockst(Vec<LBox<Stat<'t>>>), } impl<'t> Default for Stat<'t> {fn default()->Self {Nopst} } #[derive(Debug)] pub struct VarDec<'t> { pub dname:&'t str, pub dtype:&'t str, pub initval:Expr<'t>, } impl<'t> Default for VarDec<'t> { fn default() -> Self { VarDec{dname:"",dtype:"",initval:Nothing} } } #[derive(Debug)] pub struct MethodDec<'t> { pub formals:Vec<LBox<VarDec<'t>>>, pub body: Vec<LBox<Stat<'t>>>, pub classname: &'t str, pub methodname: &'t str, } impl<'t> Default for MethodDec<'t> { fn default() -> Self { MethodDec{formals:Vec::new(),classname:"",methodname:"",body:Vec::new()} } } #[derive(Debug)] pub struct ClassDec<'t> { pub superclass:&'t str, pub classname:&'t str, pub vars: Vec<LBox<VarDec<'t>>>, pub methods: Vec<LBox<MethodDec<'t>>>, } impl<'t> Default for ClassDec<'t> { fn default()->Self { ClassDec{superclass:"Object",classname:"",vars:Vec::new(),methods:Vec::new()}} } #[derive(Debug)] pub enum Declaration<'t> { Mdec(MethodDec<'t>), Vdec(VarDec<'t>), Cdec(ClassDec<'t>), } #[derive(Debug)] pub struct Mainclass<'t> { pub 
classname:&'t str, pub argvname: &'t str, pub body : Stat<'t>, } impl<'t> Default for Mainclass<'t> { fn default()->Self { Mainclass {classname:"",argvname:"",body:Stat::default(),}} } #[derive(Debug)] pub struct Program<'t> { pub mainclass:LBox<Mainclass<'t>>, pub otherclasses: Vec<LBox<ClassDec<'t>>>, } impl<'t> Default for Program<'t> { fn default()->Self { Program {mainclass:LBox::default(), otherclasses:Vec::new()}} } pub fn separatedecs<'t>(mut ds:Vec<LBox<Declaration<'t>>>,vars:&mut Vec<LBox<VarDec<'t>>>,mths:&mut Vec<LBox<MethodDec<'t>>>) { while ds.len()>0 { let mut dec = ds.pop().unwrap(); match &mut *dec { Vdec(vd) => { let vdec = std::mem::replace(vd,VarDec::default()); va
rs.push(dec.transfer(vdec)); }, Mdec(md) => { let mdec = std::mem::replace(md,MethodDec::default()); mths.push(dec.transfer(mdec)); }, _ => {}, } } }
function_block-function_prefixed
[ { "content": "pub fn parse_train_with<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>, lexer:&mut mjenumlexer<'lt>, parserpath:&str) -> Result<Program<'lt>,Program<'lt>>\n\n{\n\n if let RetTypeEnum::Enumvariant_0(_xres_) = parser.parse_train(lexer,parserpath) {\n\n if !parser.error_occurred() {Ok(_xres_)} else {Err(_xres_)}\n\n } else { Err(<Program<'lt>>::default())}\n\n}//parse_train_with public function\n\n\n\n//Enum for return values \n\npub enum RetTypeEnum<'lt> {\n\n Enumvariant_11(Stat<'lt>),\n\n Enumvariant_3(i32),\n\n Enumvariant_10(Vec<LBox<VarDec<'lt>>>),\n\n Enumvariant_5(ClassDec<'lt>),\n\n Enumvariant_2(bool),\n\n Enumvariant_4(Mainclass<'lt>),\n\n Enumvariant_0(Program<'lt>),\n\n Enumvariant_14(Vec<LBox<Expr<'lt>>>),\n\n Enumvariant_6(Vec<LBox<ClassDec<'lt>>>),\n\n Enumvariant_9(Vec<LBox<Declaration<'lt>>>),\n\n Enumvariant_13(Expr<'lt>),\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 0, "score": 350798.7972295936 }, { "content": "pub fn parse_with<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>, lexer:&mut mjenumlexer<'lt>) -> Result<Program<'lt>,Program<'lt>>\n\n{\n\n if let RetTypeEnum::Enumvariant_0(_xres_) = parser.parse(lexer) {\n\n if !parser.error_occurred() {Ok(_xres_)} else {Err(_xres_)}\n\n } else { Err(<Program<'lt>>::default())}\n\n}//parse_with public function\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 1, "score": 316505.15929107263 }, { "content": "fn _semaction_rule_1_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Mainclass<'lt> {\n\nlet mut _item16_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item15_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut thebody = if let RetTypeEnum::Enumvariant_12(_x_12)=parser.popstack().value { _x_12 } else {<Vec<LBox<Stat<'lt>>>>::default()}; let mut _item13_ = if let 
RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item12_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut an = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; let mut _item10_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item9_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item8_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item7_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item6_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item5_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item4_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item3_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item2_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut cn = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; \n\n Mainclass{classname:cn,\n\n argvname:an,\n\n body: Blockst(thebody),\n\n\t }\n\n }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 2, "score": 312077.099567788 }, { "content": "fn _semaction_rule_20_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> 
&'lt str {\n\nlet mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; return \"int\"; }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 3, "score": 311543.4670437023 }, { "content": "fn _semaction_rule_22_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> &'lt str {\n\nlet mut c = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; c }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 4, "score": 311543.4670437023 }, { "content": "fn _semaction_rule_5_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> &'lt str {\n\nlet mut sup = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; sup }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 5, "score": 311543.4670437023 }, { "content": "fn _semaction_rule_19_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> &'lt str {\n\nlet mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; return \"String\"; }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 6, "score": 311543.4670437023 }, { "content": "fn _semaction_rule_18_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> &'lt str {\n\nlet mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; return \"boolean\"; }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 7, "score": 311543.4670437023 }, { "content": "fn _semaction_rule_21_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> &'lt str {\n\nlet mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else 
{<Program<'lt>>::default()}; return \"void\"; }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 8, "score": 311543.4670437023 }, { "content": "fn _semaction_rule_17_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> &'lt str {\n\nlet mut _item2_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; return \"int[]\"; }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 9, "score": 311543.4670437023 }, { "content": "fn _semaction_rule_6_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> &'lt str {\n\n \"Object\" }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 10, "score": 311543.4670437023 }, { "content": "fn _semaction_rule_9_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> MethodDec<'lt> {\n\nlet mut _item8_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut mbody = if let RetTypeEnum::Enumvariant_12(_x_12)=parser.popstack().value { _x_12 } else {<Vec<LBox<Stat<'lt>>>>::default()}; let mut _item6_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item5_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut args = if let RetTypeEnum::Enumvariant_10(_x_10)=parser.popstack().value { _x_10 } else {<Vec<LBox<VarDec<'lt>>>>::default()}; let mut _item3_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut name = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; let 
mut ty = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; \n\n MethodDec{ formals:args,\n\n body: mbody,\n\n classname:ty,\n\n\t methodname:name, }\n\n }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 11, "score": 311124.9366133364 }, { "content": "fn _semaction_rule_8_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> VarDec<'lt> {\n\nlet mut _item4_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut e = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut _item2_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut v = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; let mut t = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; VarDec{dname:v,dtype:t,initval:e}}\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 12, "score": 311086.7313268429 }, { "content": "fn _semaction_rule_7_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> VarDec<'lt> {\n\nlet mut _item2_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut v = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; let mut t = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; VarDec{dname:v,dtype:t,initval:Nothing,} }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 13, "score": 311086.7313268429 }, { "content": "// separates a list containing both variable and method declarations as \n\n// 
\"constructs\" into two separate lists; for use when constructing a class\n\n// declaration.\n\npub fn separatedecs(mut ds:Vec<LBox<Construct>>,vars:&mut Vec<LBox<VarDec>>,mths:&mut Vec<LBox<MethodDec>>)\n\n{\n\n while ds.len()>0\n\n {\n\n let mut dec = ds.pop().unwrap(); // this is an lbox\n\n match &mut *dec {\n\n Vdec(vd) => {\n\n let vdec = std::mem::replace(vd,VarDec::default());\n\n vars.push(dec.transfer(vdec)); // transfers lexical info to new lbox\n\n },\n\n Method(md) => {\n\n let mdec = std::mem::replace(md,MethodDec::default());\n\n mths.push(dec.transfer(mdec));\n\n },\n\n _ => {},\n\n }//match\n\n }\n\n}//separatedecs\n", "file_path": "examples/minijava/src/lbasyntax.rs", "rank": 14, "score": 308462.9627385213 }, { "content": "// separates a list containing both variable and method declarations as \n\n// \"constructs\" into two separate lists; for use when constructing a class\n\n// declaration.\n\npub fn separatedecs<'t>(mut ds:Vec<LBox<Construct<'t>>>,vars:&mut Vec<LBox<VarDec<'t>>>,mths:&mut Vec<LBox<MethodDec<'t>>>)\n\n{\n\n while ds.len()>0\n\n {\n\n let mut dec = ds.pop().unwrap(); // this is an lbox\n\n match &mut *dec {\n\n Vdec(vd) => {\n\n let vdec = std::mem::replace(vd,VarDec::default());\n\n vars.push(dec.transfer(vdec)); // transfers lexical info to new lbox\n\n },\n\n Method(md) => {\n\n let mdec = std::mem::replace(md,MethodDec::default());\n\n mths.push(dec.transfer(mdec));\n\n },\n\n _ => {},\n\n }//match\n\n }\n\n}//separatedecs\n", "file_path": "examples/minijava/src/absyntax.rs", "rank": 15, "score": 302309.5151960691 }, { "content": "pub fn getvar(t:&Term) -> str8 {if let Var(x)=t {*x} else {str8::default()}}\n\n\n\n//// replace all defined terms with their definitions\n\n\n\n\n\n////// evaluation of a program\n\n////// given hashmap of definitions\n\n\n", "file_path": "examples/lambdascript/src/untyped.rs", "rank": 17, "score": 295362.53649423015 }, { "content": "fn _semaction_rule_16_<'lt>(parser:&mut 
ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Vec<LBox<VarDec<'lt>>> {\n\nlet mut frs = if let RetTypeEnum::Enumvariant_10(_x_10)=parser.popstack().value { _x_10 } else {<Vec<LBox<VarDec<'lt>>>>::default()}; let mut a = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; let mut ty = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; \n\n frs.push(parser.lb(VarDec{dname:a,dtype:ty,initval:Nothing}));\n\n frs \n\n }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 18, "score": 294019.39804977766 }, { "content": "fn _semaction_rule_15_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Vec<LBox<VarDec<'lt>>> {\n\n Vec::new() }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 19, "score": 294019.39804977766 }, { "content": "fn _semaction_rule_13_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Vec<LBox<VarDec<'lt>>> {\n\n Vec::new() }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 20, "score": 294019.39804977766 }, { "content": "fn _semaction_rule_14_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Vec<LBox<VarDec<'lt>>> {\n\nlet mut frs = if let RetTypeEnum::Enumvariant_10(_x_10)=parser.popstack().value { _x_10 } else {<Vec<LBox<VarDec<'lt>>>>::default()}; let mut a = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; let mut ty = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; \n\n frs.push(parser.lb(VarDec{dname:a,dtype:ty,initval:Nothing}));\n\n frs \n\n }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 21, "score": 294019.39804977766 }, { "content": "// procedure to generate lexical scanner from lexname, lexval and 
lexattribute\n\n// declarations in the grammar file. Added for Version 0.2.3. This procedure\n\n// is only used by other modules internally\n\npub fn genlexer(&self,fd:&mut File, fraw:&str) -> Result<(),std::io::Error>\n\n{\n\n ////// WRITE LEXER\n\n let ref absyn = self.Absyntype;\n\n let ltopt = if self.lifetime.len()>0 {format!(\"<{}>\",&self.lifetime)}\n\n else {String::new()};\n\n let retenum = format!(\"RetTypeEnum{}\",&ltopt);\n\n let retype = if self.sametype {absyn} else {&retenum};\n\n let lifetime = if (self.lifetime.len()>0) {&self.lifetime} else {\"'t\"};\n\n write!(fd,\"\\n// Lexical Scanner using RawToken and StrTokenizer\\n\")?;\n\n let lexername = format!(\"{}lexer\",&self.name);\n\n let mut keywords:Vec<&str> = Vec::new();\n\n let mut singles:Vec<char> = Vec::new();\n\n let mut doubles:Vec<&str> = Vec::new();\n\n // collect symbols from grammar\n\n for symbol in &self.Symbols\n\n {\n\n if !symbol.terminal {continue;}\n\n if is_alphanum(&symbol.sym) && &symbol.sym!=\"EOF\" && &symbol.sym!=\"ANY_ERROR\" && !self.Haslexval.contains(&symbol.sym) {\n\n\t keywords.push(&symbol.sym);\n", "file_path": "src/grammar_processor.rs", "rank": 22, "score": 292061.24688951974 }, { "content": "fn _semaction_rule_4_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> ClassDec<'lt> {\n\nlet mut _item5_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut ds = if let RetTypeEnum::Enumvariant_9(_x_9)=parser.popstack().value { _x_9 } else {<Vec<LBox<Declaration<'lt>>>>::default()}; let mut _item3_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut sup = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; let mut name = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; let mut _item0_ = if let 
RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; \n\n let mut vdecs=Vec::new();\n\n let mut mdecs=Vec::new();\n\n separatedecs(ds,&mut vdecs,&mut mdecs); /*split var and method declarations*/\n\n ClassDec {superclass:sup,\n\n classname:name,\n\n vars:vdecs,\n\n methods:mdecs}\n\n }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 23, "score": 280757.8755688842 }, { "content": "// generates the enum type unifying absyntype. - if !self.sametype\n\npub fn gen_enum(&self,fd:&mut File) -> Result<(),std::io::Error>\n\n{\n\n let ref absyn = self.Absyntype;\n\n let ref extype = self.Externtype;\n\n let ref lifetime = self.lifetime;\n\n let has_lt = lifetime.len()>0 && (absyn.contains(lifetime) || extype.contains(lifetime) || absyn==\"LBox<dyn Any>\");\n\n let ltopt = if has_lt {format!(\"<{}>\",lifetime)} else {String::from(\"\")};\n\n //enum name is Retenumgrammarname, variant is _grammarname_enum_{n}\n\n let enumname = format!(\"RetTypeEnum{}\",&ltopt); // will not be pub\n\n let symlen = self.Symbols.len();\n\n write!(fd,\"\\n//Enum for return values \\npub enum {} {{\\n\",&enumname)?;\n\n\n\n for (typesym,eindex) in self.enumhash.iter()\n\n {\n\n write!(fd,\" Enumvariant_{}({}),\\n\",eindex,typesym)?;\n\n }\n\n/*\n\n let mut varset:HashSet<usize> = HashSet::with_capacity(self.Symbols.len());\n\n for sym in &self.Symbols\n\n {\n", "file_path": "src/grammar_processor.rs", "rank": 24, "score": 274936.90582862165 }, { "content": "pub fn make_parser<'lt>() -> ZCParser<RetTypeEnum<'lt>,Program<'lt>>\n\n{\n\n let mut parser1:ZCParser<RetTypeEnum<'lt>,Program<'lt>> = ZCParser::new(60,147);\n\n let mut rule = ZCRProduction::<RetTypeEnum<'lt>,Program<'lt>>::new_skeleton(\"start\");\n\n rule = ZCRProduction::<RetTypeEnum<'lt>,Program<'lt>>::new_skeleton(\"Program\");\n\n rule.Ruleaction = |parser|{ RetTypeEnum::Enumvariant_0(_semaction_rule_0_(parser)) };\n\n parser1.Rules.push(rule);\n\n rule = 
ZCRProduction::<RetTypeEnum<'lt>,Program<'lt>>::new_skeleton(\"MainCl\");\n\n rule.Ruleaction = |parser|{ RetTypeEnum::Enumvariant_4(_semaction_rule_1_(parser)) };\n\n parser1.Rules.push(rule);\n\n rule = ZCRProduction::<RetTypeEnum<'lt>,Program<'lt>>::new_skeleton(\"ClassDecl\");\n\n rule.Ruleaction = |parser|{ RetTypeEnum::Enumvariant_6(_semaction_rule_2_(parser)) };\n\n parser1.Rules.push(rule);\n\n rule = ZCRProduction::<RetTypeEnum<'lt>,Program<'lt>>::new_skeleton(\"ClassDecl\");\n\n rule.Ruleaction = |parser|{ RetTypeEnum::Enumvariant_6(_semaction_rule_3_(parser)) };\n\n parser1.Rules.push(rule);\n\n rule = ZCRProduction::<RetTypeEnum<'lt>,Program<'lt>>::new_skeleton(\"ClassDec\");\n\n rule.Ruleaction = |parser|{ RetTypeEnum::Enumvariant_5(_semaction_rule_4_(parser)) };\n\n parser1.Rules.push(rule);\n\n rule = ZCRProduction::<RetTypeEnum<'lt>,Program<'lt>>::new_skeleton(\"Extension\");\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 25, "score": 274529.7798217619 }, { "content": "fn _semaction_rule_0_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Program<'lt> {\n\nlet mut cs = if let RetTypeEnum::Enumvariant_6(_x_6)=parser.popstack().value { _x_6 } else {<Vec<LBox<ClassDec<'lt>>>>::default()}; let mut _mc_ = if let RetTypeEnum::Enumvariant_4(_x_4)=parser.popstack().value { _x_4 } else {<Mainclass<'lt>>::default()}; let mut mc = parser.lbx(0,_mc_); Program {mainclass:mc, otherclasses:cs } }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 26, "score": 273861.2022315914 }, { "content": "fn _semaction_rule_59_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Program<'lt> {\n\nlet mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; <Program<'lt>>::default()}\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 27, "score": 273861.2022315914 }, { "content": "fn load_extras<'lt>(parser:&mut 
ZCParser<RetTypeEnum<'lt>,Program<'lt>>)\n\n{\n\n}//end of load_extras: don't change this line as it affects augmentation\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 28, "score": 265990.692883591 }, { "content": "fn _semaction_rule_3_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Vec<LBox<ClassDec<'lt>>> {\n\nlet mut _cl_ = if let RetTypeEnum::Enumvariant_5(_x_5)=parser.popstack().value { _x_5 } else {<ClassDec<'lt>>::default()}; let mut cl = parser.lbx(1,_cl_); let mut cs = if let RetTypeEnum::Enumvariant_6(_x_6)=parser.popstack().value { _x_6 } else {<Vec<LBox<ClassDec<'lt>>>>::default()}; cs.push(cl); cs }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 29, "score": 265309.451091375 }, { "content": "fn _semaction_rule_2_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Vec<LBox<ClassDec<'lt>>> {\n\n Vec::new() }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 30, "score": 265309.451091375 }, { "content": "/// parse function with enhanced error handling, requires grammar and\n\n/// abstract syntax that also recognize errors.\n\npub fn enhanced_parse(&mut self, tokenizer:&mut dyn Enhanced_Lexer<AT>) -> ParseResult<AT>\n\n {\n\n self.err_occured = false;\n\n self.stack.clear();\n\n// self.exstate = ET::default(); ???\n\n let mut result = AT::default();\n\n // push state 0 on stack:\n\n self.stack.push(Stackelement {si:0, value:AT::default()});\n\n let unexpected = Stateaction::Error(String::from(\"unexpected end of input\"));\n\n let mut action = unexpected; //Stateaction::Error(String::from(\"get started\"));\n\n self.stopparsing = false;\n\n let mut lookahead = Lextoken{sym:\"EOF\".to_owned(),value:AT::default()}; \n\n if let Some(tok) = tokenizer.nextsym() {lookahead=tok;}\n\n else {self.stopparsing=true;}\n\n\n\n while !self.stopparsing\n\n {\n\n self.linenum = tokenizer.linenum(); self.column=tokenizer.column();\n\n let currentstate = 
self.stack[self.stack.len()-1].si;\n\n let mut actionopt = self.RSM[currentstate].get(lookahead.sym.as_str());//.unwrap();\n", "file_path": "src/enhancements.rs", "rank": 31, "score": 257710.21163669607 }, { "content": "fn _semaction_rule_25_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Stat<'lt> {\n\nlet mut _item2_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut sv = if let RetTypeEnum::Enumvariant_12(_x_12)=parser.popstack().value { _x_12 } else {<Vec<LBox<Stat<'lt>>>>::default()}; let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; Blockst(sv) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 32, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_30_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Stat<'lt> {\n\nlet mut _item6_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _e_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut e = parser.lbx(5,_e_); let mut _item4_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item3_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _i_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut i = parser.lbx(2,_i_); let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _v_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut v = parser.lbx(0,_v_); ArAssignst(v,i,e) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 33, "score": 
253284.3496815837 }, { "content": "fn _semaction_rule_38_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut _b_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut b = parser.lbx(2,_b_); let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _a_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut a = parser.lbx(0,_a_); Binop(\"&&\",a,b) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 34, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_53_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut _item3_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item2_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut x = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; Newobj(x) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 35, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_31_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Stat<'lt> {\n\nlet mut _item6_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item5_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut args = if let RetTypeEnum::Enumvariant_14(_x_14)=parser.popstack().value { _x_14 } else {<Vec<LBox<Expr<'lt>>>>::default()}; let mut _item3_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else 
{<Program<'lt>>::default()}; let mut m = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _obj_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut obj = parser.lbx(0,_obj_); Callstat(obj,m,args)}\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 36, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_41_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut _b_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut b = parser.lbx(2,_b_); let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _a_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut a = parser.lbx(0,_a_); Binop(\"<\",a,b) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 37, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_29_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Stat<'lt> {\n\nlet mut _item3_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _e_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut e = parser.lbx(2,_e_); let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut v = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; Assignst(v,e) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 38, "score": 253284.3496815837 }, { "content": "fn 
_semaction_rule_27_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Stat<'lt> {\n\nlet mut _a_ = if let RetTypeEnum::Enumvariant_11(_x_11)=parser.popstack().value { _x_11 } else {<Stat<'lt>>::default()}; let mut a = parser.lbx(4,_a_); let mut _item3_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _c_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut c = parser.lbx(2,_c_); let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; Ifstat(c,a,parser.lb(Nopst)) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 39, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_42_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut _b_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut b = parser.lbx(2,_b_); let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _a_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut a = parser.lbx(0,_a_); Binop(\"%\",a,b) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 40, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_50_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut x = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; Var(x) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 41, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_48_<'lt>(parser:&mut 
ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut s = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; Strlit(s) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 42, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_51_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; Thisptr }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 43, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_33_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Stat<'lt> {\n\nlet mut v = if let RetTypeEnum::Enumvariant_7(_x_7)=parser.popstack().value { _x_7 } else {<VarDec<'lt>>::default()}; Vardecst(v.dname,v.dtype,parser.lb(v.initval))}\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 44, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_32_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Stat<'lt> {\n\nlet mut _item2_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _e_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut e = parser.lbx(1,_e_); let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; Returnst(e) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 45, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_43_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut _b_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut b = parser.lbx(2,_b_); let mut _item1_ = if let 
RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _a_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut a = parser.lbx(0,_a_); Binop(\"==\",a,b) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 46, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_40_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut _a_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut a = parser.lbx(1,_a_); let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; Notexp(a) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 47, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_36_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut _b_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut b = parser.lbx(2,_b_); let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _a_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut a = parser.lbx(0,_a_); Binop(\"/\",a,b) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 48, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_49_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut b = if let RetTypeEnum::Enumvariant_2(_x_2)=parser.popstack().value { _x_2 } else {<bool>::default()}; Bool(b) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 49, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_52_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> 
{\n\nlet mut _item4_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _s_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut s = parser.lbx(3,_s_); let mut _item2_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; Newarray(s) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 50, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_54_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut _item2_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut e = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; e }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 51, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_45_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut field = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _obj_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut obj = parser.lbx(0,_obj_); Field(field,obj) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 52, "score": 253284.3496815837 
}, { "content": "fn _semaction_rule_46_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut _item5_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut args = if let RetTypeEnum::Enumvariant_14(_x_14)=parser.popstack().value { _x_14 } else {<Vec<LBox<Expr<'lt>>>>::default()}; let mut _item3_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut f = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<&'lt str>::default()}; let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _obj_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut obj = parser.lbx(0,_obj_); Callexp(obj,f,args) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 53, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_35_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut _b_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut b = parser.lbx(2,_b_); let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _a_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut a = parser.lbx(0,_a_); Binop(\"+\",a,b) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 54, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_26_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Stat<'lt> {\n\nlet mut _b_ = if let RetTypeEnum::Enumvariant_11(_x_11)=parser.popstack().value { _x_11 } else {<Stat<'lt>>::default()}; let mut b = parser.lbx(6,_b_); let mut _item5_ = if let 
RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _a_ = if let RetTypeEnum::Enumvariant_11(_x_11)=parser.popstack().value { _x_11 } else {<Stat<'lt>>::default()}; let mut a = parser.lbx(4,_a_); let mut _item3_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _c_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut c = parser.lbx(2,_c_); let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; Ifstat(c, a, b) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 55, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_34_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut _b_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut b = parser.lbx(2,_b_); let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _a_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut a = parser.lbx(0,_a_); Binop(\"*\",a,b) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 56, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_39_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut _b_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut b = parser.lbx(2,_b_); let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _a_ = if let 
RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut a = parser.lbx(0,_a_); Binop(\"OROR\",a,b) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 57, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_47_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut i = if let RetTypeEnum::Enumvariant_3(_x_3)=parser.popstack().value { _x_3 } else {<i32>::default()}; Int(i) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 58, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_28_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Stat<'lt> {\n\nlet mut _s_ = if let RetTypeEnum::Enumvariant_11(_x_11)=parser.popstack().value { _x_11 } else {<Stat<'lt>>::default()}; let mut s = parser.lbx(4,_s_); let mut _item3_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _c_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut c = parser.lbx(2,_c_); let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; Whilest(c,s) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 59, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_44_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut _item3_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _i_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut i = parser.lbx(2,_i_); let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else 
{<Program<'lt>>::default()}; let mut _a_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut a = parser.lbx(0,_a_); Binop(\"[]\",a,i) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 60, "score": 253284.3496815837 }, { "content": "fn _semaction_rule_37_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Expr<'lt> {\n\nlet mut _b_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut b = parser.lbx(2,_b_); let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; let mut _a_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut a = parser.lbx(0,_a_); Binop(\"-\",a,b) }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 61, "score": 253284.3496815837 }, { "content": "//////////////\n\n///////////////// non-binary version (no augmentation) //////////////////\n\npub fn write_verbose(&self, filename:&str)->Result<(),std::io::Error>\n\n {\n\n let mut fd = File::create(filename)?;\n\n write!(fd,\"//Parser generated by rustlr\\n\n\n#![allow(unused_variables)]\n\n#![allow(non_snake_case)]\n\n#![allow(non_camel_case_types)]\n\n#![allow(unused_parens)]\n\n#![allow(unused_mut)]\n\n#![allow(unused_assignments)]\n\nextern crate rustlr;\n\nuse rustlr::{{RuntimeParser,RProduction,Stateaction}};\\n\")?;\n\n\n\n write!(fd,\"{}\\n\",&self.Gmr.Extras)?; // use clauses\n\n let ref absyn = self.Gmr.Absyntype;\n\n let ref extype = self.Gmr.Externtype;\n\n write!(fd,\"pub fn make_parser() -> RuntimeParser<{},{}>\",absyn,extype)?; \n\n write!(fd,\"\\n{{\\n\")?;\n\n // write code to pop stack, assign labels to variables.\n\n write!(fd,\" let mut parser1:RuntimeParser<{},{}> = RuntimeParser::new({},{});\\n\",absyn,extype,self.Gmr.Rules.len(),self.States.len())?;\n", "file_path": 
"src/runtime_parser.rs", "rank": 62, "score": 250187.62869713444 }, { "content": "pub fn parse_train_with<'src_lt>(parser:&mut ZCParser<RetTypeEnum<'src_lt>,i64>, lexer:&mut calcenumlexer<'src_lt>, parserpath:&str) -> Result<Expr<'src_lt>,Expr<'src_lt>>\n\n{\n\n if let RetTypeEnum::Enumvariant_0(_xres_) = parser.parse_train(lexer,parserpath) {\n\n if !parser.error_occurred() {Ok(_xres_)} else {Err(_xres_)}\n\n } else { Err(<Expr<'src_lt>>::default())}\n\n}//parse_train_with public function\n\n\n\n//Enum for return values \n\npub enum RetTypeEnum<'src_lt> {\n\n Enumvariant_2(&'src_lt str),\n\n Enumvariant_1(i64),\n\n Enumvariant_0(Expr<'src_lt>),\n\n}\n\nimpl<'src_lt> Default for RetTypeEnum<'src_lt> { fn default()->Self {RetTypeEnum::Enumvariant_0(<Expr<'src_lt>>::default())} }\n\n\n\n\n\n// Lexical Scanner using RawToken and StrTokenizer\n\npub struct calcenumlexer<'t> {\n\n stk: StrTokenizer<'t>,\n\n keywords: HashSet<&'static str>,\n", "file_path": "examples/calc4/src/calcenumparser.rs", "rank": 63, "score": 241461.28749008704 }, { "content": "fn _semaction_rule_55_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Vec<LBox<Expr<'lt>>> {\n\n Vec::new() }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 64, "score": 239156.09055836784 }, { "content": "fn _semaction_rule_24_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Vec<LBox<Stat<'lt>>> {\n\nlet mut _s_ = if let RetTypeEnum::Enumvariant_11(_x_11)=parser.popstack().value { _x_11 } else {<Stat<'lt>>::default()}; let mut s = parser.lbx(1,_s_); let mut sv = if let RetTypeEnum::Enumvariant_12(_x_12)=parser.popstack().value { _x_12 } else {<Vec<LBox<Stat<'lt>>>>::default()}; sv.push(s); sv }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 65, "score": 239156.09055836784 }, { "content": "fn _semaction_rule_12_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Vec<LBox<Declaration<'lt>>> {\n\nlet mut m = if let 
RetTypeEnum::Enumvariant_8(_x_8)=parser.popstack().value { _x_8 } else {<MethodDec<'lt>>::default()}; let mut ds = if let RetTypeEnum::Enumvariant_9(_x_9)=parser.popstack().value { _x_9 } else {<Vec<LBox<Declaration<'lt>>>>::default()}; ds.push(parser.lbx(1,Mdec(m))); ds }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 66, "score": 239156.09055836784 }, { "content": "fn _semaction_rule_11_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Vec<LBox<Declaration<'lt>>> {\n\nlet mut v = if let RetTypeEnum::Enumvariant_7(_x_7)=parser.popstack().value { _x_7 } else {<VarDec<'lt>>::default()}; let mut ds = if let RetTypeEnum::Enumvariant_9(_x_9)=parser.popstack().value { _x_9 } else {<Vec<LBox<Declaration<'lt>>>>::default()}; ds.push(parser.lbx(1,Vdec(v))); ds }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 67, "score": 239156.09055836784 }, { "content": "fn _semaction_rule_57_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Vec<LBox<Expr<'lt>>> {\n\n Vec::new() }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 68, "score": 239156.09055836784 }, { "content": "fn _semaction_rule_10_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Vec<LBox<Declaration<'lt>>> {\n\n Vec::new() }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 69, "score": 239156.09055836784 }, { "content": "fn _semaction_rule_56_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Vec<LBox<Expr<'lt>>> {\n\nlet mut er = if let RetTypeEnum::Enumvariant_14(_x_14)=parser.popstack().value { _x_14 } else {<Vec<LBox<Expr<'lt>>>>::default()}; let mut _e_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut e = parser.lbx(0,_e_); er.push(e); er }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 70, "score": 239156.09055836784 }, { "content": "fn _semaction_rule_23_<'lt>(parser:&mut 
ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Vec<LBox<Stat<'lt>>> {\n\n Vec::new() }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 71, "score": 239156.09055836784 }, { "content": "fn _semaction_rule_58_<'lt>(parser:&mut ZCParser<RetTypeEnum<'lt>,Program<'lt>>) -> Vec<LBox<Expr<'lt>>> {\n\nlet mut er = if let RetTypeEnum::Enumvariant_14(_x_14)=parser.popstack().value { _x_14 } else {<Vec<LBox<Expr<'lt>>>>::default()}; let mut _e_ = if let RetTypeEnum::Enumvariant_13(_x_13)=parser.popstack().value { _x_13 } else {<Expr<'lt>>::default()}; let mut e = parser.lbx(1,_e_); let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Program<'lt>>::default()}; er.push(e); er }\n\n\n", "file_path": "examples/minijava/src/mjenumparser.rs", "rank": 72, "score": 239156.09055836784 }, { "content": "// weak head reduction, CBV\n\npub fn weak_beta(&mut self, t:&Term, defs:&HashMap<str8,Term>)\n\n{\n\n if self.trace>0 { println!(\"weak {}\",t.to_string()); }\n\n let mut t2 = t.clone();\n\n while expand(&mut t2,&defs) {\n\n if self.trace>0 {println!(\"= {}\",t2.to_string());}\n\n }\n\n while self.weak_beta1(&mut t2,defs) {\n\n if self.trace>0 {println!(\" => {}\",t2.to_string());}\n\n }\n\n}//weak_beta\n", "file_path": "examples/lambdascript/src/untyped.rs", "rank": 73, "score": 238413.10625232564 }, { "content": "// takes grammar file prefix as command line arg\n\npub fn rustler(grammarname:&str, option:&str) {\n\n let mut gram1 = Grammar::new();\n\n let grammarfile = format!(\"{}.grammar\",&grammarname);\n\n\n\n let lalr = match option {\n\n \"lalr\" | \"LALR\" => true, \n\n \"lr1\" | \"LR1\" => false,\n\n _ => {println!(\"Option {} not supported, defaulting to full LR1 generation\",option); false},\n\n };\n\n \n\n if TRACE>1 {println!(\"parsing grammar from {}\",grammarfile);}\n\n gram1.parse_grammar(&grammarfile);\n\n if TRACE>2 {println!(\"computing Nullable set\");}\n\n gram1.compute_NullableRf();\n\n if TRACE>2 
{println!(\"computing First sets\");}\n\n gram1.compute_FirstIM();\n\n if gram1.name.len()<2 {gram1.name = grammarname.to_owned(); }\n\n let gramname = gram1.name.clone();\n\n /*\n\n for nt in gram1.First.keys() {\n", "file_path": "src/bunch.rs", "rank": 74, "score": 226263.9825231256 }, { "content": "pub fn parse_with<'src_lt>(parser:&mut ZCParser<RetTypeEnum<'src_lt>,i64>, lexer:&mut calcenumlexer<'src_lt>) -> Result<Expr<'src_lt>,Expr<'src_lt>>\n\n{\n\n if let RetTypeEnum::Enumvariant_0(_xres_) = parser.parse(lexer) {\n\n if !parser.error_occurred() {Ok(_xres_)} else {Err(_xres_)}\n\n } else { Err(<Expr<'src_lt>>::default())}\n\n}//parse_with public function\n\n\n", "file_path": "examples/calc4/src/calcenumparser.rs", "rank": 75, "score": 204352.69468687847 }, { "content": "pub fn stateclosure(mut state:LR1State, Gmr:&Grammar)\n\n -> LR1State // consumes and returns new state\n\n{\n\n //algorithm is like that of a spanning tree\n\n let mut closed =LR1State::new(); // closed set,\n\n closed.index = state.index;\n\n while state.items.len()>0\n\n { \n\n //if TRACE>2 {printstate(&state,Gmr);}\n\n let nextitem = state.items.iter().next().unwrap().clone();\n\n let item = state.items.take(&nextitem).unwrap();\n\n let (ri,pi,la) = (item.ri,item.pi,&item.la);\n\n let rulei = &Gmr.Rules[ri]; //.get(ri).unwrap();\n\n let lhs = &rulei.lhs.sym;\n\n closed.insert(nextitem,lhs); // place item in interior\n\n if pi<rulei.rhs.len() && !rulei.rhs[pi].terminal {\n\n let nti = &rulei.rhs[pi]; // non-terminal after dot (Gsym)\n\n let lookaheads=&Gmr.Firstseq(&rulei.rhs[pi+1..],la); \n\n for rulent in Gmr.Rulesfor.get(&nti.sym).unwrap() //rulent:usize\n\n {\n", "file_path": "src/bunch.rs", "rank": 76, "score": 198991.9668089045 }, { "content": "pub fn printrulela(ri:usize, Gmr:&Grammar, la:&str)\n\n{\n\n if ri>=Gmr.Rules.len() {println!(\"printing invalid rule number {}\",ri); return;}\n\n let ref lhs_sym = Gmr.Rules[ri].lhs.sym;\n\n let ref rhs = Gmr.Rules[ri].rhs;\n\n print!(\" 
(Rule {}) {} --> \",ri,lhs_sym);\n\n for gsym in rhs { print!(\"{} \",gsym.sym); }\n\n println!(\" , lookahead {}\",la);\n\n}\n", "file_path": "src/bunch.rs", "rank": 77, "score": 198618.9612659472 }, { "content": "pub fn stateclosure(mut state:LR1State, Gmr:&Grammar)\n\n -> LR1State // consumes and returns new state\n\n{\n\n //algorithm is like that of a spanning tree\n\n let mut closed =LR1State::new(); // closed set,\n\n closed.index = state.index;\n\n while state.items.len()>0\n\n { \n\n //if TRACE>2 {printstate(&state,Gmr);}\n\n let nextitem = state.items.iter().next().unwrap().clone();\n\n let item = state.items.take(&nextitem).unwrap();\n\n let (ri,pi,la) = (item.ri,item.pi,&item.la);\n\n let rulei = &Gmr.Rules[ri]; //.get(ri).unwrap();\n\n let lhs = &rulei.lhs.sym;\n\n closed.insert(nextitem,lhs); // place item in interior\n\n /*\n\n // insert terminals into expected set for error reporting\n\n if pi<rulei.rhs.len() && rulei.rhs[pi].terminal { // add to expected\n\n closed.expected.insert(rulei.rhs[pi].sym.clone());\n\n }\n", "file_path": "src/lr_statemachine.rs", "rank": 78, "score": 196511.828754256 }, { "content": "pub fn printrulela(ri:usize, Gmr:&Grammar, la:&str)\n\n{\n\n if ri>=Gmr.Rules.len() {println!(\"printing invalid rule number {}\",ri); return;}\n\n let ref lhs_sym = Gmr.Rules[ri].lhs.sym;\n\n let ref rhs = Gmr.Rules[ri].rhs;\n\n print!(\" (Rule {}) {} --> \",ri,lhs_sym);\n\n for gsym in rhs { print!(\"{} \",gsym.sym); }\n\n println!(\" , lookahead {}\",la);\n\n}\n", "file_path": "src/lr_statemachine.rs", "rank": 79, "score": 196124.15166643073 }, { "content": "fn weak_beta1(&mut self, t:&mut Term, defs:&HashMap<str8,Term>) -> bool\n\n{ \n\n match t {\n\n App(a,b) => {\n\n if let Abs(x,body) = &**a {\n\n // reduce b first:\n\n self.weak_beta1(b,defs) || \n\n self.beta1(t,defs) \n\n// let wt =self.weak_beta(t,defs); // do it again\n\n// wb || bt || wt\n\n }//redex found\n\n else {self.weak_beta1(a,defs)}\n\n },\n\n _ => {false},\n\n 
}//match\n\n}//weak beta\n\n\n\n}//impl BetaReducer\n\n\n\n//////////////\n\n\n", "file_path": "examples/lambdascript/src/untyped.rs", "rank": 80, "score": 193119.2363880844 }, { "content": "pub fn make_parser<'src_lt>() -> ZCParser<RetTypeEnum<'src_lt>,i64>\n\n{\n\n let mut parser1:ZCParser<RetTypeEnum<'src_lt>,i64> = ZCParser::new(12,27);\n\n let mut rule = ZCRProduction::<RetTypeEnum<'src_lt>,i64>::new_skeleton(\"start\");\n\n rule = ZCRProduction::<RetTypeEnum<'src_lt>,i64>::new_skeleton(\"E\");\n\n rule.Ruleaction = |parser|{ RetTypeEnum::Enumvariant_0(_semaction_rule_0_(parser)) };\n\n parser1.Rules.push(rule);\n\n rule = ZCRProduction::<RetTypeEnum<'src_lt>,i64>::new_skeleton(\"E\");\n\n rule.Ruleaction = |parser|{ RetTypeEnum::Enumvariant_0(_semaction_rule_1_(parser)) };\n\n parser1.Rules.push(rule);\n\n rule = ZCRProduction::<RetTypeEnum<'src_lt>,i64>::new_skeleton(\"E\");\n\n rule.Ruleaction = |parser|{ RetTypeEnum::Enumvariant_0(_semaction_rule_2_(parser)) };\n\n parser1.Rules.push(rule);\n\n rule = ZCRProduction::<RetTypeEnum<'src_lt>,i64>::new_skeleton(\"E\");\n\n rule.Ruleaction = |parser|{ RetTypeEnum::Enumvariant_0(_semaction_rule_3_(parser)) };\n\n parser1.Rules.push(rule);\n\n rule = ZCRProduction::<RetTypeEnum<'src_lt>,i64>::new_skeleton(\"E\");\n\n rule.Ruleaction = |parser|{ RetTypeEnum::Enumvariant_0(_semaction_rule_4_(parser)) };\n\n parser1.Rules.push(rule);\n\n rule = ZCRProduction::<RetTypeEnum<'src_lt>,i64>::new_skeleton(\"E\");\n", "file_path": "examples/calc4/src/calcenumparser.rs", "rank": 81, "score": 181698.51703106007 }, { "content": "fn get<'t>(v:&str,mut env:&'t Rc<Binding>) -> Option<&'t Closure>\n\n{\n\n while v!=&env.var {\n\n if let None = env.rest {return None;} else {env = &env.rest.unwrap();}\n\n }//while\n\n return Some(&env.cls);\n\n}\n\n\n", "file_path": "examples/lambdascript/src/abstmachine.rs", "rank": 82, "score": 179031.34991177788 }, { "content": "fn get<'t>(v:&str,mut env:&'t Bindings) -> Option<&'t 
Rc<Closure>>\n\n{\n\n while let Some(rcb) = env {\n\n if v==&rcb.var { return Some(&rcb.cl); }\n\n env = &rcb.rest;\n\n }\n\n return None;\n\n}\n\n\n\npub struct Machine // Krivine's Abstract Machine\n\n{\n\n stack: Vec<Rc<Closure>>,\n\n counter: usize, // for alpha-conversion (only used when printing)\n\n}\n\n\n\nimpl Machine\n\n{\n\n fn new(t0:LRc<Lamterm>) -> Machine\n\n { \n\n Machine {\n", "file_path": "examples/lambdascript/src/abstmachine.rs", "rank": 83, "score": 179031.34991177788 }, { "content": "fn load_extras<'src_lt>(parser:&mut ZCParser<RetTypeEnum<'src_lt>,i64>)\n\n{\n\n}//end of load_extras: don't change this line as it affects augmentation\n", "file_path": "examples/calc4/src/calcenumparser.rs", "rank": 84, "score": 175252.00308626017 }, { "content": "fn push<'t>(var:&'t str, val:i64, env:&Rc<Env<'t>>) -> Rc<Env<'t>>\n\n{ Rc::new(Cons(var,val,Rc::clone(env))) }\n", "file_path": "examples/calc4/src/exprtrees.rs", "rank": 85, "score": 175221.06015412993 }, { "content": "pub fn new_parser() -> ZCParser<Construct,Construct>\n\n{\n\n let mut parser1:ZCParser<Construct,Construct> = ZCParser::new(60,147);\n\n let mut rule = ZCRProduction::<Construct,Construct>::new_skeleton(\"start\");\n\n rule = ZCRProduction::<Construct,Construct>::new_skeleton(\"Program\");\n\n rule.Ruleaction = |parser|{ let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); \n\n if let (Classes(cs),Maincl(mc),)=(_item1_.value,_item0_.value,) { Program(parser.lb(mc),cs) } else {parser.bad_pattern(\"(Classes(cs),Maincl(mc),)\")} };\n\n parser1.Rules.push(rule);\n\n rule = ZCRProduction::<Construct,Construct>::new_skeleton(\"MainCl\");\n\n rule.Ruleaction = |parser|{ let mut _item16_ = parser.popstack(); let mut _item15_ = parser.popstack(); let mut _item14_ = parser.popstack(); let mut _item13_ = parser.popstack(); let mut _item12_ = parser.popstack(); let mut _item11_ = parser.popstack(); let mut _item10_ = parser.popstack(); let mut _item9_ = parser.popstack(); let 
mut _item8_ = parser.popstack(); let mut _item7_ = parser.popstack(); let mut _item6_ = parser.popstack(); let mut _item5_ = parser.popstack(); let mut _item4_ = parser.popstack(); let mut _item3_ = parser.popstack(); let mut _item2_ = parser.popstack(); let mut _item1_ = parser.popstack(); let mut _item0_ = parser.popstack(); \n\n if let (Stms(thebody),Id(an),Id(cn),)=(_item14_.value,_item11_.value,_item1_.value,) { \n\n Maincl(Mainclass{classname:cn,\n\n argvname:an,\n\n body: Blockst(thebody),\n\n\t })\n\n } else {parser.bad_pattern(\"(Stms(thebody),Id(an),Id(cn),)\")} };\n\n parser1.Rules.push(rule);\n\n rule = ZCRProduction::<Construct,Construct>::new_skeleton(\"ClassDecl\");\n\n rule.Ruleaction = |parser|{ Classes(Vec::new()) };\n\n parser1.Rules.push(rule);\n", "file_path": "examples/minijava/src/zcmjparser.rs", "rank": 86, "score": 168321.7369205432 }, { "content": "pub fn eval_prog(prog:&Vec<LBox<Term>>, defs:&mut HashMap<str8,Term>)\n\n{\n\n let mut reducer = BetaReducer::new();\n\n //let mut defs = HashMap::<str8,Term>::new();\n\n for line in prog\n\n {\n\n match &**line {\n\n Def(weak,x,xdef) => {\n\n let mut xdef2 = unbox!(xdef).clone(); //*xdef.exp.clone();\n\n if *weak {\n\n reducer.trace=0; reducer.cx=0;\n\n reducer.reduce_to_norm(&mut xdef2,defs); \n\n //reducer.weak_beta(&mut xdef2,defs);\n\n }\n\n defs.insert(*x,xdef2);\n\n },\n\n Weak(t) => {\n\n reducer.trace=1; reducer.cx=0;\n\n reducer.weak_beta(t,defs);\n\n println!(); \n", "file_path": "examples/lambdascript/src/untyped.rs", "rank": 87, "score": 168130.48945354595 }, { "content": "fn _semaction_rule_8_<'src_lt>(parser:&mut ZCParser<RetTypeEnum<'src_lt>,i64>) -> Expr<'src_lt> {\n\nlet mut _item2_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut e = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut _item0_ = if let 
RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; e }\n\n\n", "file_path": "examples/calc4/src/calcenumparser.rs", "rank": 88, "score": 164883.19368678733 }, { "content": "fn _semaction_rule_9_<'src_lt>(parser:&mut ZCParser<RetTypeEnum<'src_lt>,i64>) -> Expr<'src_lt> {\n\nlet mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut n = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; Seq(vec![parser.lbx(0,n)]) }\n\n\n", "file_path": "examples/calc4/src/calcenumparser.rs", "rank": 89, "score": 164883.19368678733 }, { "content": "fn _semaction_rule_10_<'src_lt>(parser:&mut ZCParser<RetTypeEnum<'src_lt>,i64>) -> Expr<'src_lt> {\n\nlet mut _item2_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut e = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; \n\n if let (Seq(mut v),)=(_item0_,) { \n\n v.push(parser.lbx(1,e));\n\n Seq(v)\n\n } else {parser.report(\"(Seq(mut v),)\"); <Expr<'src_lt>>::default()} }\n\n\n", "file_path": "examples/calc4/src/calcenumparser.rs", "rank": 90, "score": 164883.19368678733 }, { "content": "fn _semaction_rule_7_<'src_lt>(parser:&mut ZCParser<RetTypeEnum<'src_lt>,i64>) -> Expr<'src_lt> {\n\nlet mut _e_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut e = parser.lbx(1,_e_); let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; Negative(e) }\n\n\n", "file_path": "examples/calc4/src/calcenumparser.rs", "rank": 91, "score": 164883.19368678733 }, { "content": "fn 
_semaction_rule_3_<'src_lt>(parser:&mut ZCParser<RetTypeEnum<'src_lt>,i64>) -> Expr<'src_lt> {\n\nlet mut _e2_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut e2 = parser.lbx(2,_e2_); let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut _e1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut e1 = parser.lbx(0,_e1_); Plus(e1,e2) }\n\n\n", "file_path": "examples/calc4/src/calcenumparser.rs", "rank": 92, "score": 164883.19368678733 }, { "content": "fn _semaction_rule_0_<'src_lt>(parser:&mut ZCParser<RetTypeEnum<'src_lt>,i64>) -> Expr<'src_lt> {\n\nlet mut m = if let RetTypeEnum::Enumvariant_1(_x_1)=parser.popstack().value { _x_1 } else {<i64>::default()}; Val(m) }\n\n\n", "file_path": "examples/calc4/src/calcenumparser.rs", "rank": 93, "score": 164883.19368678733 }, { "content": "fn _semaction_rule_11_<'src_lt>(parser:&mut ZCParser<RetTypeEnum<'src_lt>,i64>) -> Expr<'src_lt> {\n\nlet mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; <Expr<'src_lt>>::default()}\n\n\n", "file_path": "examples/calc4/src/calcenumparser.rs", "rank": 94, "score": 164883.19368678733 }, { "content": "fn _semaction_rule_1_<'src_lt>(parser:&mut ZCParser<RetTypeEnum<'src_lt>,i64>) -> Expr<'src_lt> {\n\nlet mut s = if let RetTypeEnum::Enumvariant_2(_x_2)=parser.popstack().value { _x_2 } else {<&'src_lt str>::default()}; Var(s) }\n\n\n", "file_path": "examples/calc4/src/calcenumparser.rs", "rank": 95, "score": 164883.19368678733 }, { "content": "fn _semaction_rule_4_<'src_lt>(parser:&mut ZCParser<RetTypeEnum<'src_lt>,i64>) -> Expr<'src_lt> {\n\nlet mut _e2_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut e2 = parser.lbx(2,_e2_); let mut _item1_ 
= if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut _e1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut e1 = parser.lbx(0,_e1_); Minus(e1,e2) }\n\n\n", "file_path": "examples/calc4/src/calcenumparser.rs", "rank": 96, "score": 164883.19368678733 }, { "content": "fn _semaction_rule_6_<'src_lt>(parser:&mut ZCParser<RetTypeEnum<'src_lt>,i64>) -> Expr<'src_lt> {\n\nlet mut e2 = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut e1 = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; Times(parser.lbx(0,e1),parser.lbx(2,e2))}\n\n\n", "file_path": "examples/calc4/src/calcenumparser.rs", "rank": 97, "score": 164883.19368678733 }, { "content": "fn _semaction_rule_2_<'src_lt>(parser:&mut ZCParser<RetTypeEnum<'src_lt>,i64>) -> Expr<'src_lt> {\n\nlet mut _b_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut b = parser.lbx(5,_b_); let mut _item4_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut _e_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut e = parser.lbx(3,_e_); let mut _item2_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut _vx_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut vx = parser.lbx(1,_vx_); let mut _item0_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; \n\n if let (Var(x),)=(&mut 
*vx,) { Letexp(x,e,b)} else {parser.report(\"(Var(x),)\"); <Expr<'src_lt>>::default()} }\n\n\n", "file_path": "examples/calc4/src/calcenumparser.rs", "rank": 98, "score": 164883.19368678733 }, { "content": "fn _semaction_rule_5_<'src_lt>(parser:&mut ZCParser<RetTypeEnum<'src_lt>,i64>) -> Expr<'src_lt> {\n\nlet mut _e2_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut e2 = parser.lbx(2,_e2_); let mut _item1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut _e1_ = if let RetTypeEnum::Enumvariant_0(_x_0)=parser.popstack().value { _x_0 } else {<Expr<'src_lt>>::default()}; let mut e1 = parser.lbx(0,_e1_); Divide(e1,e2)}\n\n\n", "file_path": "examples/calc4/src/calcenumparser.rs", "rank": 99, "score": 164883.19368678733 } ]
Rust
src/main.rs
zxrs/pinion
95a7e624b40040e4b208731bb6c3e06b6950612f
#![windows_subsystem = "windows"] use anyhow::{ensure, Error, Result}; use image::{self, imageops, GenericImageView}; use std::char::{decode_utf16, REPLACEMENT_CHARACTER}; use std::env; use std::mem; use std::ptr; use std::slice; use winapi::{ ctypes::c_void, shared::{ minwindef::{HIWORD, LOWORD, LPARAM, LRESULT, TRUE, UINT, WPARAM}, windef::{HFONT, HMENU, HWND, RECT}, }, um::{ commdlg::{GetOpenFileNameW, OFN_FILEMUSTEXIST, OPENFILENAMEW}, wingdi::{ BitBlt, CreateCompatibleBitmap, CreateCompatibleDC, CreateFontW, DeleteDC, DeleteObject, SelectObject, SetDIBits, BITMAPINFO, BITMAPINFOHEADER, BI_RGB, CLIP_DEFAULT_PRECIS, DEFAULT_CHARSET, DEFAULT_PITCH, DEFAULT_QUALITY, DIB_RGB_COLORS, FF_DONTCARE, OUT_DEFAULT_PRECIS, SRCCOPY, }, winuser::{ BeginPaint, CreateWindowExW, DefWindowProcW, DispatchMessageW, EndPaint, GetMessageW, GetSysColorBrush, InvalidateRect, LoadCursorW, LoadIconW, MessageBoxW, PostQuitMessage, RegisterClassW, SendMessageW, SetWindowTextW, ShowWindow, TranslateMessage, UpdateWindow, BN_CLICKED, BS_PUSHBUTTON, COLOR_MENUBAR, CW_USEDEFAULT, IDI_APPLICATION, MB_OK, MSG, PAINTSTRUCT, SW_SHOW, WM_COMMAND, WM_CREATE, WM_DESTROY, WM_PAINT, WM_SETFONT, WNDCLASSW, WS_CAPTION, WS_CHILD, WS_OVERLAPPED, WS_SYSMENU, WS_VISIBLE, }, }, }; static mut H_WINDOW: HWND = ptr::null_mut(); static mut H_FONT: HFONT = ptr::null_mut(); static mut BUF: Vec<u8> = Vec::new(); static mut DATA_LEN: usize = 0; static mut WIDTH: i32 = 0; static mut HEIGHT: i32 = 0; const ID_OPEN_BUTTON: i32 = 2100; fn main() -> Result<()> { unsafe { let class_name = l("pinion_window_class"); let wnd_class = WNDCLASSW { style: 0, lpfnWndProc: Some(window_proc), cbClsExtra: 0, cbWndExtra: 0, hInstance: ptr::null_mut(), hIcon: LoadIconW(ptr::null_mut(), IDI_APPLICATION), hCursor: LoadCursorW(ptr::null_mut(), IDI_APPLICATION), hbrBackground: GetSysColorBrush(COLOR_MENUBAR), lpszMenuName: ptr::null_mut(), lpszClassName: class_name.as_ptr(), }; RegisterClassW(&wnd_class); let title = format!("{} v{}", 
env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION")); H_WINDOW = CreateWindowExW( 0, class_name.as_ptr(), l(&title).as_ptr(), WS_OVERLAPPED | WS_CAPTION | WS_SYSMENU | WS_VISIBLE, CW_USEDEFAULT, CW_USEDEFAULT, 656, 551, ptr::null_mut(), ptr::null_mut(), ptr::null_mut(), ptr::null_mut(), ); ensure!(!H_WINDOW.is_null(), "CreateWindowExW failed."); BUF.reserve(640 * 480 * 3); ShowWindow(H_WINDOW, SW_SHOW); UpdateWindow(H_WINDOW); let mut msg = init::<MSG>(); loop { if GetMessageW(&mut msg, ptr::null_mut(), 0, 0) == 0 { break; } TranslateMessage(&msg); DispatchMessageW(&msg); } } Ok(()) } unsafe extern "system" fn window_proc( h_wnd: HWND, msg: UINT, w_param: WPARAM, l_param: LPARAM, ) -> LRESULT { match msg { WM_CREATE => create(h_wnd), WM_COMMAND => command(h_wnd, w_param), WM_PAINT => { if DATA_LEN > 0 { paint(h_wnd) } else { return DefWindowProcW(h_wnd, msg, w_param, l_param); } } WM_DESTROY => { DeleteObject(H_FONT as *mut c_void); PostQuitMessage(0); Ok(()) } _ => return DefWindowProcW(h_wnd, msg, w_param, l_param), } .map_err(msg_box) .ok(); 0 } unsafe fn create(h_wnd: HWND) -> Result<()> { create_font()?; create_button(h_wnd)?; Ok(()) } unsafe fn create_font() -> Result<()> { H_FONT = CreateFontW( 18, 0, 0, 0, 0, 0, 0, 0, DEFAULT_CHARSET, OUT_DEFAULT_PRECIS, CLIP_DEFAULT_PRECIS, DEFAULT_QUALITY, DEFAULT_PITCH | FF_DONTCARE, l("メイリオ").as_ptr(), ); ensure!(!H_FONT.is_null(), "CreateFontW failed."); Ok(()) } unsafe fn create_button(h_wnd: HWND) -> Result<()> { let h_button = CreateWindowExW( 0, l("BUTTON").as_ptr(), l("Open").as_ptr(), WS_CHILD | WS_VISIBLE | BS_PUSHBUTTON, 4, 4, 80, 24, h_wnd, ID_OPEN_BUTTON as HMENU, ptr::null_mut(), ptr::null_mut(), ); ensure!(!h_button.is_null(), "CreateWindowExW BUTTON failed.",); SendMessageW(h_button, WM_SETFONT, H_FONT as WPARAM, 0); Ok(()) } unsafe fn command(h_wnd: HWND, w_param: WPARAM) -> Result<()> { let msg = HIWORD(w_param as u32); let id = LOWORD(w_param as u32) as i32; if msg == BN_CLICKED { if id == ID_OPEN_BUTTON 
{ let file_path = open_dialog(h_wnd)?; read_image(&file_path)?; } } Ok(()) } unsafe fn read_image(file_path: &str) -> Result<()> { let img = image::open(file_path)?; let img = if img.width() > 640 || img.height() > 480 { let new_size = if img.width() as f32 / img.height() as f32 > 1.333 { 640 } else { if img.width() > img.height() { (480.0 / img.height() as f32 * img.width() as f32) as u32 } else { 480 } }; img.resize(new_size, new_size, imageops::Lanczos3) } else { img }; WIDTH = img.width() as i32; HEIGHT = img.height() as i32; let bgr = img.into_bgr(); ensure!(bgr.len() <= 640 * 480 * 3, "Invalid data length."); let remain = (3 * WIDTH as usize) % 4; if remain > 0 { let chunk_size = 3 * WIDTH as usize; let line_bytes_len = chunk_size + 4 - remain; DATA_LEN = line_bytes_len * HEIGHT as usize; let mut p = BUF.as_mut_ptr(); bgr.chunks(chunk_size).for_each(|c| { ptr::copy_nonoverlapping(c.as_ptr(), p, chunk_size); p = p.add(line_bytes_len); }); } else { DATA_LEN = (WIDTH * HEIGHT * 3) as usize; ptr::copy_nonoverlapping(bgr.as_ptr(), BUF.as_mut_ptr(), DATA_LEN); }; let rc = RECT { top: 32, left: 0, right: 640, bottom: 512, }; InvalidateRect(H_WINDOW, &rc, TRUE); SetWindowTextW(H_WINDOW, l(file_path).as_ptr()); Ok(()) } unsafe fn open_dialog(h_wnd: HWND) -> Result<String> { const MAX_PATH: u32 = 260; let mut buf = [0u16; MAX_PATH as usize]; let filter = l("Image file\0*.jpg;*.png;*.gif;*.bmp\0"); let title = l("Choose a image file"); let mut ofn = zeroed::<OPENFILENAMEW>(); ofn.lStructSize = mem::size_of::<OPENFILENAMEW>() as u32; ofn.lpstrFilter = filter.as_ptr(); ofn.lpstrTitle = title.as_ptr(); ofn.lpstrFile = buf.as_mut_ptr(); ofn.nMaxFile = MAX_PATH; ofn.Flags = OFN_FILEMUSTEXIST; ofn.hwndOwner = h_wnd; ensure!(GetOpenFileNameW(&mut ofn) != 0, "Cannot get file path."); let slice = slice::from_raw_parts(ofn.lpstrFile, MAX_PATH as usize); Ok(decode(slice)) } unsafe fn paint(h_wnd: HWND) -> Result<()> { let mut ps = init::<PAINTSTRUCT>(); let hdc = BeginPaint(h_wnd, 
&mut ps); let mut bi = zeroed::<BITMAPINFO>(); bi.bmiHeader = zeroed::<BITMAPINFOHEADER>(); bi.bmiHeader.biSize = mem::size_of::<BITMAPINFOHEADER>() as u32; bi.bmiHeader.biWidth = WIDTH; bi.bmiHeader.biHeight = -HEIGHT; bi.bmiHeader.biPlanes = 1; bi.bmiHeader.biBitCount = 24; bi.bmiHeader.biSizeImage = DATA_LEN as u32; bi.bmiHeader.biCompression = BI_RGB; let h_bmp = CreateCompatibleBitmap(hdc, WIDTH, HEIGHT); SetDIBits( hdc, h_bmp, 0, HEIGHT as u32, BUF.as_ptr() as *const c_void, &bi, DIB_RGB_COLORS, ); let h_mdc = CreateCompatibleDC(hdc); SelectObject(h_mdc, h_bmp as *mut c_void); let padding_left = (640 - WIDTH) / 2; let padding_top = (480 - HEIGHT) / 2; BitBlt( hdc, padding_left, padding_top + 32, WIDTH, HEIGHT, h_mdc, 0, 0, SRCCOPY, ); DeleteDC(h_mdc); DeleteObject(h_bmp as *mut c_void); EndPaint(h_wnd, &ps); Ok(()) } fn msg_box(e: Error) { unsafe { MessageBoxW( H_WINDOW, l(&e.to_string()).as_ptr(), l("Error").as_ptr(), MB_OK, ) }; } fn l(source: &str) -> Vec<u16> { source.encode_utf16().chain(Some(0)).collect() } fn decode(source: &[u16]) -> String { decode_utf16(source.iter().take_while(|&n| n != &0).cloned()) .map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)) .collect() } unsafe fn init<T>() -> T { mem::MaybeUninit::<T>::uninit().assume_init() } unsafe fn zeroed<T>() -> T { mem::MaybeUninit::<T>::zeroed().assume_init() }
#![windows_subsystem = "windows"] use anyhow::{ensure, Error, Result}; use image::{self, imageops, GenericImageView}; use std::char::{decode_utf16, REPLACEMENT_CHARACTER}; use std::env; use std::mem; use std::ptr; use std::slice; use winapi::{ ctypes::c_void, shared::{ minwindef::{HIWORD, LOWORD, LPARAM, LRESULT, TRUE, UINT, WPARAM}, windef::{HFONT, HMENU, HWND, RECT}, }, um::{ commdlg::{GetOpenFileNameW, OFN_FILEMUSTEXIST, OPENFILENAMEW}, wingdi::{ BitBlt, CreateCompatibleBitmap, CreateCompatibleDC, CreateFontW, DeleteDC, DeleteObject, SelectObject, SetDIBits, BITMAPINFO, BITMAPINFOHEADER, BI_RGB, CLIP_DEFAULT_PRECIS, DEFAULT_CHARSET, DEFAULT_PITCH, DEFAULT_QUALITY, DIB_RGB_COLORS, FF_DONTCARE, OUT_DEFAULT_PRECIS, SRCCOPY, }, winuser::{ BeginPaint, CreateWindowExW, DefWindowProcW, DispatchMessageW, EndPaint, GetMessageW, GetSysColorBrush, InvalidateRect, LoadCursorW, LoadIconW, MessageBoxW, PostQuitMessage, RegisterClassW, SendMessageW, SetWindowTextW, ShowWindow, TranslateMessage, UpdateWindow, BN_CLICKED, BS_PUSHBUTTON, COLOR_MENUBAR, CW_USEDEFAULT, IDI_APPLICATION, MB_OK, MSG, PAINTSTRUCT, SW_SHOW, WM_COMMAND, WM_CREATE, WM_DESTROY, WM_PAINT, WM_SETFONT, WNDCLASSW, WS_CAPTION, WS_CHILD, WS_OVERLAPPED, WS_SYSMENU, WS_VISIBLE, }, }, }; static mut H_WINDOW: HWND = ptr::null_mut(); static mut H_FONT: HFONT = ptr::null_mut(); static mut BUF: Vec<u8> = Vec::new(); static mut DATA_LEN: usize = 0; static mut WIDTH: i32 = 0; static mut HEIGHT: i32 = 0; const ID_OPEN_BUTTON: i32 = 2100; fn main() -> Result<()> { unsafe { let class_name = l("pinion_window_class"); let wnd_class = WNDCLASSW { style: 0, lpfnWndProc: Some(window_proc), cbClsExtra: 0, cbWndExtra: 0, hInstance: ptr::null_mut(), hIcon: LoadIconW(ptr::null_mut(), IDI_APPLICATION), hCursor: LoadCursorW(ptr::null_mut(), IDI_APPLICATION), hbrBackground: GetSysColorBrush(COLOR_MENUBAR), lpszMenuName: ptr::null_mut(), lpszClassName: class_name.as_ptr(), }; RegisterClassW(&wnd_class); let title = format!("{} v{}", 
env!("CARGO_PKG_NAME"), env!("CARGO_PKG_VERSION")); H_WINDOW = CreateWindowExW( 0, class_name.as_ptr(), l(&title).as_ptr(), WS_OVERLAPPED | WS_CAPTION | WS_SYSMENU | WS_VISIBLE, CW_USEDEFAULT, CW_USEDEFAULT, 656, 551, ptr::null_mut(), ptr::null_mut(), ptr::null_mut(), ptr::null_mut(), ); ensure!(!H_WINDOW.is_null(), "CreateWindowExW failed."); BUF.reserve(640 * 480 * 3); ShowWindow(H_WINDOW, SW_SHOW); UpdateWindow(H_WINDOW); let mut msg = init::<MSG>(); loop { if GetMessageW(&mut msg, ptr::null_mut(), 0, 0) == 0 { break; } TranslateMessage(&msg); DispatchMessageW(&msg); } } Ok(()) } unsafe extern "system" fn window_proc( h_wnd: HWND, msg: UINT, w_param: WPARAM, l_param: LPARAM, ) -> LRESULT { match msg { WM_CREATE => create(h_wnd), WM_COMMAND => command(h_wnd, w_param), WM_PAINT => { if DATA_LEN > 0 { paint(h_wnd) } else { return DefWindowProcW(h_wnd, msg, w_param, l_param); } } WM_DESTROY => { DeleteObject(H_FONT as *mut c_void); PostQuitMessage(0); Ok(()) } _ => return DefWindowProcW(h_wnd, msg, w_param, l_param), } .map_err(msg_box) .ok(); 0 } unsafe fn create(h_wnd: HWND) -> Result<()> { create_font()?; create_button(h_wnd)?; Ok(()) } unsafe fn create_font() -> Result<()> { H_FONT = CreateFontW( 18, 0, 0, 0, 0, 0, 0, 0, DEFAULT_CHARSET, OUT_DEFAULT_PRECIS, CLIP_DEFAULT_PRECIS, DEFAULT_QUALITY, DEFAULT_PITCH | FF_DONTCARE, l("メイリオ").as_ptr(), ); ensure!(!H_FONT.is_null(), "CreateFontW failed."); Ok(()) } unsafe fn create_button(h_wnd: HWND) -> Result<()> { let h_button = CreateWindowExW( 0, l("BUTTON").as_ptr(), l("Open").as_ptr(), WS_CHILD | WS_VISIBLE | BS_PUSHBUTTON, 4, 4, 80, 24, h_wnd, ID_OPEN_BUTTON as HMENU, ptr::null_mut(), ptr::null_mut(), ); ensure!(!h_button.is_null(), "CreateWindowExW BUTTON failed.",); SendMessageW(h_button, WM_SETFONT, H_FONT as WPARAM, 0); Ok(()) } unsafe fn command(h_wnd: HWND, w_param: WPARAM) -> Result<()> { let msg = HIWORD(w_param as u32); let id = LOWORD(w_param as u32) as i32; if msg == BN_CLICKED { if id == ID_OPEN_BUTTON 
{ let file_path = open_dialog(h_wnd)?; read_image(&file_path)?; } } Ok(()) } unsafe fn read_image(file_path: &str) -> Result<()> { let img = image::open(file_path)?; let img = if img.width() > 640 || img.height() > 480 { let new_size = if img.width() as f32 / img.height() as f32 > 1.333 { 640 } else { if img.width() > img.height() { (480.0 / img.height() as f32 * img.width() as f32) as u32 } else { 480 } }; img.resize(new_size, new_size, imageops::Lanczos3) } else { img }; WIDTH = img.width() as i32; HEIGHT = img.height() as i32; let bgr = img.into_bgr(); ensure!(bgr.len() <= 640 * 480 * 3, "Invalid data length."); let remain = (3 * WIDTH as usize) % 4; if remain > 0 { let chunk_size = 3 * WIDTH as usize; let line_bytes_len = chunk_size + 4 - remain; DATA_LEN = line_bytes_len * HEIGHT as usize; let mut p = BUF.as_mut_ptr(); bgr.chunks(chunk_size).for_each(|c| { ptr::copy_nonoverlapping(c.as_ptr(), p, chunk_size); p = p.add(line_bytes_len); }); } else { DATA_LEN = (WIDTH * HEIGHT * 3) as usize; ptr::copy_nonoverlapping(bgr.as_ptr(), BUF.as_mut_ptr(), DATA_LEN); }; let rc = RECT { top: 32, left: 0, right: 640, bottom: 512, }; InvalidateRect(H_WINDOW, &rc, TRUE); SetWindowTextW(H_WINDOW, l(file_path).as_ptr()); Ok(()) } unsafe fn open_dialog(h_wnd: HWND) -> Result<String> { const MAX_PATH: u32 = 260; let mut buf = [0u16; MAX_PATH as usize]; let filter = l("Image file\0*.jpg;*.png;*.gif;*.bmp\0"); let title = l("Choose a image file"); let mut ofn = zeroed::<OPENFILENAMEW>(); ofn.lStructSize = mem::size_of::<OPENFILENAMEW>() as u32; ofn.lpstrFilter = filter.as_ptr(); ofn.lpstrTitle = title.as_ptr(); ofn.lpstrFile = buf.as_mut_ptr(); ofn.nMaxFile = MAX_PATH; ofn.Flags = OFN_FILEMUSTEXIST; ofn.hwndOwner = h_wnd; ensure!(GetOpenFileNameW(&mut ofn) != 0, "Cannot get file path."); let slice = slice::from_raw_parts(ofn.lpstrFile, MAX_PATH as usize); Ok(decode(slice)) } unsafe fn paint(h_wnd: HWND) -> Result<()> { let mut ps = init::<PAINTS
fn msg_box(e: Error) { unsafe { MessageBoxW( H_WINDOW, l(&e.to_string()).as_ptr(), l("Error").as_ptr(), MB_OK, ) }; } fn l(source: &str) -> Vec<u16> { source.encode_utf16().chain(Some(0)).collect() } fn decode(source: &[u16]) -> String { decode_utf16(source.iter().take_while(|&n| n != &0).cloned()) .map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)) .collect() } unsafe fn init<T>() -> T { mem::MaybeUninit::<T>::uninit().assume_init() } unsafe fn zeroed<T>() -> T { mem::MaybeUninit::<T>::zeroed().assume_init() }
TRUCT>(); let hdc = BeginPaint(h_wnd, &mut ps); let mut bi = zeroed::<BITMAPINFO>(); bi.bmiHeader = zeroed::<BITMAPINFOHEADER>(); bi.bmiHeader.biSize = mem::size_of::<BITMAPINFOHEADER>() as u32; bi.bmiHeader.biWidth = WIDTH; bi.bmiHeader.biHeight = -HEIGHT; bi.bmiHeader.biPlanes = 1; bi.bmiHeader.biBitCount = 24; bi.bmiHeader.biSizeImage = DATA_LEN as u32; bi.bmiHeader.biCompression = BI_RGB; let h_bmp = CreateCompatibleBitmap(hdc, WIDTH, HEIGHT); SetDIBits( hdc, h_bmp, 0, HEIGHT as u32, BUF.as_ptr() as *const c_void, &bi, DIB_RGB_COLORS, ); let h_mdc = CreateCompatibleDC(hdc); SelectObject(h_mdc, h_bmp as *mut c_void); let padding_left = (640 - WIDTH) / 2; let padding_top = (480 - HEIGHT) / 2; BitBlt( hdc, padding_left, padding_top + 32, WIDTH, HEIGHT, h_mdc, 0, 0, SRCCOPY, ); DeleteDC(h_mdc); DeleteObject(h_bmp as *mut c_void); EndPaint(h_wnd, &ps); Ok(()) }
function_block-function_prefixed
[ { "content": "# pinion\n\nAn image viewer sample application for Windows written in Rust.\n", "file_path": "README.md", "rank": 18, "score": 3.0048466812659997 } ]
Rust
changeforest-py/src/control.rs
mlondschien/changeforest
4188ba4de34e05214a8cf6a3bb88912067695c14
use biosphere::MaxFeatures; use changeforest::Control; use pyo3::exceptions; use pyo3::prelude::{pyclass, FromPyObject, PyAny, PyErr, PyResult}; use pyo3::prelude::{PyObject, Python}; pub fn control_from_pyobj(py: Python, obj: Option<PyObject>) -> PyResult<Control> { let mut control = Control::default(); if let Some(obj) = obj { if let Ok(pyvalue) = obj.getattr(py, "minimal_relative_segment_length") { if let Ok(value) = pyvalue.extract::<f64>(py) { control = control.with_minimal_relative_segment_length(value); } }; if let Ok(pyvalue) = obj.getattr(py, "minimal_gain_to_split") { if let Ok(value) = pyvalue.extract::<Option<f64>>(py) { control = control.with_minimal_gain_to_split(value); } }; if let Ok(pyvalue) = obj.getattr(py, "model_selection_alpha") { if let Ok(value) = pyvalue.extract::<f64>(py) { control = control.with_model_selection_alpha(value); } }; if let Ok(pyvalue) = obj.getattr(py, "model_selection_alpha") { if let Ok(value) = pyvalue.extract::<f64>(py) { control = control.with_model_selection_alpha(value); } }; if let Ok(pyvalue) = obj.getattr(py, "model_selection_n_permutations") { if let Ok(value) = pyvalue.extract::<usize>(py) { control = control.with_model_selection_n_permutations(value); } }; if let Ok(pyvalue) = obj.getattr(py, "number_of_wild_segments") { if let Ok(value) = pyvalue.extract::<usize>(py) { control = control.with_number_of_wild_segments(value); } }; if let Ok(pyvalue) = obj.getattr(py, "seed") { if let Ok(value) = pyvalue.extract::<u64>(py) { control = control.with_seed(value); control.random_forest_parameters = control.random_forest_parameters.with_seed(value); } }; if let Ok(pyvalue) = obj.getattr(py, "seeded_segments_alpha") { if let Ok(value) = pyvalue.extract::<f64>(py) { control = control.with_seeded_segments_alpha(value); } }; if let Ok(pyvalue) = obj.getattr(py, "random_forest_n_estimators") { if let Ok(value) = pyvalue.extract::<usize>(py) { control.random_forest_parameters = 
control.random_forest_parameters.with_n_estimators(value); } }; if let Ok(pyvalue) = obj.getattr(py, "random_forest_max_depth") { if let Ok(value) = pyvalue.extract::<Option<usize>>(py) { control.random_forest_parameters = control.random_forest_parameters.with_max_depth(value); } }; if let Ok(pyvalue) = obj.getattr(py, "random_forest_max_features") { if let Ok(value) = pyvalue.extract::<PyMaxFeatures>(py) { control.random_forest_parameters = control .random_forest_parameters .with_max_features(value.value); } }; if let Ok(pyvalue) = obj.getattr(py, "random_forest_n_jobs") { if let Ok(value) = pyvalue.extract::<Option<i32>>(py) { control.random_forest_parameters = control.random_forest_parameters.with_n_jobs(value); } }; } Ok(control) } #[pyclass(name = "MaxFeatures")] pub struct PyMaxFeatures { pub value: MaxFeatures, } impl PyMaxFeatures { fn new(value: MaxFeatures) -> Self { PyMaxFeatures { value } } } impl FromPyObject<'_> for PyMaxFeatures { fn extract(ob: &'_ PyAny) -> PyResult<Self> { if let Ok(value) = ob.extract::<usize>() { Ok(PyMaxFeatures::new(MaxFeatures::Value(value))) } else if let Ok(value) = ob.extract::<f64>() { if value > 1. || value <= 0. { Err(PyErr::new::<exceptions::PyTypeError, _>(format!( "Got max_features {}", value ))) } else { Ok(PyMaxFeatures::new(MaxFeatures::Fraction(value))) } } else if let Ok(value) = ob.extract::<Option<String>>() { if value.is_none() { Ok(PyMaxFeatures::new(MaxFeatures::None)) } else { if value.as_ref().unwrap() == "sqrt" { Ok(PyMaxFeatures::new(MaxFeatures::Sqrt)) } else { Err(PyErr::new::<exceptions::PyTypeError, _>(format!( "Unknown value for max_features: {}", value.unwrap() ))) } } } else { Err(PyErr::new::<exceptions::PyTypeError, _>(format!( "Unknown value for max_features: {}", ob ))) } } }
use biosphere::MaxFeatures; use changeforest::Control; use pyo3::exceptions; use pyo3::prelude::{pyclass, FromPyObject, PyAny, PyErr, PyResult}; use pyo3::prelude::{PyObject, Python}; pub fn control_from_pyobj(py: Python, obj: Option<PyObject>) -> PyResult<Control> { let mut control = Control::default(); if let Some(obj) = obj { if let Ok(pyvalue) = obj.getattr(py, "minimal_relative_segment_length") { if let Ok(value) = pyvalue.extract::<f64>(py) { control = control.with_minimal_relative_segment_length(value); } }; if let Ok(pyvalue) = obj.getattr(py, "minimal_gain_to_split") { if let Ok(value) = pyvalue.extract::<Option<f64>>(py) { control = control.with_minimal_gain_to_split(value); } }; if let Ok(pyvalue) = obj.getattr(py, "model_selection_alpha") { if let Ok(value) = pyvalue.extract::<f64>(py) { control = control.with_model_selection_alpha(value); } }; if let Ok(pyvalue) = obj.getattr(py, "model_selection_alpha") { if let Ok(value) = pyvalue.extract::<f64>(py) { control = control.with_model_selection_alpha(value); } }; if let Ok(pyvalue) = obj.getattr(py, "model_selection_n_permutations") { if let Ok(value) = pyvalue.extract::<usize>(py) { control = control.with_model_selection_n_permutations(value); } }; if let Ok(pyvalue) = obj.getattr(py, "number_of_wild_segments") { if let Ok(value) = pyvalue.extract::<usize>(py) { control = control.with_number_of_wild_segments(value); } }; if let Ok(pyvalue) = obj.getattr(py, "seed") { if let Ok(value) = pyvalue.extract::<u64>(py) { control = control.with_seed(value);
control.random_forest_parameters.with_n_jobs(value); } }; } Ok(control) } #[pyclass(name = "MaxFeatures")] pub struct PyMaxFeatures { pub value: MaxFeatures, } impl PyMaxFeatures { fn new(value: MaxFeatures) -> Self { PyMaxFeatures { value } } } impl FromPyObject<'_> for PyMaxFeatures { fn extract(ob: &'_ PyAny) -> PyResult<Self> { if let Ok(value) = ob.extract::<usize>() { Ok(PyMaxFeatures::new(MaxFeatures::Value(value))) } else if let Ok(value) = ob.extract::<f64>() { if value > 1. || value <= 0. { Err(PyErr::new::<exceptions::PyTypeError, _>(format!( "Got max_features {}", value ))) } else { Ok(PyMaxFeatures::new(MaxFeatures::Fraction(value))) } } else if let Ok(value) = ob.extract::<Option<String>>() { if value.is_none() { Ok(PyMaxFeatures::new(MaxFeatures::None)) } else { if value.as_ref().unwrap() == "sqrt" { Ok(PyMaxFeatures::new(MaxFeatures::Sqrt)) } else { Err(PyErr::new::<exceptions::PyTypeError, _>(format!( "Unknown value for max_features: {}", value.unwrap() ))) } } } else { Err(PyErr::new::<exceptions::PyTypeError, _>(format!( "Unknown value for max_features: {}", ob ))) } } }
control.random_forest_parameters = control.random_forest_parameters.with_seed(value); } }; if let Ok(pyvalue) = obj.getattr(py, "seeded_segments_alpha") { if let Ok(value) = pyvalue.extract::<f64>(py) { control = control.with_seeded_segments_alpha(value); } }; if let Ok(pyvalue) = obj.getattr(py, "random_forest_n_estimators") { if let Ok(value) = pyvalue.extract::<usize>(py) { control.random_forest_parameters = control.random_forest_parameters.with_n_estimators(value); } }; if let Ok(pyvalue) = obj.getattr(py, "random_forest_max_depth") { if let Ok(value) = pyvalue.extract::<Option<usize>>(py) { control.random_forest_parameters = control.random_forest_parameters.with_max_depth(value); } }; if let Ok(pyvalue) = obj.getattr(py, "random_forest_max_features") { if let Ok(value) = pyvalue.extract::<PyMaxFeatures>(py) { control.random_forest_parameters = control .random_forest_parameters .with_max_features(value.value); } }; if let Ok(pyvalue) = obj.getattr(py, "random_forest_n_jobs") { if let Ok(value) = pyvalue.extract::<Option<i32>>(py) { control.random_forest_parameters =
function_block-random_span
[ { "content": "pub fn changeforest(\n\n X: &ndarray::ArrayView2<'_, f64>,\n\n method: &str,\n\n segmentation_type: &str,\n\n control: &Control,\n\n) -> BinarySegmentationResult {\n\n let segmentation_type_enum: SegmentationType;\n\n let mut tree: BinarySegmentationTree;\n\n\n\n if segmentation_type == \"bs\" {\n\n segmentation_type_enum = SegmentationType::BS;\n\n } else if segmentation_type == \"sbs\" {\n\n segmentation_type_enum = SegmentationType::SBS;\n\n } else if segmentation_type == \"wbs\" {\n\n segmentation_type_enum = SegmentationType::WBS;\n\n } else {\n\n panic!(\"segmentation_type must be one of 'bs', 'sbs', 'wbs'\")\n\n }\n\n\n\n if method == \"knn\" {\n", "file_path": "src/wrapper.rs", "rank": 1, "score": 91913.9079297458 }, { "content": "pub fn array() -> Array2<f64> {\n\n let seed = 7;\n\n let mut rng = StdRng::seed_from_u64(seed);\n\n\n\n let mut X = Array::zeros((100, 5)); //\n\n\n\n X.slice_mut(s![0..25, 0]).fill(2.);\n\n X.slice_mut(s![40..80, 0]).fill(1.);\n\n X.slice_mut(s![0..40, 1]).fill(-2.);\n\n X.slice_mut(s![25..40, 2]).fill(3.);\n\n X.slice_mut(s![25..80, 1]).fill(-2.);\n\n\n\n X + Array::random_using((100, 5), Uniform::new(0., 1.), &mut rng)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n use assert_approx_eq::*;\n", "file_path": "src/testing.rs", "rank": 2, "score": 79872.1336065325 }, { "content": "#[pymodule]\n\nfn my_module(_py: Python, m: &PyModule) -> PyResult<()> {\n\n m.add_class::<MyBinarySegmentationResult>()?;\n\n Ok(())\n\n}\n", "file_path": "changeforest-py/src/result.rs", "rank": 3, "score": 78384.69995166268 }, { "content": "#[allow(non_snake_case)] // Allow capital X for arrays.\n\n#[pymodule]\n\nfn changeforest(_py: Python<'_>, m: &PyModule) -> PyResult<()> {\n\n #[pyfn(m)]\n\n fn changeforest(\n\n py: Python<'_>,\n\n X: PyReadonlyArray2<f64>,\n\n method: Option<String>,\n\n segmentation_type: Option<String>,\n\n control: Option<PyObject>,\n\n ) -> PyResult<MyBinarySegmentationResult> {\n\n let 
control = control_from_pyobj(py, control).unwrap();\n\n let method = method.unwrap_or(\"random_forest\".to_string());\n\n let segmentation_type = segmentation_type.unwrap_or(\"bs\".to_string());\n\n Ok(MyBinarySegmentationResult {\n\n result: wrapper::changeforest(&X.as_array(), &method, &segmentation_type, &control),\n\n })\n\n }\n\n\n\n m.add_class::<MyBinarySegmentationResult>()?;\n\n m.add_class::<MyOptimizerResult>()?;\n\n Ok(())\n\n}\n", "file_path": "changeforest-py/src/lib.rs", "rank": 4, "score": 75665.09523063374 }, { "content": "pub fn log_eta(x: f64) -> f64 {\n\n // 1e-6 ~ 0.00247, 1 - 1e-6 ~ 0.99752\n\n // log_eta(1) = 0\n\n (0.0024787521766663585 + 0.9975212478233336 * x).ln()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n use rstest::*;\n\n\n\n #[rstest]\n\n #[case(1., 0.)]\n\n #[case(0., -6.)]\n\n fn test_log_eta(#[case] x: f64, #[case] expected: f64) {\n\n assert_eq!(log_eta(x), expected);\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 5, "score": 75110.77038462293 }, { "content": "pub fn gain_from_likelihoods(likelihoods: &Array2<f64>) -> Array1<f64> {\n\n let n = likelihoods.shape()[1];\n\n let mut gain = Array1::<f64>::zeros(n);\n\n // Move everything one to the right.\n\n gain.slice_mut(s![1..])\n\n .assign(&(&likelihoods.slice(s![0, ..(n - 1)]) - &likelihoods.slice(s![1, ..(n - 1)])));\n\n gain.accumulate_axis_inplace(Axis(0), |&prev, curr| *curr += prev);\n\n\n\n gain + likelihoods.slice(s![1, ..]).sum()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::optimizer::{Optimizer, TwoStepSearch};\n\n use crate::testing::RandomClassifier;\n\n\n\n #[test]\n\n fn test_gain_from_likelihoods() {\n\n let likelihoods = ndarray::array![\n", "file_path": "src/gain/classifier_gain.rs", "rank": 6, "score": 62326.922298590864 }, { "content": "class Control:\n\n \"\"\"\n\n Storage container for hyperparameters.\n\n\n\n See rust documentation of changeforest::control::Control for more details.\n\n \"\"\"\n\n\n\n 
def __init__(\n\n self,\n\n minimal_relative_segment_length=\"default\",\n\n minimal_gain_to_split=\"default\",\n\n model_selection_alpha=\"default\",\n\n model_selection_n_permutations=\"default\",\n\n number_of_wild_segments=\"default\",\n\n seeded_segments_alpha=\"default\",\n\n seed=\"default\",\n\n random_forest_n_estimators=\"default\",\n\n random_forest_max_depth=\"default\",\n\n random_forest_max_features=\"default\",\n\n random_forest_n_jobs=\"default\",\n\n ):\n\n self.minimal_relative_segment_length = _to_float(\n\n minimal_relative_segment_length\n\n )\n\n self.minimal_gain_to_split = _to_float(minimal_gain_to_split)\n\n self.model_selection_alpha = _to_float(model_selection_alpha)\n\n self.model_selection_n_permutations = _to_int(model_selection_n_permutations)\n\n self.number_of_wild_segments = _to_int(number_of_wild_segments)\n\n self.seeded_segments_alpha = _to_float(seeded_segments_alpha)\n\n self.seed = _to_int(seed)\n\n self.random_forest_n_estimators = _to_int(random_forest_n_estimators)\n\n self.random_forest_max_depth = _to_int(random_forest_max_depth)\n\n self.random_forest_max_features = _to_int(random_forest_max_features)\n", "file_path": "changeforest-py/changeforest/control.py", "rank": 7, "score": 56815.36390765912 }, { "content": "pub trait Optimizer {\n\n /// Find the element of `split_candidates` to split segment `[start, stop)`.\n\n ///\n\n /// Returns a tuple with the best split and the maximal gain.\n\n fn find_best_split(&self, start: usize, stop: usize) -> Result<OptimizerResult, &str>;\n\n\n\n /// Does a certain split corresponds to a true change point?\n\n fn model_selection(&self, optimizer_result: &OptimizerResult) -> ModelSelectionResult;\n\n\n\n /// Total number of observations.\n\n fn n(&self) -> usize;\n\n\n\n /// Control parameters.\n\n fn control(&self) -> &Control;\n\n\n\n /// Vector with indices of allowed split points.\n\n fn split_candidates(&self, start: usize, stop: usize) -> Result<Vec<usize>, &str> {\n\n let 
minimal_segment_length =\n\n (self.control().minimal_relative_segment_length * (self.n() as f64)).ceil() as usize;\n\n if 2 * minimal_segment_length >= (stop - start) {\n\n Err(\"Segment too small.\")\n\n } else {\n\n Ok(((start + minimal_segment_length)..(stop - minimal_segment_length)).collect())\n\n }\n\n }\n\n}\n", "file_path": "src/optimizer/optimizer.rs", "rank": 8, "score": 44829.05129131459 }, { "content": "pub trait Classifier {\n\n fn n(&self) -> usize;\n\n\n\n fn predict(&self, start: usize, stop: usize, split: usize) -> Array1<f64>;\n\n\n\n fn single_likelihood(\n\n &self,\n\n predictions: &Array1<f64>,\n\n start: usize,\n\n stop: usize,\n\n split: usize,\n\n ) -> f64 {\n\n if (stop - split <= 1) || (split - start <= 1) {\n\n return 0.;\n\n }\n\n\n\n let (left, right) = predictions.slice(s![..]).split_at(Axis(0), split - start);\n\n let left_correction = ((stop - start - 1) as f64) / ((split - start - 1) as f64);\n\n let right_correction = ((stop - start - 1) as f64) / ((stop - split - 1) as f64);\n\n left.mapv(|x| log_eta((1. - x) * left_correction)).sum()\n", "file_path": "src/classifier/classifier.rs", "rank": 9, "score": 44829.05129131459 }, { "content": "pub trait Gain {\n\n /// Total number of observations.\n\n fn n(&self) -> usize;\n\n\n\n #[allow(unused_variables)]\n\n /// Loss of segment `[start, stop)`.\n\n ///\n\n /// This is typically a parametric loss, i.e. minimal negative log-likelihood. 
Needs\n\n /// not be normalized the by segment length.\n\n fn loss(&self, start: usize, stop: usize) -> f64 {\n\n panic!(\"Not implemented.\");\n\n }\n\n\n\n /// Gain when splitting segment [start, stop) at `split`.\n\n fn gain(&self, start: usize, stop: usize, split: usize) -> f64 {\n\n self.loss(start, stop) - self.loss(start, split) - self.loss(split, stop)\n\n }\n\n\n\n /// Gain when splitting segment `[start, stop)` at points in `split_candidates`.\n\n ///\n", "file_path": "src/gain/gain.rs", "rank": 10, "score": 44829.05129131459 }, { "content": "#[extendr]\n\nfn changeforest_api(\n\n X: ndarray::ArrayView2<f64>,\n\n method: &str,\n\n segmentation: &str,\n\n control: MyControl,\n\n) -> MyBinarySegmentationResult {\n\n MyBinarySegmentationResult {\n\n result: wrapper::changeforest(&X, method, segmentation, &control.control),\n\n }\n\n}\n\n\n\n// Macro to generate exports.\n\n// This ensures exported functions are registered with R.\n\n// See corresponding C code in `entrypoint.c`.\n\nextendr_module! {\n\n mod changeforestr;\n\n fn changeforest_api;\n\n}\n", "file_path": "changeforest-r/src/rust/src/lib.rs", "rank": 11, "score": 44326.91806697841 }, { "content": "pub trait ApproxGain {\n\n #[allow(unused_variables)]\n\n /// An approximation of the gain when splitting segment `[start, stop)` at points in `split_candidates`.\n\n ///\n\n /// Returns an `ndarray::Array1` of length `stop - start`. 
Entries without\n\n /// corresponding entry in `split_candidates` are `f64::NAN`.\n\n ///\n\n /// This can be useful when combining classifier-based gains and the two-step-search\n\n /// optimizer.\n\n fn gain_approx(\n\n &self,\n\n start: usize,\n\n stop: usize,\n\n guess: usize,\n\n split_points: &[usize],\n\n ) -> ApproxGainResult;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/gain/gain.rs", "rank": 12, "score": 43576.97815879469 }, { "content": "fn _display_option<T>(option: &Option<T>) -> String\n\nwhere\n\n T: Display,\n\n{\n\n if let Some(val) = option {\n\n format!(\"{}\", val)\n\n } else {\n\n \"\".to_string()\n\n }\n\n}\n\n\n", "file_path": "src/fmt.rs", "rank": 13, "score": 34340.37982327529 }, { "content": "fn _format_tree(result: &BinarySegmentationResult) -> Vec<Vec<String>> {\n\n let mut output = vec![vec![\n\n format!(\"({}, {}]\", result.start, result.stop),\n\n _display_option(&result.optimizer_result.as_ref().map(|x| x.best_split)),\n\n // Truncate max_gain to three decimal places.\n\n _display_option(\n\n &result\n\n .optimizer_result\n\n .as_ref()\n\n .map(|x| f64::trunc(x.max_gain * 1000.0) / 1000.0),\n\n ),\n\n _display_option(&result.model_selection_result.p_value),\n\n ]];\n\n\n\n if result.left.is_some() {\n\n let mut left = _format_tree(result.left.as_ref().unwrap());\n\n let mut right = _format_tree(result.right.as_ref().unwrap());\n\n left[0][0] = format!(\" ¦--{}\", left[0][0]);\n\n right[0][0] = format!(\" °--{}\", right[0][0]);\n\n\n", "file_path": "src/fmt.rs", "rank": 14, "score": 32633.319422234243 }, { "content": " /// https://arxiv.org/pdf/1411.0858.pdf.\n\n pub number_of_wild_segments: usize,\n\n /// Decay parameter in seeded binary segmentation. Should be in `[1/2, 1)`, with a\n\n /// value close to 1 resulting in many segments. 
Corresponds to `\\alpha` in\n\n /// https://arxiv.org/pdf/2002.06633.pdf.\n\n pub seeded_segments_alpha: f64,\n\n /// Seed used for segmentation.\n\n pub seed: u64,\n\n /// Hyperparameters for random forests.\n\n pub random_forest_parameters: RandomForestParameters,\n\n}\n\n\n\nimpl Control {\n\n pub fn default() -> Control {\n\n Control {\n\n minimal_relative_segment_length: 0.01,\n\n minimal_gain_to_split: None,\n\n model_selection_alpha: 0.02,\n\n model_selection_n_permutations: 199,\n\n number_of_wild_segments: 100,\n", "file_path": "src/control.rs", "rank": 15, "score": 31486.72740931115 }, { "content": " self\n\n }\n\n\n\n pub fn with_seed(mut self, seed: u64) -> Self {\n\n self.seed = seed;\n\n self\n\n }\n\n\n\n pub fn with_random_forest_parameters(\n\n mut self,\n\n random_forest_parameters: RandomForestParameters,\n\n ) -> Self {\n\n self.random_forest_parameters = random_forest_parameters;\n\n self\n\n }\n\n}\n", "file_path": "src/control.rs", "rank": 16, "score": 31486.111889941698 }, { "content": " mut self,\n\n model_selection_n_permutations: usize,\n\n ) -> Self {\n\n self.model_selection_n_permutations = model_selection_n_permutations;\n\n self\n\n }\n\n\n\n pub fn with_number_of_wild_segments(mut self, number_of_wild_segments: usize) -> Self {\n\n self.number_of_wild_segments = number_of_wild_segments;\n\n self\n\n }\n\n\n\n pub fn with_seeded_segments_alpha(mut self, seeded_segments_alpha: f64) -> Self {\n\n if (1. <= seeded_segments_alpha) | (seeded_segments_alpha <= 0.) {\n\n panic!(\n\n \"seeded_segments_alpha needs to be strictly between 0 and 1. 
Got {}\",\n\n seeded_segments_alpha\n\n );\n\n }\n\n self.seeded_segments_alpha = seeded_segments_alpha;\n", "file_path": "src/control.rs", "rank": 17, "score": 31485.264391100834 }, { "content": "use biosphere::{MaxFeatures, RandomForestParameters};\n\n\n\n/// Storage container for hyperparameters.\n\n#[derive(Clone)]\n\npub struct Control {\n\n /// Segments with length smaller than `2 * n * minimal_relative_segment_length` will\n\n /// not be split.\n\n pub minimal_relative_segment_length: f64,\n\n /// Only keep split point if the gain exceeds `minimal_gain_to_split`. Relevant for\n\n /// change in mean. Note that this is relative to the number of observations.\n\n /// Use value motivated by BIC `minimal_gain_to_split = log(n_samples) * n_features / n_samples`\n\n /// if `None`.\n\n pub minimal_gain_to_split: Option<f64>,\n\n /// Type two error in model selection to be approximated. Relevant for classifier-\n\n /// based changepoint detection.\n\n pub model_selection_alpha: f64,\n\n /// Number of permutations for model selection in classifier-based change point\n\n /// detection.\n\n pub model_selection_n_permutations: usize,\n\n /// Number of randomly drawn segments. Corresponds to parameter `M` in\n", "file_path": "src/control.rs", "rank": 18, "score": 31481.83664624245 }, { "content": " seeded_segments_alpha: std::f64::consts::FRAC_1_SQRT_2, // 1 / sqrt(2)\n\n seed: 0,\n\n random_forest_parameters: RandomForestParameters::default()\n\n .with_max_depth(Some(8))\n\n .with_max_features(MaxFeatures::Sqrt)\n\n .with_n_jobs(Some(-1)),\n\n }\n\n }\n\n\n\n pub fn with_minimal_relative_segment_length(\n\n mut self,\n\n minimal_relative_segment_length: f64,\n\n ) -> Self {\n\n if (minimal_relative_segment_length >= 0.5) | (minimal_relative_segment_length <= 0.) 
{\n\n panic!(\n\n \"minimal_relative_segment_length needs to be strictly between 0 and 0.5 Got {}\",\n\n minimal_relative_segment_length\n\n );\n\n }\n\n self.minimal_relative_segment_length = minimal_relative_segment_length;\n", "file_path": "src/control.rs", "rank": 19, "score": 31481.509419309077 }, { "content": " self\n\n }\n\n\n\n pub fn with_minimal_gain_to_split(mut self, minimal_gain_to_split: Option<f64>) -> Self {\n\n self.minimal_gain_to_split = minimal_gain_to_split;\n\n self\n\n }\n\n\n\n pub fn with_model_selection_alpha(mut self, model_selection_alpha: f64) -> Self {\n\n if (model_selection_alpha >= 1.) | (model_selection_alpha <= 0.) {\n\n panic!(\n\n \"model_selection_alpha needs to be strictly between 0 and 1. Got {}\",\n\n model_selection_alpha\n\n );\n\n }\n\n self.model_selection_alpha = model_selection_alpha;\n\n self\n\n }\n\n\n\n pub fn with_model_selection_n_permutations(\n", "file_path": "src/control.rs", "rank": 20, "score": 31481.179576159753 }, { "content": "// These are slow. Only run them with --release, i.e. 
cargo test --release\n\nfn test_integration_letters(#[case] method: &str, #[case] segmentation_type: &str) {\n\n let file = File::open(\"testdata/letters.csv\").unwrap();\n\n let mut reader = ReaderBuilder::new().has_headers(true).from_reader(file);\n\n let X: Array2<f64> = reader.deserialize_array2((20000, 16)).unwrap();\n\n\n\n let mut control = Control::default();\n\n control.random_forest_parameters = control.random_forest_parameters.with_n_estimators(20);\n\n\n\n let _ = changeforest(&X.view(), method, segmentation_type, &control);\n\n}\n", "file_path": "tests/test_integration.rs", "rank": 21, "score": 30296.05565721547 }, { "content": "#[rstest]\n\n#[case(\"knn\", \"bs\")]\n\n#[case(\"knn\", \"wbs\")]\n\n#[case(\"knn\", \"sbs\")]\n\n#[case(\"change_in_mean\", \"bs\")]\n\n#[case(\"change_in_mean\", \"wbs\")]\n\n#[case(\"change_in_mean\", \"sbs\")]\n\n#[case(\"random_forest\", \"bs\")]\n\n#[case(\"random_forest\", \"wbs\")]\n\n#[case(\"random_forest\", \"sbs\")]\n\nfn test_integration_iris(#[case] method: &str, #[case] segmentation_type: &str) {\n\n let file = File::open(\"testdata/iris.csv\").unwrap();\n\n let mut reader = ReaderBuilder::new().has_headers(true).from_reader(file);\n\n let X: Array2<f64> = reader.deserialize_array2((150, 4)).unwrap();\n\n\n\n let control = Control::default();\n\n\n\n let _ = changeforest(&X.view(), method, segmentation_type, &control);\n\n}\n\n\n\n#[rstest]\n\n// TODO: These kill my machine.\n\n// #[case(\"knn\", \"bs\")]\n\n// #[case(\"knn\", \"wbs\")]\n\n// #[case(\"knn\", \"sbs\")]\n\n#[case(\"change_in_mean\", \"bs\")]\n\n#[case(\"change_in_mean\", \"sbs\")]\n\n#[case(\"change_in_mean\", \"wbs\")]\n\n#[case(\"random_forest\", \"bs\")]\n\n#[case(\"random_forest\", \"sbs\")]\n\n#[case(\"random_forest\", \"wbs\")]\n", "file_path": "tests/test_integration.rs", "rank": 22, "score": 30296.05565721547 }, { "content": "def test_control_defaults(iris_dataset, key, default_value, another_value):\n\n result = changeforest(iris_dataset, 
\"random_forest\", \"bs\", Control())\n\n default_result = changeforest(\n\n iris_dataset, \"random_forest\", \"bs\", Control(**{key: default_value})\n\n )\n\n another_result = changeforest(\n\n iris_dataset, \"random_forest\", \"bs\", Control(**{key: another_value})\n\n )\n\n\n\n assert str(result) == str(default_result)\n", "file_path": "changeforest-py/tests/test_control.py", "rank": 23, "score": 29978.249070447146 }, { "content": "def test_control_segmentation_parameters(\n\n iris_dataset, segmentation_type, kwargs, expected_number_of_segments\n\n):\n\n result = changeforest(\n\n iris_dataset, \"change_in_mean\", segmentation_type, Control(**kwargs)\n\n )\n\n # For each split, add evaluation on left / right segment to segments.\n\n expected_number_of_segments = (\n\n expected_number_of_segments + 2 * len(result.split_points()) + 1\n\n )\n", "file_path": "changeforest-py/tests/test_control.py", "rank": 24, "score": 29201.990658445015 }, { "content": "def test_control_model_selection_parameters(\n\n iris_dataset,\n\n X_test,\n\n X_correlated,\n\n data,\n\n method,\n\n segmentation_type,\n\n kwargs,\n\n expected,\n\n):\n\n if data == \"iris\":\n\n X = iris_dataset\n\n elif data == \"X_test\":\n\n X = X_test\n\n else:\n\n X = X_correlated\n\n\n\n result = changeforest(X, method, segmentation_type, Control(**kwargs))\n", "file_path": "changeforest-py/tests/test_control.py", "rank": 25, "score": 28464.918510280666 }, { "content": "use biosphere::MaxFeatures;\n\nuse changeforest::Control;\n\nuse extendr_api::Operators;\n\nuse extendr_api::Rinternals;\n\nuse extendr_api::{FromRobj, Robj};\n\n\n\npub struct MyControl {\n\n pub control: Control,\n\n}\n\n\n\nimpl<'a> FromRobj<'a> for MyControl {\n\n fn from_robj(robj: &'a Robj) -> std::result::Result<Self, &'static str> {\n\n let mut control = Control::default();\n\n\n\n if let Some(value) = robj\n\n .dollar(\"minimal_relative_segment_length\")\n\n .unwrap()\n\n .as_real()\n\n {\n\n control = 
control.with_minimal_relative_segment_length(value);\n", "file_path": "changeforest-r/src/rust/src/control.rs", "rank": 33, "score": 26972.679777581165 }, { "content": " control = control.with_number_of_wild_segments(value as usize);\n\n }\n\n\n\n if let Some(value) = robj.dollar(\"seeded_segments_alpha\").unwrap().as_real() {\n\n control = control.with_seeded_segments_alpha(value);\n\n }\n\n\n\n if let Some(value) = robj.dollar(\"seed\").unwrap().as_real() {\n\n control = control.with_seed(value as u64);\n\n control.random_forest_parameters =\n\n control.random_forest_parameters.with_seed(value as u64);\n\n }\n\n\n\n if let Some(value) = robj.dollar(\"random_forest_n_estimators\").unwrap().as_real() {\n\n control.random_forest_parameters = control\n\n .random_forest_parameters\n\n .with_n_estimators(value as usize);\n\n }\n\n\n\n if let Some(value) = robj.dollar(\"random_forest_max_features\").unwrap().as_real() {\n", "file_path": "changeforest-r/src/rust/src/control.rs", "rank": 34, "score": 26967.935973528478 }, { "content": " } else if value != \"default\" {\n\n panic!(\"Got random_forest_max_features = {}\", value);\n\n }\n\n }\n\n\n\n if let Some(value) = robj.dollar(\"random_forest_n_jobs\").unwrap().as_real() {\n\n control.random_forest_parameters = control\n\n .random_forest_parameters\n\n .with_n_jobs(Some(value as i32));\n\n }\n\n\n\n if let Some(value) = robj.dollar(\"random_forest_max_depth\").unwrap().as_real() {\n\n control.random_forest_parameters = control\n\n .random_forest_parameters\n\n .with_max_depth(Some(value as usize));\n\n }\n\n\n\n Ok(MyControl { control })\n\n }\n\n}\n", "file_path": "changeforest-r/src/rust/src/control.rs", "rank": 35, "score": 26963.97438520736 }, { "content": " }\n\n\n\n if let Some(value) = robj.dollar(\"minimal_gain_to_split\").unwrap().as_real() {\n\n control = control.with_minimal_gain_to_split(Some(value));\n\n }\n\n\n\n if let Some(value) = robj.dollar(\"model_selection_alpha\").unwrap().as_real() {\n\n control 
= control.with_model_selection_alpha(value);\n\n }\n\n\n\n if let Some(value) = robj\n\n .dollar(\"model_selection_n_permutations\")\n\n .unwrap()\n\n .as_real()\n\n {\n\n control = control.with_model_selection_n_permutations(value as usize);\n\n }\n\n\n\n // as_integer does not seem to work.\n\n if let Some(value) = robj.dollar(\"number_of_wild_segments\").unwrap().as_real() {\n", "file_path": "changeforest-r/src/rust/src/control.rs", "rank": 36, "score": 26963.943185951626 }, { "content": " if value <= 0. {\n\n panic!(\"Got random_forest_max_features = {}\", value);\n\n } else if value < 1. {\n\n control.random_forest_parameters = control\n\n .random_forest_parameters\n\n .with_max_features(MaxFeatures::Fraction(value));\n\n } else {\n\n control.random_forest_parameters = control\n\n .random_forest_parameters\n\n .with_max_features(MaxFeatures::Value(value as usize));\n\n }\n\n } else if robj.dollar(\"random_forest_max_features\").unwrap().is_null() {\n\n control.random_forest_parameters = control\n\n .random_forest_parameters\n\n .with_max_features(MaxFeatures::None);\n\n } else if let Some(value) = robj.dollar(\"random_forest_max_features\").unwrap().as_str() {\n\n if value == \"sqrt\" {\n\n control.random_forest_parameters = control\n\n .random_forest_parameters\n\n .with_max_features(MaxFeatures::Sqrt);\n", "file_path": "changeforest-r/src/rust/src/control.rs", "rank": 37, "score": 26963.906051426984 }, { "content": "class Control:\n\n \"\"\"\n\n Storage container for hyperparameters.\n\n\n\n See rust documentation of changeforest::control::Control for more details.\n\n \"\"\"\n\n\n\n def __init__(\n\n self,\n\n minimal_relative_segment_length=\"default\",\n\n minimal_gain_to_split=\"default\",\n\n model_selection_alpha=\"default\",\n\n model_selection_n_permutations=\"default\",\n\n number_of_wild_segments=\"default\",\n\n seeded_segments_alpha=\"default\",\n\n seed=\"default\",\n\n random_forest_n_estimators=\"default\",\n\n 
random_forest_max_depth=\"default\",\n\n random_forest_max_features=\"default\",\n\n random_forest_n_jobs=\"default\",\n\n ):\n\n self.minimal_relative_segment_length = _to_float(\n\n minimal_relative_segment_length\n\n )\n\n self.minimal_gain_to_split = _to_float(minimal_gain_to_split)\n\n self.model_selection_alpha = _to_float(model_selection_alpha)\n\n self.model_selection_n_permutations = _to_int(model_selection_n_permutations)\n\n self.number_of_wild_segments = _to_int(number_of_wild_segments)\n\n self.seeded_segments_alpha = _to_float(seeded_segments_alpha)\n\n self.seed = _to_int(seed)\n\n self.random_forest_n_estimators = _to_int(random_forest_n_estimators)\n\n self.random_forest_max_depth = _to_int(random_forest_max_depth)\n\n self.random_forest_max_features = _to_int(random_forest_max_features)\n\n self.random_forest_n_jobs = _to_int(random_forest_n_jobs)\n\n\n\n\n\ndef _to_float(value):\n\n if value is None:\n\n return None\n\n elif isinstance(value, str):\n\n return value\n\n else:\n\n return float(value)\n\n\n\n\n\ndef _to_int(value):\n\n if value is None:\n\n return None\n\n elif isinstance(value, str):\n\n return value\n\n else:\n\n return int(value)\n", "file_path": "changeforest-py/changeforest/control.py", "rank": 38, "score": 24607.184479871175 }, { "content": "def _to_float(value):\n\n if value is None:\n\n return None\n\n elif isinstance(value, str):\n\n return value\n\n else:\n", "file_path": "changeforest-py/changeforest/control.py", "rank": 39, "score": 23578.242357591786 }, { "content": " def __init__(\n\n self,\n\n minimal_relative_segment_length=\"default\",\n\n minimal_gain_to_split=\"default\",\n\n model_selection_alpha=\"default\",\n\n model_selection_n_permutations=\"default\",\n\n number_of_wild_segments=\"default\",\n\n seeded_segments_alpha=\"default\",\n\n seed=\"default\",\n\n random_forest_n_estimators=\"default\",\n\n random_forest_max_depth=\"default\",\n\n random_forest_max_features=\"default\",\n\n 
random_forest_n_jobs=\"default\",\n\n ):\n\n self.minimal_relative_segment_length = _to_float(\n\n minimal_relative_segment_length\n\n )\n\n self.minimal_gain_to_split = _to_float(minimal_gain_to_split)\n\n self.model_selection_alpha = _to_float(model_selection_alpha)\n\n self.model_selection_n_permutations = _to_int(model_selection_n_permutations)\n\n self.number_of_wild_segments = _to_int(number_of_wild_segments)\n\n self.seeded_segments_alpha = _to_float(seeded_segments_alpha)\n\n self.seed = _to_int(seed)\n\n self.random_forest_n_estimators = _to_int(random_forest_n_estimators)\n\n self.random_forest_max_depth = _to_int(random_forest_max_depth)\n\n self.random_forest_max_features = _to_int(random_forest_max_features)\n", "file_path": "changeforest-py/changeforest/control.py", "rank": 40, "score": 23578.242357591786 }, { "content": "import numpy as np\n\nimport pytest\n\n\n\nfrom changeforest import Control, changeforest\n\n\n\n\n\[email protected](\n\n \"data, segmentation_type, method, kwargs, expected\",\n\n [\n\n # minimal_relative_segment_length\n\n (\"iris\", \"bs\", \"knn\", {\"minimal_relative_segment_length\": 0.05}, [50, 100]),\n\n (\"iris\", \"bs\", \"knn\", {\"minimal_relative_segment_length\": 0.4}, [60]),\n\n # minimal_gain_to_split\n\n (\n\n \"iris\",\n\n \"bs\",\n\n \"change_in_mean\",\n\n {\"minimal_gain_to_split\": 150 * 0.1},\n\n [50, 100],\n\n ),\n\n (\"iris\", \"bs\", \"change_in_mean\", {\"minimal_gain_to_split\": 150 * 1}, [50]),\n\n (\"iris\", \"bs\", \"change_in_mean\", {\"minimal_gain_to_split\": 150 * 10}, []),\n\n (\"iris\", \"bs\", \"change_in_mean\", {\"minimal_gain_to_split\": 150 * 10.0}, []),\n\n (\n\n \"iris\",\n\n \"bs\",\n\n \"change_in_mean\",\n\n {\"minimal_gain_to_split\": None},\n\n [50, 100],\n\n ), # log(150) * 4 / 150\n\n # model_selection_alpha\n\n (\"iris\", \"bs\", \"knn\", {\"model_selection_alpha\": 0.001}, []),\n\n (\"iris\", \"bs\", \"knn\", {\"model_selection_alpha\": 0.05}, [50, 100]),\n\n # 
random_forest_n_estimators\n\n # This is impressive and unexpected.\n\n (\"iris\", \"bs\", \"random_forest\", {\"random_forest_n_estimators\": 1}, [47, 99]),\n\n (\"iris\", \"bs\", \"random_forest\", {\"random_forest_n_estimators\": 100}, [50, 100]),\n\n # Use X_test instead\n\n (\"X_test\", \"bs\", \"random_forest\", {\"random_forest_n_estimators\": 1}, []),\n\n (\"X_test\", \"bs\", \"random_forest\", {\"random_forest_n_estimators\": 1.0}, []),\n\n (\"X_test\", \"bs\", \"random_forest\", {\"random_forest_n_estimators\": 100}, [5]),\n\n (\"X_correlated\", \"bs\", \"random_forest\", {\"random_forest_max_depth\": 1}, []),\n\n (\"X_correlated\", \"bs\", \"random_forest\", {\"random_forest_max_depth\": 2}, [49]),\n\n (\n\n \"X_correlated\",\n\n \"bs\",\n\n \"random_forest\",\n\n {\"random_forest_max_features\": \"sqrt\"},\n\n [49],\n\n ),\n\n (\"iris\", \"bs\", \"random_forest\", {\"model_selection_n_permutations\": 10}, []),\n\n ],\n\n)\n\ndef test_control_model_selection_parameters(\n\n iris_dataset,\n\n X_test,\n\n X_correlated,\n\n data,\n\n method,\n\n segmentation_type,\n\n kwargs,\n\n expected,\n\n):\n\n if data == \"iris\":\n\n X = iris_dataset\n\n elif data == \"X_test\":\n\n X = X_test\n\n else:\n\n X = X_correlated\n\n\n\n result = changeforest(X, method, segmentation_type, Control(**kwargs))\n\n np.testing.assert_array_equal(result.split_points(), expected)\n\n\n\n\n\[email protected](\n\n \"segmentation_type, kwargs, expected_number_of_segments\",\n\n [\n\n # seeded_segments_alpha\n\n (\n\n \"sbs\",\n\n {\n\n \"minimal_relative_segment_length\": 0.05,\n\n \"seeded_segments_alpha\": 1 / np.sqrt(2),\n\n },\n\n 44,\n\n ),\n\n (\n\n \"sbs\",\n\n {\"minimal_relative_segment_length\": 0.05, \"seeded_segments_alpha\": 0.5},\n\n 25,\n\n ),\n\n (\"bs\", {}, 0),\n\n # number_of_wild_segments\n\n (\"wbs\", {\"number_of_wild_segments\": 10}, 10),\n\n (\"wbs\", {\"number_of_wild_segments\": 25}, 25),\n\n ],\n\n)\n\ndef test_control_segmentation_parameters(\n\n 
iris_dataset, segmentation_type, kwargs, expected_number_of_segments\n\n):\n\n result = changeforest(\n\n iris_dataset, \"change_in_mean\", segmentation_type, Control(**kwargs)\n\n )\n\n # For each split, add evaluation on left / right segment to segments.\n\n expected_number_of_segments = (\n\n expected_number_of_segments + 2 * len(result.split_points()) + 1\n\n )\n\n assert len(result.segments) == expected_number_of_segments\n\n\n\n\n\[email protected](\n\n \"key, default_value, another_value\",\n\n [\n\n (\"random_forest_n_estimators\", 100, 11),\n\n (\"minimal_relative_segment_length\", 0.01, 0.05),\n\n (\"seed\", 0, 1),\n\n (\"random_forest_max_features\", \"default\", 1),\n\n (\"random_forest_max_depth\", 8, None),\n\n ],\n\n)\n\ndef test_control_defaults(iris_dataset, key, default_value, another_value):\n\n result = changeforest(iris_dataset, \"random_forest\", \"bs\", Control())\n\n default_result = changeforest(\n\n iris_dataset, \"random_forest\", \"bs\", Control(**{key: default_value})\n\n )\n\n another_result = changeforest(\n\n iris_dataset, \"random_forest\", \"bs\", Control(**{key: another_value})\n\n )\n\n\n\n assert str(result) == str(default_result)\n\n assert str(result) != str(another_result)\n", "file_path": "changeforest-py/tests/test_control.py", "rank": 41, "score": 23578.242357591786 }, { "content": "def _to_int(value):\n\n if value is None:\n\n return None\n\n elif isinstance(value, str):\n\n return value\n\n else:\n", "file_path": "changeforest-py/changeforest/control.py", "rank": 42, "score": 23578.242357591786 }, { "content": " }\n\n}\n\n\n\n/// Classifier that predicts uniformly distributed values.\n\npub struct RandomClassifier<'a> {\n\n pub n: usize,\n\n pub control: &'a Control,\n\n pub seed: u64,\n\n}\n\n\n\nimpl<'a> Classifier for RandomClassifier<'a> {\n\n fn n(&self) -> usize {\n\n self.n\n\n }\n\n\n\n fn predict(&self, start: usize, stop: usize, guess: usize) -> Array1<f64> {\n\n let mut rng = 
StdRng::seed_from_u64(self.seed);\n\n let mut predictions = Array1::zeros(stop - start);\n\n let left = Array1::random_using(\n\n guess - start,\n", "file_path": "src/testing.rs", "rank": 43, "score": 15.069749576298422 }, { "content": "\n\n if let Some(right_boxed) = &self.right {\n\n split_points.append(&mut right_boxed.split_points());\n\n }\n\n\n\n split_points\n\n }\n\n\n\n pub fn with_segments(mut self, segmentation: Segmentation) -> Self {\n\n self.segments = Some(segmentation.segments);\n\n self\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::super::control::Control;\n\n use super::*;\n\n use crate::optimizer::GridSearch;\n\n use crate::segmentation::{Segmentation, SegmentationType};\n", "file_path": "src/binary_segmentation.rs", "rank": 44, "score": 11.813001132941928 }, { "content": "// Allow capital X for arrays.\n\n#![allow(non_snake_case)]\n\n#![allow(clippy::module_inception)]\n\n// BS, SBS and WBS\n\n#![allow(clippy::upper_case_acronyms)]\n\n\n\nmod binary_segmentation;\n\npub mod classifier;\n\nmod control;\n\nmod fmt;\n\npub mod gain;\n\nmod model_selection_result;\n\npub mod optimizer;\n\nmod segmentation;\n\n\n\npub use binary_segmentation::{BinarySegmentationResult, BinarySegmentationTree};\n\npub use classifier::Classifier;\n\npub use control::Control;\n\npub use gain::{ClassifierGain, Gain};\n\npub use model_selection_result::ModelSelectionResult;\n\npub use optimizer::Optimizer;\n\npub use segmentation::{Segmentation, SegmentationType};\n\npub mod utils;\n\npub mod wrapper;\n\n\n\n#[cfg(test)]\n\nmod testing;\n", "file_path": "src/lib.rs", "rank": 45, "score": 10.402873127101776 }, { "content": "use crate::gain::GainResult;\n\nuse crate::optimizer::OptimizerResult;\n\nuse crate::{Control, Gain, ModelSelectionResult, Optimizer};\n\n\n\npub struct GridSearch<T: Gain> {\n\n pub gain: T,\n\n}\n\n\n\nimpl<T> Optimizer for GridSearch<T>\n\nwhere\n\n T: Gain,\n\n{\n\n fn n(&self) -> usize {\n\n self.gain.n()\n\n }\n\n\n\n fn 
control(&self) -> &Control {\n\n self.gain.control()\n\n }\n\n\n", "file_path": "src/optimizer/grid_search.rs", "rank": 46, "score": 10.190756390789215 }, { "content": "use crate::{Classifier, Control};\n\nuse biosphere::RandomForest as BioForest;\n\nuse ndarray::{s, Array1, ArrayView2};\n\n\n\npub struct RandomForest<'a, 'b> {\n\n X: &'a ArrayView2<'b, f64>,\n\n control: &'a Control,\n\n}\n\n\n\nimpl<'a, 'b> RandomForest<'a, 'b> {\n\n pub fn new(X: &'a ArrayView2<'b, f64>, control: &'a Control) -> RandomForest<'a, 'b> {\n\n RandomForest { X, control }\n\n }\n\n}\n\n\n\nimpl<'a, 'b> Classifier for RandomForest<'a, 'b> {\n\n fn n(&self) -> usize {\n\n self.X.nrows()\n\n }\n\n\n", "file_path": "src/classifier/random_forest.rs", "rank": 47, "score": 10.09374940508097 }, { "content": "use crate::control::Control;\n\nuse crate::gain::Gain;\n\nuse crate::optimizer::OptimizerResult;\n\nuse crate::ModelSelectionResult;\n\nuse std::cell::{Ref, RefCell};\n\n\n\npub struct ChangeInMean<'a, 'b> {\n\n X: &'a ndarray::ArrayView2<'b, f64>,\n\n X_cumsum: RefCell<Option<ndarray::Array2<f64>>>,\n\n control: &'a Control,\n\n}\n\n\n\nimpl<'a, 'b> ChangeInMean<'a, 'b> {\n\n pub fn new(X: &'a ndarray::ArrayView2<'b, f64>, control: &'a Control) -> ChangeInMean<'a, 'b> {\n\n ChangeInMean {\n\n X,\n\n X_cumsum: RefCell::new(Option::None),\n\n control,\n\n }\n\n }\n", "file_path": "src/gain/change_in_mean.rs", "rank": 48, "score": 9.932599158296089 }, { "content": "use crate::classifier::Classifier;\n\nuse crate::gain::{gain_from_likelihoods, ApproxGain, ApproxGainResult, Gain};\n\nuse crate::optimizer::OptimizerResult;\n\nuse crate::{Control, ModelSelectionResult, Optimizer};\n\nuse ndarray::{s, stack, Array, Array1, Array2, ArrayView2, Axis};\n\nuse ndarray_rand::rand_distr::{Normal, Uniform};\n\nuse ndarray_rand::RandomExt;\n\nuse rand::rngs::StdRng;\n\nuse rand::SeedableRng;\n\n\n\npub struct ChangeInMean<'a> {\n\n X: &'a ndarray::ArrayView2<'a, f64>,\n\n control: &'a 
Control,\n\n}\n\n\n\nimpl<'a> ChangeInMean<'a> {\n\n pub fn new(X: &'a ArrayView2<'a, f64>, control: &'a Control) -> ChangeInMean<'a> {\n\n ChangeInMean { X, control }\n\n }\n\n}\n", "file_path": "src/testing.rs", "rank": 49, "score": 9.751232261275149 }, { "content": "use crate::{Classifier, Control};\n\nuse ndarray::{s, Array1, Array2, ArrayView2, Axis};\n\nuse std::cell::{Ref, RefCell};\n\n\n\n#[allow(non_camel_case_types)]\n\npub struct kNN<'a, 'b> {\n\n X: &'a ArrayView2<'b, f64>,\n\n ordering: RefCell<Option<Array2<usize>>>,\n\n control: &'a Control,\n\n}\n\n\n\nimpl<'a, 'b> kNN<'a, 'b> {\n\n pub fn new(X: &'a ArrayView2<'b, f64>, control: &'a Control) -> kNN<'a, 'b> {\n\n kNN {\n\n X,\n\n ordering: RefCell::new(Option::None),\n\n control,\n\n }\n\n }\n\n\n", "file_path": "src/classifier/knn.rs", "rank": 50, "score": 9.742462688213834 }, { "content": " predictions\n\n .slice_mut(s![(split - start)..])\n\n .map_inplace(|x| {\n\n if x.is_nan() {\n\n *x = (stop - split - 1) as f64 / (stop - start - 1) as f64\n\n }\n\n });\n\n predictions\n\n }\n\n\n\n fn control(&self) -> &Control {\n\n self.control\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::gain::ClassifierGain;\n\n use crate::optimizer::{Optimizer, TwoStepSearch};\n", "file_path": "src/classifier/random_forest.rs", "rank": 51, "score": 9.353791871302798 }, { "content": "use crate::gain::{ApproxGain, GainResult};\n\nuse crate::optimizer::OptimizerResult;\n\nuse crate::{Control, Gain, ModelSelectionResult, Optimizer};\n\n\n\npub struct TwoStepSearch<T: Gain> {\n\n pub gain: T,\n\n}\n\n\n\nimpl<T> TwoStepSearch<T>\n\nwhere\n\n T: Gain + ApproxGain,\n\n{\n\n fn _single_find_best_split(\n\n &self,\n\n start: usize,\n\n stop: usize,\n\n guess: usize,\n\n split_candidates: &[usize],\n\n ) -> GainResult {\n\n let mut approx_gain_result = self.gain.gain_approx(start, stop, guess, split_candidates);\n", "file_path": "src/optimizer/two_step_search.rs", "rank": 52, "score": 
9.31488866948443 }, { "content": "use crate::control::Control;\n\nuse crate::gain::{ApproxGain, ApproxGainResult, Gain, GainResult};\n\nuse crate::optimizer::OptimizerResult;\n\nuse crate::Classifier;\n\nuse crate::ModelSelectionResult;\n\nuse ndarray::{s, Array1, Array2, Axis};\n\nuse rand::{rngs::StdRng, SeedableRng};\n\n\n\npub struct ClassifierGain<T: Classifier> {\n\n pub classifier: T,\n\n}\n\n\n\nimpl<T> Gain for ClassifierGain<T>\n\nwhere\n\n T: Classifier,\n\n{\n\n /// Total number of observations.\n\n fn n(&self) -> usize {\n\n self.classifier.n()\n\n }\n", "file_path": "src/gain/classifier_gain.rs", "rank": 53, "score": 9.238378938440803 }, { "content": "mod control;\n\nmod result;\n\n\n\nuse crate::control::control_from_pyobj;\n\nuse crate::result::{MyBinarySegmentationResult, MyOptimizerResult};\n\nuse changeforest::wrapper;\n\nuse numpy::PyReadonlyArray2;\n\nuse pyo3::prelude::{pymodule, PyModule, PyResult, Python};\n\nuse pyo3::PyObject;\n\n\n\n// Note: This has to match the lib.name in Cargo.toml.\n\n#[allow(non_snake_case)] // Allow capital X for arrays.\n\n#[pymodule]\n", "file_path": "changeforest-py/src/lib.rs", "rank": 54, "score": 9.147281755681393 }, { "content": " pub control: &'a Control,\n\n}\n\n\n\nimpl<'a> Classifier for TrivialClassifier<'a> {\n\n fn n(&self) -> usize {\n\n self.n\n\n }\n\n\n\n fn predict(&self, start: usize, stop: usize, split: usize) -> Array1<f64> {\n\n let mut X = Array::zeros(stop - start);\n\n X.slice_mut(s![0..(split - start)])\n\n .fill((stop - split) as f64 / (stop - start - 1) as f64);\n\n X.slice_mut(s![(split - start)..])\n\n .fill((stop - split - 1) as f64 / (stop - start - 1) as f64);\n\n X[[0]] = 0.;\n\n X\n\n }\n\n\n\n fn control(&self) -> &Control {\n\n self.control\n", "file_path": "src/testing.rs", "rank": 55, "score": 9.033321753472686 }, { "content": " Normal::new((stop - guess) as f64 / (stop - start - 1) as f64, 0.1).unwrap(),\n\n &mut rng,\n\n );\n\n let right = Array1::random_using(\n\n stop - 
guess,\n\n Normal::new((stop - guess) as f64 / (stop - start - 1) as f64, 0.1).unwrap(),\n\n &mut rng,\n\n );\n\n predictions.slice_mut(s![..(guess - start)]).assign(&left);\n\n predictions.slice_mut(s![(guess - start)..]).assign(&right);\n\n predictions.mapv_inplace(|x| f64::min(f64::max(0., x), 1.));\n\n predictions\n\n }\n\n\n\n fn control(&self) -> &Control {\n\n self.control\n\n }\n\n}\n\n\n", "file_path": "src/testing.rs", "rank": 56, "score": 8.770149508312217 }, { "content": " // n_segments, e.g. for n = 20'000, alpha_k = 1/sqrt(2), k=6\n\n stop = (start + (segment_length as f32).ceil() as usize).min(optimizer.n());\n\n segments.push(optimizer.find_best_split(start, stop).unwrap());\n\n }\n\n }\n\n }\n\n SegmentationType::WBS => {\n\n let mut rng = StdRng::seed_from_u64(optimizer.control().seed);\n\n let dist = Uniform::from(0..(optimizer.n() + 1));\n\n\n\n let mut start: usize;\n\n let mut stop: usize;\n\n\n\n while segments.len() < optimizer.control().number_of_wild_segments {\n\n start = dist.sample(&mut rng);\n\n stop = dist.sample(&mut rng);\n\n if start < stop {\n\n if let Ok(optimizer_result) = optimizer.find_best_split(start, stop) {\n\n segments.push(optimizer_result)\n\n }\n", "file_path": "src/segmentation.rs", "rank": 57, "score": 8.674794729207756 }, { "content": " ///\n\n /// For each permutation, we shuffle the predictions (and thus the likelihoods) of\n\n /// each of the three initial classifier fits (using the same permutation), and\n\n /// compute the maximum of the three resulting maximal gains. 
We count the number\n\n /// of permutations where the resulting maximal gain was larger than the observed\n\n /// maximal gain to compute a p-value.\n\n fn model_selection(&self, optimizer_result: &OptimizerResult) -> ModelSelectionResult {\n\n let mut rng = StdRng::seed_from_u64(self.control().seed);\n\n\n\n let mut max_gain = -f64::INFINITY;\n\n let mut deltas: Vec<Array1<f64>> = Vec::with_capacity(3);\n\n let mut likelihood_0: Vec<f64> = Vec::with_capacity(3);\n\n\n\n for gain_result in optimizer_result.gain_results.split_last().unwrap().1.iter() {\n\n let result = match gain_result {\n\n GainResult::ApproxGainResult(result) => result,\n\n _ => panic!(\"Not an ApproxGainResult\"),\n\n };\n\n\n\n deltas\n", "file_path": "src/gain/classifier_gain.rs", "rank": 58, "score": 8.620302226877818 }, { "content": " use crate::testing;\n\n use crate::Control;\n\n use csv::ReaderBuilder;\n\n use ndarray::Array2;\n\n use ndarray_csv::Array2Reader;\n\n use rstest::*;\n\n use std::fs::File;\n\n\n\n #[rstest]\n\n #[case(0, 50, 100)]\n\n #[case(0, 100, 150)]\n\n #[case(50, 100, 150)]\n\n #[case(0, 50, 150)]\n\n fn test_predictions(#[case] start: usize, #[case] split: usize, #[case] stop: usize) {\n\n let file = File::open(\"testdata/iris.csv\").unwrap();\n\n let mut reader = ReaderBuilder::new().has_headers(true).from_reader(file);\n\n let X: Array2<f64> = reader.deserialize_array2((150, 4)).unwrap();\n\n let X_view = X.view();\n\n\n\n let control = Control::default();\n", "file_path": "src/classifier/random_forest.rs", "rank": 59, "score": 8.512101310905784 }, { "content": " );\n\n // See Definition 1 of https://arxiv.org/pdf/2002.06633.pdf\n\n let n_layers = ((minimal_segment_length / optimizer.n() as f64).ln()\n\n / optimizer.control().seeded_segments_alpha.ln())\n\n .ceil();\n\n let mut segment_length: f64;\n\n let mut alpha_k: f64;\n\n let mut n_segments: usize;\n\n let mut segment_step: f64;\n\n let mut start: usize;\n\n let mut stop: usize;\n\n for k in 1..(n_layers as 
i32) {\n\n alpha_k = optimizer.control().seeded_segments_alpha.powi(k); // (1/alpha)^(k-1)\n\n segment_length = (optimizer.n() as f64) * alpha_k; // l_k\n\n n_segments = 2 * ((1. / alpha_k) as f32).ceil() as usize - 1; // n_k\n\n segment_step =\n\n (optimizer.n() as f64 - segment_length) / (n_segments - 1) as f64; // s_k\n\n for segment_id in 0..(n_segments as usize) {\n\n start = ((segment_id as f64 * segment_step) as f32) as usize;\n\n // start + segment_length > n through floating point errors in\n", "file_path": "src/segmentation.rs", "rank": 60, "score": 8.384780020677814 }, { "content": "\n\n#[pyproto]\n\n// https://stackoverflow.com/questions/62666926/str-function-of-class-ported-from-\\\n\n// rust-to-python-using-pyo3-doesnt-get-used\n\n// https://pyo3.rs/v0.9.2/python_from_rust.html\n\nimpl pyo3::class::basic::PyObjectProtocol for MyOptimizerResult {\n\n fn __repr__(&self) -> PyResult<String> {\n\n Ok(format!(\"{}\", self.result))\n\n }\n\n}\n\n\n\n#[pyclass(name = \"BinarySegmentationResult\")]\n\n#[derive(Clone, Debug)]\n\npub struct MyBinarySegmentationResult {\n\n pub result: BinarySegmentationResult,\n\n}\n\n\n\n#[pymethods]\n\nimpl MyBinarySegmentationResult {\n\n #[getter]\n", "file_path": "changeforest-py/src/result.rs", "rank": 61, "score": 8.368743509984423 }, { "content": "use crate::gain::GainResult;\n\nuse std::fmt;\n\n\n\n#[derive(Clone, Debug)]\n\npub struct OptimizerResult {\n\n pub start: usize,\n\n pub stop: usize,\n\n pub best_split: usize,\n\n pub max_gain: f64,\n\n pub gain_results: Vec<GainResult>,\n\n}\n\n\n\n// https://doc.rust-lang.org/rust-by-example/hello/print/print_display.html\n\nimpl fmt::Display for OptimizerResult {\n\n // This trait requires `fmt` with this exact signature.\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"OptimizerResult(start={}, stop={}, best_split={}, max_gain={})\",\n\n self.start, self.stop, self.best_split, self.max_gain\n\n )\n\n }\n\n}\n", "file_path": 
"src/optimizer/optimizer_result.rs", "rank": 62, "score": 7.923088316594685 }, { "content": "mod classifier;\n\nmod knn;\n\nmod random_forest;\n\n\n\npub use classifier::Classifier;\n\npub use knn::kNN;\n\npub use random_forest::RandomForest;\n", "file_path": "src/classifier/mod.rs", "rank": 63, "score": 7.789362933386423 }, { "content": "mod grid_search;\n\nmod optimizer;\n\n\n\nmod optimizer_result;\n\nmod two_step_search;\n\n\n\npub use grid_search::GridSearch;\n\npub use optimizer::Optimizer;\n\npub use optimizer_result::OptimizerResult;\n\npub use two_step_search::TwoStepSearch;\n", "file_path": "src/optimizer/mod.rs", "rank": 64, "score": 7.687377697351472 }, { "content": " for l in left.iter_mut().skip(1) {\n\n l[0] = format!(\" ¦ {}\", l[0]);\n\n }\n\n for r in right.iter_mut().skip(1) {\n\n r[0] = format!(\" {}\", r[0]);\n\n }\n\n\n\n output.append(&mut left);\n\n output.append(&mut right);\n\n }\n\n\n\n output\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::optimizer::OptimizerResult;\n\n use crate::ModelSelectionResult;\n\n\n", "file_path": "src/fmt.rs", "rank": 65, "score": 7.487913687379539 }, { "content": "// Allow capital X for arrays.\n\n#![allow(non_snake_case)]\n\n\n\nmod control;\n\nmod result;\n\n\n\nuse crate::control::MyControl;\n\nuse crate::result::MyBinarySegmentationResult;\n\nuse changeforest::wrapper;\n\nuse extendr_api::prelude::*;\n\nuse ndarray;\n\n\n\n#[extendr]\n", "file_path": "changeforest-r/src/rust/src/lib.rs", "rank": 66, "score": 7.483184671168092 }, { "content": " use crate::testing;\n\n\n\n #[test]\n\n fn test_binary_segmentation_change_in_mean() {\n\n let X = testing::array();\n\n let X_view = X.view();\n\n\n\n assert_eq!(X_view.shape(), &[100, 5]);\n\n\n\n let control = Control::default();\n\n let gain = testing::ChangeInMean::new(&X_view, &control);\n\n let optimizer = GridSearch { gain };\n\n let mut segmentation = Segmentation::new(SegmentationType::BS, &optimizer);\n\n let mut 
binary_segmentation = BinarySegmentationTree::new(&X_view);\n\n\n\n binary_segmentation.grow(&mut segmentation);\n\n\n\n let optimizer_result = binary_segmentation.optimizer_result.as_ref().unwrap();\n\n assert_eq!(optimizer_result.best_split, 80);\n\n assert_eq!(optimizer_result.start, 0);\n", "file_path": "src/binary_segmentation.rs", "rank": 67, "score": 7.477292198494801 }, { "content": "mod change_in_mean;\n\nmod classifier_gain;\n\nmod gain;\n\nmod gain_result;\n\n\n\npub use change_in_mean::ChangeInMean;\n\npub use classifier_gain::{gain_from_likelihoods, ClassifierGain};\n\npub use gain::{ApproxGain, Gain};\n\npub use gain_result::{ApproxGainResult, FullGainResult, GainResult};\n", "file_path": "src/gain/mod.rs", "rank": 68, "score": 7.4722370769660005 }, { "content": "use std::fmt;\n\n\n\n#[derive(Clone, Debug, Default)]\n\npub struct ModelSelectionResult {\n\n pub is_significant: bool,\n\n pub p_value: Option<f64>,\n\n}\n\n\n\n// https://doc.rust-lang.org/rust-by-example/hello/print/print_display.html\n\nimpl fmt::Display for ModelSelectionResult {\n\n // This trait requires `fmt` with this exact signature.\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(\n\n f,\n\n \"ModelSelectionResult(is_significant={}, p_value={:?})\",\n\n self.is_significant, self.p_value\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/model_selection_result.rs", "rank": 69, "score": 7.346258144088342 }, { "content": "use crate::control::Control;\n\nuse crate::gain::{ApproxGainResult, FullGainResult};\n\nuse crate::optimizer::OptimizerResult;\n\nuse crate::ModelSelectionResult;\n\n\n", "file_path": "src/gain/gain.rs", "rank": 70, "score": 7.309979012430295 }, { "content": "use crate::optimizer::OptimizerResult;\n\nuse crate::ModelSelectionResult;\n\nuse crate::Optimizer;\n\nuse rand::{\n\n distributions::{Distribution, Uniform},\n\n rngs::StdRng,\n\n SeedableRng,\n\n};\n\npub enum SegmentationType {\n\n BS,\n\n WBS,\n\n SBS,\n\n}\n\n\n\npub struct Segmentation<'a> 
{\n\n pub segments: Vec<OptimizerResult>,\n\n optimizer: &'a dyn Optimizer,\n\n}\n\n\n\nimpl<'a> Segmentation<'a> {\n", "file_path": "src/segmentation.rs", "rank": 71, "score": 7.267989797522991 }, { "content": " self.control\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n use crate::testing;\n\n use assert_approx_eq::*;\n\n use rstest::*;\n\n\n\n #[test]\n\n fn test_X_cumsum() {\n\n let X = ndarray::array![[1., 0.], [1., 0.], [1., 1.], [1., 1.]];\n\n let X_view = X.view();\n\n let control = Control::default();\n\n\n\n let change_in_mean = ChangeInMean::new(&X_view, &control);\n\n let X_cumsum = change_in_mean.calculate_cumsum();\n", "file_path": "src/gain/change_in_mean.rs", "rank": 72, "score": 7.191054031156611 }, { "content": " (0, 71), (14, 85), (29, 100), (0, 50), (25, 75),\n\n (50, 100), (0, 36), (16, 52), (32, 68), (48, 84),\n\n (64, 100), (0, 25), (12, 37), (25, 50), (37, 62),\n\n (50, 75), (62, 87), (75, 100)\n\n ])]\n\n #[case(0.12, 0.5, vec![\n\n (0, 50), (25, 75), (50, 100),\n\n (0, 25), (12, 37), (25, 50), (37, 62),\n\n (50, 75), (62, 87), (75, 100)\n\n ])]\n\n fn test_sbs_segments(\n\n #[case] minimal_relative_segment_length: f64,\n\n #[case] seeded_segments_alpha: f64,\n\n #[case] expected: Vec<(usize, usize)>,\n\n ) {\n\n let control = Control::default()\n\n .with_minimal_relative_segment_length(minimal_relative_segment_length)\n\n .with_seeded_segments_alpha(seeded_segments_alpha);\n\n\n\n let optimizer = testing::TrivialOptimizer { control: &control };\n", "file_path": "src/segmentation.rs", "rank": 73, "score": 7.1788457080766275 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n use crate::testing::TrivialClassifier;\n\n use assert_approx_eq::*;\n\n\n\n #[test]\n\n fn test_single_likelihood() {\n\n let control = Control::default();\n\n let classifier = TrivialClassifier {\n\n n: 10,\n\n control: &control,\n\n };\n\n let predictions = classifier.predict(0, 10, 5);\n\n assert_approx_eq!(\n\n 
classifier.single_likelihood(&predictions, 0, 10, 5),\n\n 0.809552182\n\n );\n\n }\n", "file_path": "src/classifier/classifier.rs", "rank": 74, "score": 7.149224338816702 }, { "content": "use ndarray::{Array1, Array2};\n\nuse std::clone::Clone;\n\nuse std::fmt;\n\n\n\n#[derive(Debug, Clone)]\n\n/// Container to hold results of the `gain_approx` method of the `GainApprox` trait.\n\npub struct ApproxGainResult {\n\n pub start: usize,\n\n pub stop: usize,\n\n pub guess: usize,\n\n pub gain: Array1<f64>,\n\n pub max_gain: Option<f64>,\n\n pub best_split: Option<usize>,\n\n pub likelihoods: Array2<f64>,\n\n pub predictions: Array1<f64>,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\n/// Container to hold results of the `gain_full` method of the `Gain` trait.\n\npub struct FullGainResult {\n", "file_path": "src/gain/gain_result.rs", "rank": 75, "score": 7.1404341486926555 }, { "content": "## 0.5.0 - (2022-03-15)\n\n\n\n**Breaking changes:**\n\n\n\n- The parameters `random_forest_mtry` and `random_forest_n_trees` of `Control` have been renamed to `random_forest_max_features` and `random_forest_n_estimators`.\n\n- The default value for `random_forest_max_features` now is `floor(sqrt(d))`.\n\n\n\n**New features:**\n\n\n\n- The parameter `random_forest_max_features` now can be supplied with a fraction `0 < f < 1`, an integer `i>=1`, `None` (Python, Rust) / `NULL` (R) and `\"sqrt\"`. Then, for each split, repsectively `floor(f d)`, `i`, `d` or `floor(sqrt(d))` features are considered.\n\n\n\n**Other changes:**\n\n\n\n- Bump `biosphere` dependency to 0.3.0\n\n\n\n## 0.4.4 - (2022-02-22)\n\n\n\n**Other changes:**\n\n\n\n- Bump `biosphere` dependency to 0.2.2.\n\n\n\n## 0.4.3 - (2021-01-29)\n\n\n\n**Other changes:**\n\n\n\n- The default value for `Control.minimal_gain_to_split` is now `log(n_samples) * n_features / n_samples`,\n\nmotivated by the Bayesian information criterion (BIC). 
\n\n\n\n## 0.4.2 - (2021-01-21)\n\n\n\n**Other changes:**\n\n\n\n- The R-package now makes use of the latest version of `libR-sys`, enabling compilation for Apple silicon on `conda-forge` (#86).\n\n\n\n**Bug fixes:**\n\n\n\n- Fixed a bug where passing `Control()` to `changeforest` in the Python package overwrote the default value for `random_forest_max_depth` to `None`. Default values for `Control` in the python package are now `\"default\"` (#87).\n\n\n\n## 0.4.1 - (2021-01-13)\n\n\n\n**Bug fixes:**\n\n\n\n- Upgrade `biosphere` to `0.2.1` fixing a bug in `RandomForest` (#84).\n\n\n\n**Other changes:**\n\n\n\n- New parameter `model_selection_n_permutations` (#85).\n\n\n\n## 0.4.0 - (2021-01-11)\n\n\n\n**New features:**\n\n\n\n- `changeforest` now uses random forests from [`biosphere`](https://github.com/mlondschien/biosphere).\n\n This should be faster than `smartcore` used previously and supports parallelization (#82).\n\n\n", "file_path": "CHANGELOG.md", "rank": 76, "score": 7.093237046701566 }, { "content": " .take(k_usize) // Only look at first k neighbors\n\n .filter(|j| **j >= split)\n\n .count() as f64\n\n / k; // Proportion of neighbors from after split.\n\n }\n\n\n\n predictions\n\n }\n\n\n\n fn control(&self) -> &Control {\n\n self.control\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::gain::{ApproxGain, ClassifierGain, Gain};\n\n use crate::optimizer::{Optimizer, TwoStepSearch};\n\n use crate::testing;\n", "file_path": "src/classifier/knn.rs", "rank": 77, "score": 7.062862305517205 }, { "content": "use crate::classifier::{kNN, RandomForest};\n\nuse crate::control::Control;\n\nuse crate::gain::{ChangeInMean, ClassifierGain};\n\nuse crate::optimizer::{GridSearch, TwoStepSearch};\n\nuse crate::segmentation::{Segmentation, SegmentationType};\n\nuse crate::{BinarySegmentationResult, BinarySegmentationTree};\n\nuse ndarray;\n\n\n", "file_path": "src/wrapper.rs", "rank": 78, "score": 7.02144511482379 }, { "content": "## 0.3.0 - 
(2021-12-15)\n\n\n\n**New features:**\n\n\n\n- Implemented trait `Display` for `BinarySegmentationResult`. In Python `str(result)` now prints a pretty display (#77).\n\n\n\n**Other changes:**\n\n\n\n- The `TwoStepSearch` algorithm now only uses valid guesses from `split_candidates` (#76).\n\n\n\n**Bug fixes:**\n\n\n\n- (R only) The R6 class `Control` now gets correctly exported (#79).\n\n\n\n## 0.2.1 - (2021-12-13)\n\n\n\n**Bug fixes:**\n\n\n\n- (Python only) Parameters will now be correctly passed to `changeforest` via `Control` even\n\n if they have an incorrect data type (#67).\n\n- Fixed a bug where SBS would panic in cases with very small minimal segments lengths\n\n due to rounding (#70).\n\n- Fixed a bug in model selection that resulted in a higher type I error (#71).\n\n\n\n\n\n## 0.2.0 - (2021-12-10)\n\n\n\n**New features:**\n\n\n\n- The `TwoStepSearch` now uses three initial guesses, the 0.25, 0.5 and 0.75 quantiles\n\n of the segment, for the first step. The the best split corresponding to the highest\n\n maximal gain from the three guesses is used in the second step. The permutation test\n\n used for model selection has also been adjusted to be consistent (#65).\n\n\n\n This increases estimation performance for classifier-based methods, especially if used\n\n with standard binary segmentation, i.e. 
for `changeforst_bs` and `changeforest_knn`.\n\n\n\n## 0.1.1 - (2021-11-25)\n\n\n\n**Other changes:**\n\n\n\n- Added license file for compatability with conda-forge.\n", "file_path": "CHANGELOG.md", "rank": 79, "score": 7.003735987694611 }, { "content": "use crate::utils::log_eta;\n\nuse crate::Control;\n\nuse ndarray::{s, stack, Array1, Array2, Axis};\n\n\n", "file_path": "src/classifier/classifier.rs", "rank": 80, "score": 6.882507569832597 }, { "content": " fn predict(&self, start: usize, stop: usize, split: usize) -> Array1<f64> {\n\n let mut y = Array1::<f64>::zeros(stop - start);\n\n y.slice_mut(s![(split - start)..]).fill(1.);\n\n let y_slice = y.slice(s![..]);\n\n\n\n let X_slice = self.X.slice(s![start..stop, ..]);\n\n let parameters = self.control().random_forest_parameters.clone();\n\n\n\n let mut forest = BioForest::new(parameters);\n\n let mut predictions = forest.fit_predict_oob(&X_slice, &y_slice);\n\n\n\n // For a very small n_trees, the predictions may be NaN. In this case use the\n\n // prior. Note that we need to adjust by -1 because the predictions are oob.\n\n predictions\n\n .slice_mut(s![0..(split - start)])\n\n .map_inplace(|x| {\n\n if x.is_nan() {\n\n *x = (stop - split) as f64 / (stop - start - 1) as f64\n\n }\n\n });\n", "file_path": "src/classifier/random_forest.rs", "rank": 81, "score": 6.838899382217461 }, { "content": "use crate::optimizer::OptimizerResult;\n\nuse crate::{Control, ModelSelectionResult};\n\n\n", "file_path": "src/optimizer/optimizer.rs", "rank": 82, "score": 6.787024163427279 }, { "content": "\n\n likelihoods\n\n .slice_mut(s![0, ..(split - start)])\n\n .mapv_inplace(|x| log_eta((1. - x) * prior_00));\n\n likelihoods\n\n .slice_mut(s![0, (split - start)..])\n\n .mapv_inplace(|x| log_eta((1. 
- x) * prior_01));\n\n likelihoods\n\n .slice_mut(s![1, ..(split - start)])\n\n .mapv_inplace(|x| log_eta(x * prior_10));\n\n likelihoods\n\n .slice_mut(s![1, (split - start)..])\n\n .mapv_inplace(|x| log_eta(x * prior_11));\n\n\n\n likelihoods\n\n }\n\n\n\n fn control(&self) -> &Control;\n\n}\n\n\n", "file_path": "src/classifier/classifier.rs", "rank": 83, "score": 6.667764559633482 }, { "content": " use assert_approx_eq::*;\n\n use ndarray::arr1;\n\n use rstest::*;\n\n\n\n #[test]\n\n fn test_X_ordering() {\n\n let X = ndarray::array![[1.], [1.5], [3.], [-0.5]];\n\n let X_view = X.view();\n\n let control = Control::default();\n\n\n\n let knn = kNN::new(&X_view, &control);\n\n let ordering = knn.calculate_ordering();\n\n let expected = ndarray::array![[0, 1, 3, 2], [1, 0, 2, 3], [2, 1, 0, 3], [3, 0, 1, 2]];\n\n assert_eq!(ordering, expected)\n\n }\n\n\n\n #[rstest]\n\n #[case(0, 6, 2, arr1(&[0.5, 0.5, 0., 1., 1., 0.5]))]\n\n #[case(0, 6, 3, arr1(&[0., 0., 0., 1., 1., 0.5]))]\n\n #[case(1, 6, 2, arr1(&[1., 0.5, 1., 1., 0.5]))]\n", "file_path": "src/classifier/knn.rs", "rank": 84, "score": 6.576097964890429 }, { "content": "// Wrap GainResult, OptimizerResult and BinarySegmentationResult.\n\n// See https://github.com/PyO3/pyo3/issues/287.\n\n\n\nuse changeforest::gain::GainResult;\n\nuse changeforest::optimizer::OptimizerResult;\n\nuse changeforest::{BinarySegmentationResult, ModelSelectionResult};\n\nuse numpy::{PyArray1, PyArray2, ToPyArray};\n\nuse pyo3::prelude::*;\n\n\n\n#[pyclass(name = \"ModelSelectionResult\")]\n\n#[derive(Clone, Debug)]\n\npub struct MyModelSelectionResult {\n\n pub result: ModelSelectionResult,\n\n}\n\n\n\n#[pymethods]\n\nimpl MyModelSelectionResult {\n\n #[getter]\n\n pub fn is_significant(&self) -> bool {\n\n self.result.is_significant\n", "file_path": "changeforest-py/src/result.rs", "rank": 85, "score": 6.57279915272659 }, { "content": " best_split: (3 * start + stop) / 4,\n\n max_gain: ((stop - start) * (start + 10)) as f64,\n\n 
gain_results: vec![],\n\n })\n\n }\n\n\n\n fn model_selection(&self, optimizer_result: &OptimizerResult) -> ModelSelectionResult {\n\n ModelSelectionResult {\n\n is_significant: optimizer_result.stop <= 50,\n\n p_value: None,\n\n }\n\n }\n\n\n\n fn control(&self) -> &Control {\n\n self.control\n\n }\n\n}\n\n\n\npub struct TrivialClassifier<'a> {\n\n pub n: usize,\n", "file_path": "src/testing.rs", "rank": 86, "score": 6.506660270885885 }, { "content": "\n\n use super::*;\n\n use crate::testing;\n\n use assert_approx_eq::*;\n\n use rstest::*;\n\n\n\n #[rstest]\n\n #[case(0, 4, 4. * 0.25)]\n\n #[case(0, 2, 0.)]\n\n #[case(0, 3, 4. / 6.)]\n\n #[case(1, 4, 4. / 6.)]\n\n #[case(1, 3, 4. * 0.125)]\n\n #[case(3, 3, 0.)]\n\n fn test_change_in_mean_loss(#[case] start: usize, #[case] stop: usize, #[case] expected: f64) {\n\n let X = ndarray::array![[0., 0.], [0., 0.], [0., 1.], [0., 1.]];\n\n let X_view = X.view();\n\n\n\n let control = Control::default();\n\n assert_eq!(X.shape(), &[4, 2]);\n\n\n", "file_path": "src/gain/gain.rs", "rank": 87, "score": 6.493966944526141 }, { "content": "use crate::optimizer::OptimizerResult;\n\nuse crate::{ModelSelectionResult, Segmentation};\n\n\n\npub struct BinarySegmentationTree {\n\n pub start: usize,\n\n pub stop: usize,\n\n pub n: usize,\n\n pub model_selection_result: ModelSelectionResult,\n\n pub left: Option<Box<BinarySegmentationTree>>,\n\n pub right: Option<Box<BinarySegmentationTree>>,\n\n pub optimizer_result: Option<OptimizerResult>,\n\n}\n\n\n\nimpl BinarySegmentationTree {\n\n pub fn new(X: &ndarray::ArrayView2<'_, f64>) -> BinarySegmentationTree {\n\n BinarySegmentationTree {\n\n start: 0,\n\n stop: X.nrows(),\n\n n: X.nrows(),\n\n model_selection_result: ModelSelectionResult::default(),\n", "file_path": "src/binary_segmentation.rs", "rank": 88, "score": 6.4513012941045975 }, { "content": "\n\n#[pymethods]\n\nimpl MyGainResult {\n\n #[getter]\n\n pub fn start(&self) -> usize {\n\n self.result.start()\n\n }\n\n\n\n 
#[getter]\n\n pub fn stop(&self) -> usize {\n\n self.result.stop()\n\n }\n\n\n\n #[getter]\n\n pub fn gain<'py>(&self, py: Python<'py>) -> &'py PyArray1<f64> {\n\n self.result.gain().to_pyarray(py)\n\n }\n\n\n\n #[getter]\n\n pub fn guess(&self) -> Option<usize> {\n", "file_path": "changeforest-py/src/result.rs", "rank": 89, "score": 6.401846788804404 }, { "content": " max_gain: None,\n\n predictions,\n\n likelihoods,\n\n }\n\n }\n\n}\n\n\n\npub struct TrivialOptimizer<'a> {\n\n pub control: &'a Control,\n\n}\n\n\n\nimpl<'a> Optimizer for TrivialOptimizer<'a> {\n\n fn n(&self) -> usize {\n\n 100\n\n }\n\n\n\n fn find_best_split(&self, start: usize, stop: usize) -> Result<OptimizerResult, &'static str> {\n\n Ok(OptimizerResult {\n\n start,\n\n stop,\n", "file_path": "src/testing.rs", "rank": 90, "score": 6.363440826182046 }, { "content": "\n\n #[test]\n\n fn test_full_likelihood() {\n\n let control = Control::default();\n\n let classifier = TrivialClassifier {\n\n n: 10,\n\n control: &control,\n\n };\n\n let predictions = classifier.predict(0, 10, 5);\n\n let mut expected = Array2::<f64>::zeros((2, 10));\n\n expected[[0, 0]] = 0.8095521826214339; // TODO: Why not 6.0\n\n expected[[1, 0]] = -6.0;\n\n\n\n assert_eq!(classifier.full_likelihood(&predictions, 0, 10, 5), expected);\n\n }\n\n}\n", "file_path": "src/classifier/classifier.rs", "rank": 91, "score": 6.28808473306365 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n use crate::{testing, Control};\n\n use rstest::*;\n\n\n\n #[rstest]\n\n #[case(0.05, std::f64::consts::FRAC_1_SQRT_2, vec![\n\n (0, 71), (14, 85), (29, 100), (0, 50), (25, 75),\n\n (50, 100), (0, 36), (16, 52), (32, 68), (48, 84),\n\n (64, 100), (0, 25), (12, 37), (25, 50), (37, 62),\n\n (50, 75), (62, 87), (75, 100), (0, 18), (8, 26),\n\n (16, 34), (24, 42), (32, 50), (41, 59), (49, 67),\n\n (57, 75), (65, 83), (74, 92), (82, 100), (0, 13),\n\n (6, 19), (12, 25), (18, 31), (25, 38), (31, 44),\n\n (37, 50), (43, 56), (50, 63), 
(56, 69), (62, 75),\n\n (68, 81), (75, 88), (81, 94), (87, 100)\n\n ])]\n\n #[case(0.12, std::f64::consts::FRAC_1_SQRT_2, vec![\n", "file_path": "src/segmentation.rs", "rank": 92, "score": 6.281438056721662 }, { "content": " if self.model_selection_result.is_significant {\n\n let mut left = self.new_left(optimizer_result.best_split);\n\n left.grow(segmentation);\n\n self.left = Some(left);\n\n\n\n let mut right = self.new_right(optimizer_result.best_split);\n\n right.grow(segmentation);\n\n self.right = Some(right);\n\n }\n\n\n\n self.optimizer_result = Some(optimizer_result);\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\n/// Struct holding results from a BinarySegmentationTree after fitting.\n\npub struct BinarySegmentationResult {\n\n pub start: usize,\n\n pub stop: usize,\n", "file_path": "src/binary_segmentation.rs", "rank": 93, "score": 6.214197200911702 }, { "content": " start: split,\n\n stop: self.stop,\n\n n: self.n,\n\n model_selection_result: ModelSelectionResult::default(),\n\n left: None,\n\n right: None,\n\n optimizer_result: None,\n\n })\n\n }\n\n\n\n /// Grow a `BinarySegmentationTree`.\n\n ///\n\n /// Recursively split segments and add subsegments as children `left` and\n\n /// `right` until segments are smaller then the minimal segment length\n\n /// (`n * control.minimal_relative_segment_length`) or the `OptimizerResult` is no\n\n /// longer significant.\n\n pub fn grow(&mut self, segmentation: &mut Segmentation) {\n\n if let Ok(optimizer_result) = segmentation.find_best_split(self.start, self.stop) {\n\n self.model_selection_result = segmentation.model_selection(&optimizer_result);\n\n\n", "file_path": "src/binary_segmentation.rs", "rank": 94, "score": 6.199447396950127 }, { "content": "Change point estimates are marked in red.\n\n\n\nFor `method=\"random_forest\"` and `method=\"knn\"`, the `changeforest` algorithm uses a two-step approach to\n\nfind an optimizer of the gain. 
This fits a classifier for three split candidates\n\nat the segment's 1/4, 1/2 and 3/4 quantiles, computes approximate gain curves using\n\nthe resulting classifier log-likelihood ratios and selects the overall optimizer as a second guess.\n\nWe can investigate the gain curves from the optimizer using the `plot` method of `OptimizerResult`.\n\nThe initial guesses are marked in blue.\n\n\n\n```python\n\nIn [5]: result.optimizer_result.plot().show()\n\n```\n\n<p align=\"center\">\n\n <img src=\"docs/py_cic_rf_optimizer_result_plot.png\" />\n\n</p>\n\n \n\nOne can observe that the approximate gain curves are piecewise linear, with maxima\n\naround the true underlying change points.\n\n\n\nThe `BinarySegmentationResult` returned by `changeforest` is a tree-like object with attributes\n\n`start`, `stop`, `best_split`, `max_gain`, `p_value`, `is_significant`, `optimizer_result`, `model_selection_result`, `left`, `right` and `segments`. \n\nThese can be interesting to investigate the output of the algorithm further.\n\n\n\nThe `changeforest` algorithm can be tuned with hyperparameters. See\n\n[here](https://github.com/mlondschien/changeforest/blob/287ac0f10728518d6a00bf698a4d5834ae98715d/src/control.rs#L3-L30)\n\nfor their descriptions and default values. In Python, the parameters can\n\nbe specified with the [`Control` class](https://github.com/mlondschien/changeforest/blob/b33533fe0ddf64c1ea60d0d2203e55b117811667/changeforest-py/changeforest/control.py#L1-L26),\n\nwhich can be passed to `changeforest`. 
The following will build random forests with\n", "file_path": "README.md", "rank": 95, "score": 6.033646213258022 }, { "content": " assert_eq!(optimizer_result.stop, 100);\n\n }\n\n\n\n #[test]\n\n fn test_binary_segmentation_result() {\n\n let X = testing::array();\n\n let X_view = X.view();\n\n\n\n assert_eq!(X_view.shape(), &[100, 5]);\n\n let control = Control::default();\n\n let gain = testing::ChangeInMean::new(&X_view, &control);\n\n let optimizer = GridSearch { gain };\n\n let mut segmentation = Segmentation::new(SegmentationType::SBS, &optimizer);\n\n let mut tree = BinarySegmentationTree::new(&X_view);\n\n\n\n tree.grow(&mut segmentation);\n\n\n\n let result = BinarySegmentationResult::from_tree(tree);\n\n\n\n assert_eq!(result.split_points(), vec![25, 40, 80]);\n", "file_path": "src/binary_segmentation.rs", "rank": 96, "score": 6.030041906971518 }, { "content": " use ndarray::array;\n\n use rstest::*;\n\n\n\n #[rstest]\n\n #[case(0, 6, 0.25 * 6.)]\n\n #[case(0, 3, 0.)]\n\n #[case(3, 6, 0.)]\n\n #[case(2, 4, 0.25 * 2.)]\n\n fn test_loss(#[case] start: usize, #[case] stop: usize, #[case] expected: f64) {\n\n let X = ndarray::array![[0.], [0.], [0.], [1.], [1.], [1.]];\n\n let X_view = X.view();\n\n let control = Control::default();\n\n\n\n let change_in_mean = ChangeInMean::new(&X_view, &control);\n\n assert_eq!(change_in_mean.loss(start, stop), expected)\n\n }\n\n\n\n #[rstest]\n\n #[case(0, 6, 3, array![-1.5, -0.5, 0.5, 1.5, 0.5, -0.5])]\n\n #[case(1, 5, 3, array![-1., 0., 1., 0.])]\n", "file_path": "src/testing.rs", "rank": 97, "score": 5.973998735949424 }, { "content": "These can be interesting to investigate the output of the algorithm further.\n\n\n\nThe `changeforest` algorithm can be tuned with hyperparameters. See\n\n[here](https://github.com/mlondschien/changeforest/blob/287ac0f10728518d6a00bf698a4d5834ae98715d/src/control.rs#L3-L30)\n\nfor their descriptions and default values. 
In Python, the parameters can\n\nbe specified with the [`Control` class](https://github.com/mlondschien/changeforest/blob/b33533fe0ddf64c1ea60d0d2203e55b117811667/changeforest-py/changeforest/control.py#L1-L26),\n\nwhich can be passed to `changeforest`. The following will build random forests with\n\n20 trees:\n\n\n\n```python\n\nIn [6]: from changeforest import Control\n\n ...: changeforest(X, \"random_forest\", \"bs\", Control(random_forest_n_estimators=20))\n\nOut[6]: \n\n best_split max_gain p_value\n\n(0, 600] 592 -11.786 0.01\n\n ¦--(0, 592] 121 -6.26 0.015\n\n ¦ ¦--(0, 121] 13 -14.219 0.615\n\n ¦ °--(121, 592] 416 21.272 0.005\n\n ¦ ¦--(121, 416] 201 37.157 0.005\n\n ¦ ¦ ¦--(121, 201] 192 -17.54 0.65\n\n ¦ ¦ °--(201, 416] 207 -6.701 0.74\n\n ¦ °--(416, 592] 584 -44.054 0.935\n\n °--(592, 600] \n\n```\n\n\n\nThe `changeforest` algorithm still detects change points around `t=200, 400` but also\n\nreturns two false positives.\n\n\n\nDue to the nature of the change, `method=\"change_in_mean\"` is unable to detect any\n\nchange points at all:\n\n```python\n\nIn [7]: changeforest(X, \"change_in_mean\", \"bs\")\n\nOut[7]: \n\n best_split max_gain p_value\n\n(0, 600] 589 8.318 \n\n```\n\n\n", "file_path": "changeforest-py/README.md", "rank": 98, "score": 5.95432304933865 }, { "content": "use crate::binary_segmentation::BinarySegmentationResult;\n\nuse std::fmt::Display;\n\n\n\nimpl Display for BinarySegmentationResult {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let mut max_lengths = vec![0; 4];\n\n let mut rows = _format_tree(self);\n\n rows.insert(\n\n 0,\n\n vec![\n\n \"\".to_owned(),\n\n \"best_split\".to_owned(),\n\n \"max_gain\".to_owned(),\n\n \"p_value\".to_owned(),\n\n ],\n\n );\n\n for row in rows.iter() {\n\n for idx in 0..4 {\n\n if row[idx].chars().count() > max_lengths[idx] {\n\n max_lengths[idx] = row[idx].chars().count();\n", "file_path": "src/fmt.rs", "rank": 99, "score": 5.8917469637688065 } ]
Rust
src/tokenstream.rs
nikodemus/foolang
f8d824a8d7ef5ef828911a8b4d0d6419ad29bf6e
use crate::source_location::Span; use crate::unwind::Unwind; #[allow(non_camel_case_types)] #[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] pub enum Token { EOF, HEX_INTEGER, BIN_INTEGER, DEC_INTEGER, SINGLE_FLOAT, DOUBLE_FLOAT, BLOCK_COMMENT, COMMENT, BLOCK_STRING, STRING, WORD, KEYWORD, SIGIL, } #[cfg(test)] impl Token { pub(crate) fn name(&self) -> String { format!("{:?}", self) } } pub struct TokenStream<'a> { source: &'a str, indices: std::cell::RefCell<std::str::CharIndices<'a>>, span: Span, current: (usize, char), offset: usize, } impl<'a> TokenStream<'a> { pub(crate) fn new(source: &'a str) -> TokenStream<'a> { let mut stream = TokenStream { source, indices: std::cell::RefCell::new(source.char_indices()), span: 0..0, current: (0, '.'), offset: 0, }; if source.len() > 0 { stream.next(); } else { } return stream; } #[cfg(test)] pub(crate) fn slice(&self) -> &str { &self.source[self.span()] } pub(crate) fn slice_at(&self, span: Span) -> &str { &self.source[span] } pub(crate) fn span(&self) -> Span { self.span.clone() } fn len(&self) -> usize { self.source.len() } fn pos(&self) -> usize { self.current.0 } fn character(&self) -> char { self.current.1 } fn result(&mut self, token: Token, span: Span) -> Result<Token, Unwind> { self.span = span; Ok(token) } pub(crate) fn scan(&mut self) -> Result<Token, Unwind> { if self.at_eof() { return self.result(Token::EOF, self.len()..self.len()); } if self.at_whitespace() { while self.at_whitespace() { self.next(); } return self.scan(); } if self.at_special() { let start = self.next(); return self.result(Token::SIGIL, start..self.pos()); } if self.at_digit(10) { return self.scan_number(); } if self.at_str("---") { let start = self.consume("---"); while !self.at_str("---") { if self.at_eof() { return self.result(Token::EOF, self.len()..self.len()); } else { self.next(); } } self.consume("---"); return self.result(Token::BLOCK_COMMENT, start..self.pos()); } if self.at_str("--") { let start = self.consume("--"); while 
!self.at_newline() { self.next(); } return self.result(Token::COMMENT, start..self.pos()); } if self.at_str(r#"""""#) { let start = self.consume(r#"""""#); while !self.at_str(r#"""""#) { self.next(); if self.at_str("\\") { self.next(); self.next(); } } self.consume(r#"""""#); return self.result(Token::BLOCK_STRING, start..self.pos()); } if self.at_str(r#"""#) { let start = self.consume(r#"""#); while !self.at_str(r#"""#) { if self.at_str("\\") { self.next(); self.next(); } else { self.next(); } } self.consume(r#"""#); return self.result(Token::STRING, start..self.pos()); } if self.at_word() { let start = self.next(); loop { while self.at_word() { self.next(); } if self.at_char(':') { let pos = self.next(); if self.at_char(':') { self.reset(pos); } else { return self.result(Token::KEYWORD, start..self.pos()); } } if self.at_char('.') { let pos = self.next(); if self.at_word() { continue; } else { self.reset(pos); } } return self.result(Token::WORD, start..self.pos()); } } assert!(self.at_sigil()); let start = self.next(); while self.at_sigil() { self.next(); } return self.result(Token::SIGIL, start..self.pos()); } fn scan_number(&mut self) -> Result<Token, Unwind> { let start = self.next(); if self.at_char('x') || self.at_char('X') { self.next(); while self.at_word() { self.next(); } return self.result(Token::HEX_INTEGER, start..self.pos()); } if self.at_char('b') || self.at_char('B') { self.next(); while self.at_word() { self.next(); } return self.result(Token::BIN_INTEGER, start..self.pos()); } while self.at_digit(10) || self.at_char('_') { self.next(); } let dot = self.at_char('.'); if dot { let p = self.pos(); self.next(); if self.at_whitespace() { self.reset(p); return self.result(Token::DEC_INTEGER, start..self.pos()); } while self.at_digit(10) || self.at_char('_') { self.next(); } } let single = self.at_char('f') || self.at_char('F'); let double = self.at_char('e') || self.at_char('E'); if single || double { self.next(); if self.at_char('+') || 
self.at_char('-') { self.next(); } while self.at_word() { self.next(); } if single { return self.result(Token::SINGLE_FLOAT, start..self.pos()); } else { return self.result(Token::DOUBLE_FLOAT, start..self.pos()); } } while self.at_word() { self.next(); } if dot { return self.result(Token::DOUBLE_FLOAT, start..self.pos()); } else { return self.result(Token::DEC_INTEGER, start..self.pos()); } } fn at_eof(&self) -> bool { self.pos() >= self.len() } fn at_whitespace(&self) -> bool { !self.at_eof() && self.character().is_whitespace() } fn at_alphanumeric(&self) -> bool { !self.at_eof() && self.character().is_alphanumeric() } fn at_digit(&self, base: u32) -> bool { !self.at_eof() && self.character().is_digit(base) } fn at_newline(&self) -> bool { self.at_char('\n') } fn at_special(&self) -> bool { if self.at_eof() { return false; } let c = self.character(); return c == '(' || c == ')' || c == '[' || c == ']' || c == '{' || c == '}' || c == ',' || c == '.' || c == ';' || c == '$' || c == '!' || c == '#'; } fn at_terminating(&self) -> bool { self.at_whitespace() || self.at_special() } fn at_word(&self) -> bool { self.at_alphanumeric() || self.at_char('_') } fn at_sigil(&self) -> bool { !(self.at_eof() || self.at_word() || self.at_terminating()) } fn at_char(&self, c: char) -> bool { !self.at_eof() && c == self.character() } fn at_str(&self, target: &str) -> bool { let start = self.pos(); let end = start + target.len(); if self.len() < end { return false; } &self.source[start..end] == target } fn next(&mut self) -> usize { let p = self.pos(); self.current = match self.indices.borrow_mut().next() { Some((p, ch)) => (p + self.offset, ch), None => (self.len(), '.'), }; return p; } fn consume(&mut self, target: &str) -> usize { assert!(self.at_str(target)); let p = self.pos(); self.reset(p + target.len()); return p; } fn reset(&mut self, position: usize) { self.offset = position; self.indices = std::cell::RefCell::new(self.source[position..].char_indices()); self.next(); } }
use crate::source_location::Span; use crate::unwind::Unwind; #[allow(non_camel_case_types)] #[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)] pub enum Token { EOF, HEX_INTEGER, BIN_INTEGER, DEC_INTEGER, SINGLE_FLOAT, DOUBLE_FLOAT, BLOCK_COMMENT, COMMENT, BLOCK_STRING, STRING, WORD, KEYWORD, SIGIL, } #[cfg(test)] impl Token { pub(crate) fn name(&self) -> String { format!("{:?}", self) } } pub struct TokenStream<'a> { source: &'a str, indices: std::cell::RefCell<std::str::CharIndices<'a>>, span: Span, current: (usize, char), offset: usize, } impl<'a> TokenStream<'a> { pub(crate) fn new(source: &'a str) -> TokenStream<'a> { let mut stream = TokenStream { source, indices: std::cell::RefCell::new(source.char_indices()), span: 0..0, current: (0, '.'), offset: 0, }; if source.len() > 0 { stream.next(); } else { } return stream; } #[cfg(test)] pub(crate) fn slice(&self) -> &str { &self.source[self.span()] } pub(crate) fn slice_at(&self, span: Span) -> &str { &self.source[span] } pub(crate) fn span(&self) -> Span { self.span.clone() } fn len(&self) -> usize { self.source.len() } fn pos(&self) -> usize { self.current.0 } fn character(&self) -> char { self.current.1 } fn result(&mut self, token: Token, span: Span) -> Result<Token, Unwind> { self.span = span; Ok(token) } pub(crate) fn scan(&mut self) -> Result<Token, Unwind> { if self.at_eof() { return self.result(Token::EOF, self.len()..self.len()); } if self.at_whitespace() { while self.at_whitespace() { self.next(); } return self.scan(); } if self.at_special() { let start = self.next(); return self.result(Token::SIGIL, start..self.pos()); } if self.at_digit(10) { return self.scan_number(); } if self.at_str("---") { let start = self.consume("---"); while !self.at_str("---") { if self.at_eof() { return self.result(Token::EOF, self.len()..self.len()); } else { self.next(); } } self.consume("---"); return self.result(Token::BLOCK_COMMENT, start..self.pos()); } if self.at_str("--") { let start = self.consume("--"); while 
!self.at_newline() { self.next(); } return self.result(Token::COMMENT, start..self.pos()); } if self.at_str(r#"""""#) { let start = self.consume(r#"""""#); while !self.at_str(r#"""""#) { self.next(); if self.at_str("\\") { self.next(); self.next(); } } self.consume(r#"""""#); return self.result(Token::BLOCK_STRING, start..self.pos()); } if self.at_str(r#"""#) { let start = self.consume(r#"""#); while !self.at_str(r#"""#) { if self.at_str("\\") { self.next(); self.next(); } else { self.next(); } } self.consume(r#"""#); return self.result(Token::STRING, start..self.pos()); } if self.at_word() { let start = self.next(); loop { while self.at_word() { self.next(); } if self.at_char(':') { let pos = self.next(); if self.at_char(':') { self.reset(pos); } else { return self.result(Token::KEYWORD, start..self.pos()); } } if self.at_char('.') { let pos = self.next(); if self.at_word() { continue; } else { self.reset(pos); } } return self.result(Token::WORD, start..self.pos()); } } assert!(self.at_sigil()); let start = self.next(); while self.at_sigil() { self.next(); } return self.result(Token::SIGIL, start..self.pos()); } fn scan_number(&mut self) -> Result<Token, Unwind> { let start = self.next(); if self.at_char('x') || self.at_char('X') { self.next(); while self.at_word() { self.next(); } return self.result(Token::HEX_INTEGER, start..self.pos()); } if self.at_char('b') || self.at_char('B') { self.next(); while self.at_word() { self.next(); } return self.result(Token::BIN_INTEGER, start..self.pos()); } while self.at_digit(10) || self.at_char('_') { self.next(); } let dot = self.at_char('.'); if dot { let p = self.pos(); self.next(); if self.at_whitespace() { self.reset(p); return self.result(Token::DEC_INTEGER, start..self.pos()); } while self.at_digit(10) || self.at_char('_') { self.next(); } } let single = self.at_char('f') || self.at_char('F'); let double = self.at_char('e') || self.at_char('E'); if single || double { self.next(); if self.at_char('+') || 
self.at_char('-') { self.next(); } while self.at_word() { self.next(); } if single { return self.result(Token::SINGLE_FLOAT, start..self.pos()); } else { return self.result(Token::DOUBLE_FLOAT, start..self.pos()); } } while self.at_word() { self.next(); } if dot { return self.result(Token::DOUBLE_FLOAT, start..self.pos()); } else { return self.result(Token::DEC_INTEGER, start..self.pos()); } } fn at_eof(&self) -> bool { self.pos() >= self.len() } fn at_whitespace(&self) -> bool { !self.at_eof() && self.character().is_whitespace() } fn at_alphanumeric(&self) -> bool { !self.at_eof() && self.character().is_alphanumeric() } fn at_digit(&self, base: u32) -> bool { !self.at_eof() && self.character().is_digit(base) } fn at_newline(&self) -> bool { self.at_char('\n') } fn at_special(&self) -> bool { if self.at_eof() { return false; } let c = self.character(); return c == '(' || c == ')' || c == '[' || c == ']' || c == '{' || c == '}' || c == ',' || c == '.' || c == ';' || c == '$' || c == '!' || c == '#'; } fn at_terminating(&self) -> bool { self.at_whitespace() || self.at_special() } fn at_word(&self) -> bool { self.at_alphanumeric() || self.at_char('_') } fn at_sigil(&self) -> bool { !(self.at_eof() || self.at_word() || self.at_terminating()) } fn at_char(&self, c: char) -> bool { !self.at_eof() && c == self.character() } fn at_str(&self, target: &str) -> bool { let start = self.pos(); let end = start + target.len(); if self.len() < end { return false; } &self.source[start..end] == target }
fn consume(&mut self, target: &str) -> usize { assert!(self.at_str(target)); let p = self.pos(); self.reset(p + target.len()); return p; } fn reset(&mut self, position: usize) { self.offset = position; self.indices = std::cell::RefCell::new(self.source[position..].char_indices()); self.next(); } }
fn next(&mut self) -> usize { let p = self.pos(); self.current = match self.indices.borrow_mut().next() { Some((p, ch)) => (p + self.offset, ch), None => (self.len(), '.'), }; return p; }
function_block-full_function
[ { "content": "fn _append_context_line(context: &mut String, lineno: usize, line: &str) {\n\n if lineno == 0 {\n\n context.push_str(format!(\" {}\\n\", line).as_str());\n\n } else {\n\n context.push_str(format!(\"{:03} {}\\n\", lineno, line).as_str());\n\n }\n\n}\n", "file_path": "src/unwind.rs", "rank": 0, "score": 316721.31480447925 }, { "content": "/// Takes care of \\n, and such. Terminates on { or end of string.\n\nfn scan_string_part(parser: &Parser, mut source_location: SourceLocation) -> Result<Expr, Unwind> {\n\n // println!(\"scan: '{}'\", parser.slice_at(source_location.get_span()));\n\n let span = source_location.get_span();\n\n let start = span.start;\n\n let mut chars = parser.slice_at(span).char_indices();\n\n let mut res = String::new();\n\n loop {\n\n match chars.next() {\n\n None => return Ok(Const::expr(source_location, Literal::String(res))),\n\n Some((pos0, '\\\\')) => match chars.next() {\n\n None => {\n\n source_location.set_span(&(start + pos0..start + pos0 + 1));\n\n return Unwind::error_at(source_location, \"Literal string ends on escape.\");\n\n }\n\n Some((_, '\"')) => res.push_str(\"\\\"\"),\n\n Some((_, '\\\\')) => res.push_str(\"\\\\\"),\n\n Some((_, 'n')) => res.push_str(\"\\n\"),\n\n Some((_, 't')) => res.push_str(\"\\t\"),\n\n Some((_, 'r')) => res.push_str(\"\\r\"),\n\n Some((_, '{')) => res.push_str(\"{\"),\n", "file_path": "src/parse.rs", "rank": 1, "score": 248671.92730151096 }, { "content": "fn operator_precedence(parser: &Parser, span: Span) -> Result<usize, Unwind> {\n\n let slice = parser.slice_at(span.clone());\n\n let syntax = parser.name_table.get(slice).unwrap_or(&UNKNOWN_OPERATOR_SYNTAX);\n\n parser.syntax_precedence(syntax, span)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 2, "score": 247024.20319249795 }, { "content": "fn identifier_precedence(parser: &Parser, span: Span) -> Result<usize, Unwind> {\n\n match parser.name_table.get(parser.slice_at(span.clone())) {\n\n Some(syntax) => 
parser.syntax_precedence(syntax, span),\n\n None => return Ok(1000), // unary messages\n\n }\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 3, "score": 247024.20319249795 }, { "content": "fn precedence_2(_: &Parser, _: Span) -> Result<usize, Unwind> {\n\n Ok(2)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 4, "score": 236028.9658137968 }, { "content": "fn precedence_4(_: &Parser, _: Span) -> Result<usize, Unwind> {\n\n Ok(4)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 5, "score": 236028.9658137968 }, { "content": "fn precedence_10(_: &Parser, _: Span) -> Result<usize, Unwind> {\n\n Ok(10)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 6, "score": 236028.9658137968 }, { "content": "fn precedence_1(_: &Parser, _: Span) -> Result<usize, Unwind> {\n\n Ok(1)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 7, "score": 236028.9658137968 }, { "content": "fn precedence_3(_: &Parser, _: Span) -> Result<usize, Unwind> {\n\n Ok(3)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 8, "score": 236028.9658137968 }, { "content": "fn precedence_0(_: &Parser, _: Span) -> Result<usize, Unwind> {\n\n Ok(0)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 9, "score": 236028.9658137968 }, { "content": "fn precedence_9(_: &Parser, _: Span) -> Result<usize, Unwind> {\n\n Ok(9)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 10, "score": 236028.9658137968 }, { "content": "fn precedence_1000(_: &Parser, _: Span) -> Result<usize, Unwind> {\n\n Ok(1000)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 11, "score": 236028.9658137968 }, { "content": "fn parse_def(source: &str) -> Result<Def, Unwind> {\n\n Ok(parse_str(source)?.def())\n\n}\n\n\n\nuse pretty_assertions::assert_eq;\n\n\n", "file_path": "src/tests/test_parser.rs", "rank": 12, "score": 232290.29977664948 }, { "content": "// FIXME: can I remove the span from here?\n\ntype PrecedenceFunction = fn(&Parser, Span) -> Result<usize, Unwind>;\n\n\n", "file_path": "src/parse.rs", "rank": 13, "score": 231486.075655533 }, 
{ "content": "fn precedence_invalid(_: &Parser, _: Span) -> Result<usize, Unwind> {\n\n // To guarantee it aways gets parsed.\n\n Ok(1001)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 14, "score": 231411.43183808925 }, { "content": "pub fn as_dictionary<'a>(obj: &'a Object, ctx: &str) -> Result<&'a Dictionary, Unwind> {\n\n match &obj.datum {\n\n Datum::Dictionary(ref dict) => Ok(dict),\n\n _ => Unwind::error(&format!(\"{:?} is not a Dictionary in {}\", obj, ctx)),\n\n }\n\n}\n\n\n", "file_path": "src/classes/dictionary.rs", "rank": 15, "score": 229132.85851754167 }, { "content": "pub fn as_record<'a>(obj: &'a Object, ctx: &str) -> Result<&'a Record, Unwind> {\n\n match &obj.datum {\n\n Datum::Record(ref record) => Ok(record),\n\n _ => Unwind::error(&format!(\"{:?} is not a Record in {}\", obj, ctx)),\n\n }\n\n}\n\n\n", "file_path": "src/classes/record.rs", "rank": 16, "score": 229132.85851754167 }, { "content": "pub fn as_file<'a>(obj: &'a Object, ctx: &str) -> Result<&'a File, Unwind> {\n\n match &obj.datum {\n\n Datum::File(ref file) => Ok(file),\n\n _ => Unwind::error(&format!(\"{:?} is not a File in {}\", obj, ctx)),\n\n }\n\n}\n\n\n", "file_path": "src/classes/file.rs", "rank": 17, "score": 229132.85851754167 }, { "content": "pub fn as_random<'a>(obj: &'a Object, ctx: &str) -> Result<&'a Random, Unwind> {\n\n match &obj.datum {\n\n Datum::Random(ref random) => Ok(random),\n\n _ => Unwind::error(&format!(\"{:?} is not a Random ({})\", obj, ctx)),\n\n }\n\n}\n\n\n", "file_path": "src/classes/random.rs", "rank": 18, "score": 229132.85851754167 }, { "content": "pub fn as_array<'a>(obj: &'a Object, ctx: &str) -> Result<&'a Array, Unwind> {\n\n match &obj.datum {\n\n Datum::Array(ref array) => Ok(array),\n\n _ => Unwind::error(&format!(\"{:?} is not a Array in {}\", obj, ctx)),\n\n }\n\n}\n\n\n", "file_path": "src/classes/array.rs", "rank": 19, "score": 229132.85851754164 }, { "content": "pub fn as_filepath<'a>(obj: &'a Object, ctx: &str) -> Result<&'a 
FilePath, Unwind> {\n\n match &obj.datum {\n\n Datum::FilePath(ref filepath) => Ok(filepath),\n\n _ => Unwind::error(&format!(\"{:?} is not a FilePath in {}\", obj, ctx)),\n\n }\n\n}\n\n\n", "file_path": "src/classes/filepath.rs", "rank": 20, "score": 225482.09353239462 }, { "content": "pub fn as_byte_array<'a>(obj: &'a Object, ctx: &str) -> Result<&'a ByteArray, Unwind> {\n\n match &obj.datum {\n\n Datum::ByteArray(ref byte_array) => Ok(byte_array),\n\n _ => Unwind::error(&format!(\"{:?} is not a ByteArray in {}\", obj, ctx)),\n\n }\n\n}\n\n\n", "file_path": "src/classes/byte_array.rs", "rank": 21, "score": 218745.34279294632 }, { "content": "(defun foolang--syntax-propertize-extend-block-comments (start end)\n\n (save-excursion\n\n (save-restriction\n\n (widen)\n\n (narrow-to-region (point-min) start)\n\n (goto-char (point-min))\n\n ;; Count the number of --- comment fences before the start: if odd, move\n\n ;; region start to the beginning of the line with the first one.\n\n (let ((n 0))\n\n (while (looking-at \"\\\\(.\\\\|\\n\\\\)*?---\")\n\n (goto-char (match-end 0))\n\n (incf n))\n\n (if (eql 0 (% n 2))\n\n ;; Even number of fences, everything is fine.\n\n (cons start end)\n\n ;; Odd number of fences, widen\n\n (cons (match-beginning 0) end))))))\n\n\n\n(define-derived-mode foolang-mode prog-mode \"Foolang Mode\"\n\n :syntax-table foolang-syntax-table\n", "file_path": "elisp/foolang.el", "rank": 22, "score": 202554.5616342249 }, { "content": "fn parse_type_designator(parser: &Parser) -> Result<String, Unwind> {\n\n if let Token::WORD = parser.next_token()? 
{\n\n Ok(parser.tokenstring())\n\n } else {\n\n parser.error(\"Invalid type designator\")\n\n }\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 23, "score": 200181.4897621339 }, { "content": "fn parse_str(source: &str) -> Parse {\n\n Parser::new(source, \"test/\").parse().map_err(|unwind| unwind.with_context(source))\n\n}\n\n\n", "file_path": "src/tests/test_parser.rs", "rank": 24, "score": 190912.05628664274 }, { "content": "pub fn instance_vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"String\");\n\n vt.add_primitive_method_or_panic(\"append:\", string_append_);\n\n vt.add_primitive_method_or_panic(\"toString\", string_to_string);\n\n vt.add_primitive_method_or_panic(\"size\", string_size);\n\n vt.add_primitive_method_or_panic(\"do:\", string_do);\n\n vt.add_primitive_method_or_panic(\"codeAt:\", string_code_at);\n\n vt.add_primitive_method_or_panic(\"from:to:\", string_from_to);\n\n vt.add_primitive_method_or_panic(\"isEquivalent:\", string_is_equivalent);\n\n vt.add_primitive_method_or_panic(\"sendTo:with:\", string_send_to_with);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/string.rs", "rank": 25, "score": 189886.61637859937 }, { "content": "pub fn class_vtable() -> Vtable {\n\n let vt = Vtable::for_class(\"String\");\n\n vt.add_primitive_method_or_panic(\"new\", class_string_new);\n\n vt.add_primitive_method_or_panic(\"concat:\", class_string_concat);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/string.rs", "rank": 26, "score": 189886.61637859937 }, { "content": "fn find_module_or_abort(spec: &str, app: &App) -> (String, PathBuf) {\n\n let path = match std::fs::canonicalize(Path::new(&spec)) {\n\n Ok(path) => path,\n\n Err(_) => oops(format!(\"cannot find module: {}\", spec), Some(app)),\n\n };\n\n let root = match path.parent() {\n\n Some(path) => path.to_path_buf(),\n\n None => oops(format!(\"cannot determine root of module: {}\", spec), Some(app)),\n\n };\n\n let name = match path.file_name() {\n\n Some(name) => match name.to_str() {\n\n 
Some(name) => name.to_string(),\n\n None => oops(format!(\"module has invalid filename: {}\", spec), Some(app)),\n\n },\n\n None => oops(format!(\"cannot determine name of module: {}\", spec), Some(app)),\n\n };\n\n return (name, root);\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 27, "score": 189569.5643033108 }, { "content": "fn parse_var(parser: &Parser, dynamic: bool) -> Result<Var, Unwind> {\n\n let mut loc = parser.source_location();\n\n let mut name = String::new();\n\n if dynamic {\n\n let span = parser.span();\n\n loc.set_span(&(span.start - 1..span.end));\n\n name.push_str(\"$\");\n\n };\n\n name.push_str(parser.slice());\n\n let (token, span) = parser.lookahead()?;\n\n let var = if token == Token::SIGIL && parser.slice_at(span) == \"::\" {\n\n parser.next_token()?;\n\n Var::typed(loc, name, parse_type_designator(parser)?, dynamic)\n\n } else {\n\n Var::untyped(loc, name, dynamic)\n\n };\n\n Ok(var)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 28, "score": 187948.63240078173 }, { "content": "(defun foolang--syntax-propertize (start end)\n\n (goto-char start)\n\n (funcall (syntax-propertize-rules foolang--syntax-propertize-rules)\n\n start end))\n\n\n", "file_path": "elisp/foolang.el", "rank": 29, "score": 187118.91623809998 }, { "content": "// Cannot be a method since requires access to target Rc.\n\npub fn vt_add_interface(target: &Rc<Vtable>, interface_vt: &Rc<Vtable>) {\n\n let mut interfaces = target.interfaces.borrow_mut();\n\n for inherited in interface_vt.interfaces.borrow().iter() {\n\n interfaces.insert(inherited.clone());\n\n inherited.implementations.borrow_mut().insert(target.clone());\n\n }\n\n interfaces.insert(interface_vt.clone());\n\n interface_vt.implementations.borrow_mut().insert(target.clone());\n\n}\n\n\n\nimpl Vtable {\n\n pub fn raw(name: &str) -> Vtable {\n\n Vtable {\n\n name: name.to_string(),\n\n methods: RefCell::new(HashMap::new()),\n\n slots: RefCell::new(Vec::new()),\n\n interfaces: 
RefCell::new(HashSet::new()),\n\n implementations: RefCell::new(HashSet::new()),\n\n class: RefCell::new(None),\n\n }\n", "file_path": "src/objects.rs", "rank": 30, "score": 182112.77225086803 }, { "content": "fn parse_expr(source: &str) -> ExprParse {\n\n Ok(parse_str(source)?.expr())\n\n}\n\n\n", "file_path": "src/tests/test_parser.rs", "rank": 31, "score": 175615.62172102107 }, { "content": "pub fn make_current_directory_filepath(env: &Env) -> Eval {\n\n match std::env::current_dir() {\n\n Ok(p) => Ok(into_filepath(p, env)),\n\n Err(e) => Unwind::error(&format!(\"Could not determine current directory: {}\", e)),\n\n }\n\n}\n\n\n", "file_path": "src/classes/filepath.rs", "rank": 32, "score": 168074.26229625073 }, { "content": "pub trait TweakSpan {\n\n fn tweak(&mut self, shift: usize, extend: isize);\n\n fn shift(&mut self, shift: usize) {\n\n self.tweak(shift, 0);\n\n }\n\n fn extend(&mut self, extend: isize) {\n\n self.tweak(0, extend);\n\n }\n\n}\n\n\n\nimpl TweakSpan for Span {\n\n fn tweak(&mut self, shift: usize, extend: isize) {\n\n self.start += shift;\n\n self.end += shift;\n\n if extend < 0 {\n\n self.start -= (-extend) as usize;\n\n } else {\n\n self.end += extend as usize;\n\n }\n\n }\n", "file_path": "src/source_location.rs", "rank": 33, "score": 164041.06239913896 }, { "content": "pub fn read_instance_variable(receiver: &Object, index: usize) -> Eval {\n\n let instance = receiver.instance()?;\n\n let value = instance.instance_variables.borrow()[index].clone();\n\n Ok(value)\n\n}\n\n\n", "file_path": "src/objects.rs", "rank": 34, "score": 161272.0175799121 }, { "content": "(defun foolang--indent-to (target col base stack ctx indent-all)\n\n (let ((now (line-number-at-pos)))\n\n ;; (foolang--note \"line %s (target %s)\" now target)\n\n (cond ((eql target now)\n\n (unless (foolang--dont-indent)\n\n (foolang--indent-line-to col)))\n\n ((or (looking-at \"^\\\\s-*$\") (foolang--dont-indent))\n\n ;; Skip over empty lines and strings and comments\n\n 
(next-line)\n\n (beginning-of-line)\n\n (foolang--indent-to target col base stack ctx indent-all))\n\n (t\n\n ;; Compute indentation for next line and move down.\n\n (when indent-all\n\n (foolang--indent-line-to col))\n\n (destructuring-bind (new-col new-base new-stack new-ctx)\n\n (foolang--compute-next-line-indent col base stack ctx)\n\n (next-line)\n\n (beginning-of-line)\n\n (foolang--indent-to target new-col new-base new-stack new-ctx indent-all))))))\n", "file_path": "elisp/foolang.el", "rank": 35, "score": 161090.9556909764 }, { "content": "pub fn make_method_closure(\n\n env: &Env,\n\n name: &str,\n\n params: &[Var],\n\n body: &Expr,\n\n return_type: &Option<String>,\n\n) -> Result<Closure, Unwind> {\n\n let mut args = vec![];\n\n let mut parameter_types = vec![];\n\n for param in params {\n\n args.push(Arg::new(param.source_location.clone(), param.name.clone()));\n\n match &param.typename {\n\n Some(name) => parameter_types.push(Some(env.find_type(name)?)),\n\n None => parameter_types.push(None),\n\n }\n\n }\n\n Ok(Closure {\n\n name: name.to_string(),\n\n env_ref: env.env_ref.clone(),\n\n params: args,\n", "file_path": "src/objects.rs", "rank": 36, "score": 158095.60790226667 }, { "content": "pub fn vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"System\");\n\n vt.add_primitive_method_or_panic(\"abort\", system_abort);\n\n vt.add_primitive_method_or_panic(\"clock\", system_clock);\n\n vt.add_primitive_method_or_panic(\"command:\", system_command);\n\n vt.add_primitive_method_or_panic(\"currentDirectory\", system_current_directory);\n\n vt.add_primitive_method_or_panic(\"exit\", system_exit);\n\n vt.add_primitive_method_or_panic(\"exit:\", system_exit_arg);\n\n vt.add_primitive_method_or_panic(\"files\", system_files);\n\n vt.add_primitive_method_or_panic(\"getenv:\", system_getenv);\n\n vt.add_primitive_method_or_panic(\"input\", system_input);\n\n vt.add_primitive_method_or_panic(\"isWindows\", system_is_windows);\n\n 
vt.add_primitive_method_or_panic(\"isUnix\", system_is_unix);\n\n vt.add_primitive_method_or_panic(\"isMacOS\", system_is_macos);\n\n vt.add_primitive_method_or_panic(\"output\", system_output);\n\n vt.add_primitive_method_or_panic(\"output:\", system_output_arg);\n\n vt.add_primitive_method_or_panic(\"random\", system_random);\n\n vt.add_primitive_method_or_panic(\"random:\", system_random_arg);\n\n vt.add_primitive_method_or_panic(\"sleep\", system_sleep);\n\n vt.add_primitive_method_or_panic(\"sleep:\", system_sleep_arg);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/system.rs", "rank": 37, "score": 152916.43659033932 }, { "content": "pub fn vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"Integer\");\n\n vt.add_primitive_method_or_panic(\"asFloat\", integer_as_float);\n\n vt.add_primitive_method_or_panic(\"integerAdd:\", integer_integer_add);\n\n vt.add_primitive_method_or_panic(\"integerDiv:\", integer_integer_div);\n\n vt.add_primitive_method_or_panic(\"integerEq:\", integer_integer_eq);\n\n vt.add_primitive_method_or_panic(\"integerGt:\", integer_integer_gt);\n\n vt.add_primitive_method_or_panic(\"integerGte:\", integer_integer_gte);\n\n vt.add_primitive_method_or_panic(\"integerLt:\", integer_integer_lt);\n\n vt.add_primitive_method_or_panic(\"integerLte:\", integer_integer_lte);\n\n vt.add_primitive_method_or_panic(\"integerMul:\", integer_integer_mul);\n\n vt.add_primitive_method_or_panic(\"integerSub:\", integer_integer_sub);\n\n vt.add_primitive_method_or_panic(\"toString\", integer_to_string);\n\n vt.add_primitive_method_or_panic(\"prefix-\", integer_neg);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/integer.rs", "rank": 38, "score": 152916.43659033932 }, { "content": "pub fn vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"Input\");\n\n vt.add_primitive_method_or_panic(\"readline\", input_readline);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/input.rs", "rank": 39, "score": 152916.43659033932 }, { "content": "pub fn vtable() -> 
Vtable {\n\n let vt = Vtable::for_instance(\"Closure\");\n\n // FUNDAMENTAL\n\n vt.add_primitive_method_or_panic(\"apply:\", closure_apply_array);\n\n vt.add_primitive_method_or_panic(\"signature\", closure_signature);\n\n vt.add_primitive_method_or_panic(\"finally:\", closure_finally);\n\n vt.add_primitive_method_or_panic(\"arity\", closure_arity);\n\n vt.add_primitive_method_or_panic(\"onPanic:\", closure_on_panic);\n\n vt.add_primitive_method_or_panic(\"loop\", closure_loop);\n\n vt\n\n}\n\n\n\n// FUNDAMENTAL METHODS\n\n\n", "file_path": "src/classes/closure.rs", "rank": 40, "score": 152916.43659033932 }, { "content": "fn eof_suffix(parser: &Parser, _: Expr, _: PrecedenceFunction) -> Result<Expr, Unwind> {\n\n parser.eof_error(\"Unexpected EOF in suffix position\")\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 41, "score": 150768.50486232055 }, { "content": "pub fn instance_vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"Compiler\");\n\n vt.add_primitive_method_or_panic(\"define:as:\", compiler_define_as);\n\n vt.add_primitive_method_or_panic(\"evaluate\", compiler_evaluate);\n\n vt.add_primitive_method_or_panic(\"parse:\", compiler_parse);\n\n vt.add_primitive_method_or_panic(\"parse:onEof:\", compiler_parse_on_eof);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/compiler.rs", "rank": 42, "score": 150334.0397873136 }, { "content": "pub fn class_vtable() -> Vtable {\n\n Vtable::for_class(\"FileStream\")\n\n}\n\n\n", "file_path": "src/classes/filestream.rs", "rank": 43, "score": 150334.0397873136 }, { "content": "pub fn instance_vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"FileStream\");\n\n vt.add_primitive_method_or_panic(\"pathname\", filestream_pathname);\n\n vt.add_primitive_method_or_panic(\"close\", filestream_close);\n\n vt.add_primitive_method_or_panic(\"flush\", filestream_flush);\n\n vt.add_primitive_method_or_panic(\"isClosed\", filestream_is_closed);\n\n vt.add_primitive_method_or_panic(\"offset\", filestream_offset);\n\n 
vt.add_primitive_method_or_panic(\"offset:\", filestream_offset_arg);\n\n vt.add_primitive_method_or_panic(\"offsetFromEnd:\", filestream_offset_from_end);\n\n vt.add_primitive_method_or_panic(\"offsetFromHere:\", filestream_offset_from_here);\n\n vt.add_primitive_method_or_panic(\"readString\", filestream_read_string);\n\n vt.add_primitive_method_or_panic(\"resize:\", filestream_resize);\n\n vt.add_primitive_method_or_panic(\n\n \"tryReadOnce:bytesInto:at:\",\n\n filestream_try_read_once_bytes_into_at,\n\n );\n\n vt.add_primitive_method_or_panic(\n\n \"tryWriteOnce:bytesFrom:at:\",\n\n filestream_try_write_once_bytes_from_at,\n\n );\n\n vt.add_primitive_method_or_panic(\"writeString:\", filestream_write_string);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/filestream.rs", "rank": 44, "score": 150334.0397873136 }, { "content": "pub fn instance_vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"Random\");\n\n vt.add_primitive_method_or_panic(\"integer\", random_integer);\n\n vt.add_primitive_method_or_panic(\"float\", random_float);\n\n vt.add_primitive_method_or_panic(\"boolean\", random_boolean);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/random.rs", "rank": 45, "score": 150334.0397873136 }, { "content": "pub fn instance_vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"Clock\");\n\n vt.add_primitive_method_or_panic(\"time\", clock_time);\n\n vt.add_primitive_method_or_panic(\"toString\", clock_to_string);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/clock.rs", "rank": 46, "score": 150334.0397873136 }, { "content": "pub fn instance_vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"Float\");\n\n vt.add_primitive_method_or_panic(\"floatAdd:\", float_float_add);\n\n vt.add_primitive_method_or_panic(\"floatDiv:\", float_float_div);\n\n vt.add_primitive_method_or_panic(\"floatEq:\", float_float_eq);\n\n vt.add_primitive_method_or_panic(\"floatGt:\", float_float_gt);\n\n vt.add_primitive_method_or_panic(\"floatGte:\", float_float_gte);\n\n 
vt.add_primitive_method_or_panic(\"floatLt:\", float_float_lt);\n\n vt.add_primitive_method_or_panic(\"floatLte:\", float_float_lte);\n\n vt.add_primitive_method_or_panic(\"floatMul:\", float_float_mul);\n\n vt.add_primitive_method_or_panic(\"floatSub:\", float_float_sub);\n\n vt.add_primitive_method_or_panic(\"isFinite\", float_is_finite);\n\n vt.add_primitive_method_or_panic(\"isInfinite\", float_is_infinite);\n\n vt.add_primitive_method_or_panic(\"prefix-\", float_neg);\n\n vt.add_primitive_method_or_panic(\"sqrt\", float_sqrt);\n\n vt.add_primitive_method_or_panic(\"truncate\", float_truncate);\n\n vt.add_primitive_method_or_panic(\"round\", float_round);\n\n vt.add_primitive_method_or_panic(\"toString\", float_to_string);\n\n vt\n\n}\n\n\n\n// FUNDAMENTAL METHODS\n\n\n", "file_path": "src/classes/float.rs", "rank": 47, "score": 150334.0397873136 }, { "content": "pub fn instance_vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"File\");\n\n vt.add_primitive_method_or_panic(\"pathname\", file_pathname);\n\n vt.add_primitive_method_or_panic(\"create\", file_create);\n\n vt.add_primitive_method_or_panic(\"createOrOpen\", file_create_or_open);\n\n vt.add_primitive_method_or_panic(\"forAppend\", file_for_append);\n\n vt.add_primitive_method_or_panic(\"forRead\", file_for_read);\n\n vt.add_primitive_method_or_panic(\"forWrite\", file_for_write);\n\n vt.add_primitive_method_or_panic(\"isAppend\", file_is_append);\n\n vt.add_primitive_method_or_panic(\"isRead\", file_is_read);\n\n vt.add_primitive_method_or_panic(\"isTruncate\", file_is_truncate);\n\n vt.add_primitive_method_or_panic(\"isWrite\", file_is_write);\n\n vt.add_primitive_method_or_panic(\"open\", file_open);\n\n vt.add_primitive_method_or_panic(\"truncateExisting\", file_truncate_existing);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/file.rs", "rank": 48, "score": 150334.0397873136 }, { "content": "pub fn class_vtable() -> Vtable {\n\n let vt = Vtable::for_class(\"Time\");\n\n 
vt.add_primitive_method_or_panic(\"user:system:real:\", class_time_user_system_real_);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/time.rs", "rank": 49, "score": 150334.0397873136 }, { "content": "pub fn class_vtable() -> Vtable {\n\n Vtable::for_class(\"File\")\n\n}\n\n\n", "file_path": "src/classes/file.rs", "rank": 50, "score": 150334.0397873136 }, { "content": "pub fn instance_vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"Time\");\n\n vt.add_primitive_method_or_panic(\"real\", time_real);\n\n vt.add_primitive_method_or_panic(\"system\", time_system);\n\n vt.add_primitive_method_or_panic(\"user\", time_user);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/time.rs", "rank": 51, "score": 150334.0397873136 }, { "content": "pub fn instance_vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"Dictionary\");\n\n vt.add_primitive_method_or_panic(\"at:ifNone:\", dictionary_at_if_none);\n\n vt.add_primitive_method_or_panic(\"doKeys:\", dictionary_do_keys);\n\n vt.add_primitive_method_or_panic(\"remove:\", dictionary_remove);\n\n vt.add_primitive_method_or_panic(\"put:at:\", dictionary_put_at);\n\n vt.add_primitive_method_or_panic(\"size\", dictionary_size);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/dictionary.rs", "rank": 52, "score": 150334.0397873136 }, { "content": "pub fn class_vtable() -> Vtable {\n\n let vt = Vtable::for_class(\"Record\");\n\n vt.add_primitive_method_or_panic(\"perform:with:\", class_record_perform_with);\n\n vt.add_primitive_method_or_panic(\"keysIn:\", class_record_keys_in);\n\n vt.add_primitive_method_or_panic(\"at:in:\", class_record_at_in);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/record.rs", "rank": 53, "score": 150334.0397873136 }, { "content": "pub fn class_vtable() -> Vtable {\n\n let vt = Vtable::for_class(\"Random\");\n\n vt.add_primitive_method_or_panic(\"new\", class_random_new);\n\n vt.add_primitive_method_or_panic(\"new:\", class_random_new_arg);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/random.rs", 
"rank": 54, "score": 150334.0397873136 }, { "content": "pub fn class_vtable() -> Vtable {\n\n Vtable::for_class(\"Boolean\")\n\n}\n\n\n", "file_path": "src/classes/boolean.rs", "rank": 55, "score": 150334.0397873136 }, { "content": "pub fn class_vtable() -> Vtable {\n\n let vt = Vtable::for_class(\"Float class\");\n\n vt.add_primitive_method_or_panic(\"parse:\", float_class_parse_);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/float.rs", "rank": 56, "score": 150334.0397873136 }, { "content": "pub fn class_vtable() -> Vtable {\n\n let vt = Vtable::for_class(\"Dictionary\");\n\n vt.add_primitive_method_or_panic(\"new\", class_dictionary_new);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/dictionary.rs", "rank": 57, "score": 150334.0397873136 }, { "content": "pub fn class_vtable() -> Vtable {\n\n let vt = Vtable::for_class(\"Array\");\n\n vt.add_primitive_method_or_panic(\"of:new:value:\", class_array_of_new_value);\n\n vt.add_primitive_method_or_panic(\"toString\", class_array_to_string);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/array.rs", "rank": 58, "score": 150334.0397873136 }, { "content": "pub fn class_vtable() -> Vtable {\n\n let vt = Vtable::for_class(\"Output\");\n\n vt.add_primitive_method_or_panic(\"debug\", class_output_debug);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/output.rs", "rank": 59, "score": 150334.0397873136 }, { "content": "pub fn instance_vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"Boolean\");\n\n vt.add_primitive_method_or_panic(\"ifTrue:ifFalse:\", boolean_if_true_if_false);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/boolean.rs", "rank": 60, "score": 150334.0397873136 }, { "content": "pub fn instance_vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"Array\");\n\n vt.add_primitive_method_or_panic(\"of:\", array_of);\n\n vt.add_primitive_method_or_panic(\"at:\", array_at);\n\n vt.add_primitive_method_or_panic(\"put:at:\", array_put_at);\n\n vt.add_primitive_method_or_panic(\"size\", array_size);\n\n 
vt.add_primitive_method_or_panic(\"toString\", array_to_string);\n\n vt.add_primitive_method_or_panic(\"arrayElementType\", array_element_type);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/array.rs", "rank": 61, "score": 150334.0397873136 }, { "content": "pub fn instance_vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"Output\");\n\n vt.add_primitive_method_or_panic(\"flush\", output_flush);\n\n vt.add_primitive_method_or_panic(\"writeString:\", output_write_string);\n\n vt.add_primitive_method_or_panic(\"toString\", output_to_string);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/output.rs", "rank": 62, "score": 150334.0397873136 }, { "content": "pub fn instance_vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"FilePath\");\n\n vt.add_primitive_method_or_panic(\"deleteFile\", filepath_delete_file);\n\n vt.add_primitive_method_or_panic(\"exists\", filepath_exists);\n\n vt.add_primitive_method_or_panic(\"file\", filepath_file);\n\n vt.add_primitive_method_or_panic(\"isDirectory\", filepath_is_directory);\n\n vt.add_primitive_method_or_panic(\"isFile\", filepath_is_file);\n\n vt.add_primitive_method_or_panic(\"/\", filepath_slash);\n\n vt.add_primitive_method_or_panic(\"pathname\", filepath_to_pathname);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/filepath.rs", "rank": 63, "score": 150334.0397873136 }, { "content": "pub fn class_vtable() -> Vtable {\n\n let vt = Vtable::for_class(\"Compiler\");\n\n vt.add_primitive_method_or_panic(\"new\", class_compiler_new);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/compiler.rs", "rank": 64, "score": 150334.0397873136 }, { "content": "pub fn class_vtable() -> Vtable {\n\n let vt = Vtable::raw(\"Class\");\n\n vt.add_primitive_method_or_panic(\n\n \"new:slots:interfaces:directMethods:instanceMethods:\",\n\n class_new_,\n\n );\n\n vt.add_primitive_method_or_panic(\"classOf\", generic_class_class);\n\n vt.add_primitive_method_or_panic(\"includes:\", class_includes_);\n\n 
vt.add_primitive_method_or_panic(\"typecheck:\", class_typecheck_);\n\n vt.add_primitive_method_or_panic(\"name\", generic_class_name);\n\n vt.add_primitive_method_or_panic(\"interfaces\", generic_class_interfaces);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/class.rs", "rank": 65, "score": 150334.0397873136 }, { "content": "pub fn class_vtable() -> Vtable {\n\n Vtable::for_class(\"FilePath\")\n\n}\n\n\n", "file_path": "src/classes/filepath.rs", "rank": 66, "score": 150334.0397873136 }, { "content": "pub fn interface_vtable() -> Vtable {\n\n let vt = Vtable::raw(\"Interface\");\n\n vt.add_primitive_method_or_panic(\n\n \"new:interfaces:directMethods:instanceMethods:\",\n\n interface_new_,\n\n );\n\n vt.add_primitive_method_or_panic(\"classOf\", generic_class_class);\n\n vt.add_primitive_method_or_panic(\"includes:\", interface_includes_);\n\n vt.add_primitive_method_or_panic(\"typecheck:\", interface_typecheck_);\n\n vt.add_primitive_method_or_panic(\"name\", generic_class_name);\n\n vt.add_primitive_method_or_panic(\"interfaces\", generic_class_interfaces);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/class.rs", "rank": 67, "score": 150334.0397873136 }, { "content": "pub fn instance_vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"Record\");\n\n vt.add_primitive_method_or_panic(\"perform:with:\", record_perform_with);\n\n vt.add_primitive_method_or_panic(\"displayOn:\", record_display_on);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/record.rs", "rank": 68, "score": 150334.0397873136 }, { "content": "pub fn class_vtable() -> Vtable {\n\n Vtable::for_class(\"Clock\")\n\n}\n\n\n", "file_path": "src/classes/clock.rs", "rank": 69, "score": 150334.0397873136 }, { "content": "pub fn into_dictionary(foolang: &Foolang, data: HashMap<Object, Object>) -> Object {\n\n Object {\n\n vtable: Rc::clone(&foolang.dictionary_vtable),\n\n datum: Datum::Dictionary(Rc::new(Dictionary {\n\n data: RefCell::new(data),\n\n })),\n\n }\n\n}\n\n\n", "file_path": 
"src/classes/dictionary.rs", "rank": 70, "score": 150212.55840171667 }, { "content": "pub fn instance_vtable() -> Vtable {\n\n let vt = Vtable::for_instance(\"ByteArray\");\n\n vt.add_primitive_method_or_panic(\"at:\", byte_array_at);\n\n vt.add_primitive_method_or_panic(\"put:at:\", byte_array_put_at);\n\n vt.add_primitive_method_or_panic(\"size\", byte_array_size);\n\n vt.add_primitive_method_or_panic(\"toString\", byte_array_to_string);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/byte_array.rs", "rank": 71, "score": 147914.13789685874 }, { "content": "pub fn class_vtable() -> Vtable {\n\n let vt = Vtable::for_class(\"ByteArray\");\n\n vt.add_primitive_method_or_panic(\"new:\", class_byte_array_new);\n\n vt.add_primitive_method_or_panic(\"toString\", class_byte_array_to_string);\n\n vt\n\n}\n\n\n", "file_path": "src/classes/byte_array.rs", "rank": 72, "score": 147914.13789685874 }, { "content": "pub fn unsafe_write_instance_variable(receiver: &Object, value: &Object, index: usize) -> Eval {\n\n let instance = receiver.instance()?;\n\n instance.instance_variables.borrow_mut()[index] = value.clone();\n\n Ok(value.clone())\n\n}\n\n\n", "file_path": "src/objects.rs", "rank": 73, "score": 147592.91822158208 }, { "content": "// Is the argument an interface?\n\nfn is_interface(argument: &Object) -> bool {\n\n match &argument.datum {\n\n Datum::Class(class) => class.interface,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/classes/class.rs", "rank": 74, "score": 146411.89179402567 }, { "content": "// Is the argument a class?\n\nfn is_class(argument: &Object) -> bool {\n\n match &argument.datum {\n\n Datum::Class(class) => !class.interface,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/classes/class.rs", "rank": 75, "score": 146411.89179402567 }, { "content": "(defun foolang--run-face-test (name source target)\n\n (with-current-buffer (get-buffer-create \"*foolang-notes*\")\n\n (end-of-buffer)\n\n (let ((start (point)))\n\n (insert \"\\n\" name)\n\n 
(lexical-let ((result (with-temp-buffer\n\n (foolang-mode)\n\n (insert source)\n\n (font-lock-fontify-buffer)\n\n (buffer-substring (point-min) (point-max))\n\n ;; (thing-at-point 'buffer)\n\n )))\n\n (end-of-buffer)\n\n (multiple-value-bind (ok issue) (foolang--face-specs-match target result)\n\n (if ok\n\n (progn (insert \" ok!\")\n\n (message \"test %s ok\" name))\n\n (push name foolang--test-failures)\n\n (let ((oops (format \" FAILED! wanted %s at %s, got %s\"\n\n (second issue) (first issue) (third issue))))\n", "file_path": "elisp/foolang.el", "rank": 76, "score": 141852.26384337657 }, { "content": "(defun foolang--run-indentation-test (name source target)\n\n (with-current-buffer (get-buffer-create \"*foolang-notes*\")\n\n (end-of-buffer)\n\n (let ((start (point)))\n\n (insert \"\\n--- \" name \" ---\\n\")\n\n (lexical-let ((result (with-temp-buffer\n\n (foolang-mode)\n\n (setq indent-tabs-mode nil)\n\n (insert source)\n\n (end-of-buffer)\n\n (foolang-indent-all)\n\n (foolang-indent-line)\n\n (buffer-substring-no-properties (point-min) (point-max)))))\n\n (end-of-buffer)\n\n (if (string= target result)\n\n (progn (insert \"ok!\")\n\n (message \"test %s ok\" name))\n\n (push name foolang--test-failures)\n\n (insert \"FAILED!\\n\")\n\n (insert \"WANTED:\\n\")\n", "file_path": "elisp/foolang.el", "rank": 77, "score": 141852.26384337657 }, { "content": "pub fn make_compiler(foo: &Foolang) -> Object {\n\n Object {\n\n vtable: Rc::clone(&foo.compiler_vtable),\n\n datum: Datum::Compiler(Rc::new(Compiler {\n\n // This makes the objects resulting from Compiler eval share same\n\n // vtable instances as the parent, which seems like the right thing\n\n // -- but it would be nice to be able to specify a different\n\n // prelude. 
Meh.\n\n env: foo.toplevel_env(),\n\n source: RefCell::new(String::new()),\n\n parsed: RefCell::new(Vec::new()),\n\n })),\n\n }\n\n}\n\n\n", "file_path": "src/classes/compiler.rs", "rank": 78, "score": 137092.7488629242 }, { "content": "pub fn make_root_filepath(env: &Env) -> Eval {\n\n // NOTE: A bit of strangeness. We use relative path as the root\n\n // to get around Windows vs Unix differences.\n\n Ok(into_filepath(PathBuf::from(\"\"), env))\n\n}\n\n\n", "file_path": "src/classes/filepath.rs", "rank": 79, "score": 134954.98899417467 }, { "content": "fn parse_record(parser: &Parser) -> Result<Expr, Unwind> {\n\n let mut source_location = parser.source_location();\n\n let mut selector = String::new();\n\n let mut args = Vec::new();\n\n loop {\n\n match parser.next_token()? {\n\n Token::KEYWORD => {\n\n selector.push_str(parser.slice());\n\n args.push(parser.parse_expr(1)?);\n\n match parser.next_token()? {\n\n Token::SIGIL if \",\" == parser.slice() => continue,\n\n Token::SIGIL if \"}\" == parser.slice() => break,\n\n _ => return parser.error(\"Malformed record\"),\n\n }\n\n }\n\n Token::SIGIL if \"}\" == parser.slice() => break,\n\n _ => return parser.error(\"Malformed record\"),\n\n }\n\n }\n\n source_location.extend_span_to(parser.span().end);\n", "file_path": "src/parse.rs", "rank": 80, "score": 133766.5126632102 }, { "content": "fn parse_method(parser: &Parser) -> Result<MethodDefinition, Unwind> {\n\n let mut method = parse_method_signature(parser)?;\n\n // println!(\"- method: {}\", method.selector);\n\n // NOTE: This is the place where I could inform parser about instance\n\n // variables.\n\n // FIXME: Would be nice to add \"while parsing method Bar#foo\"\n\n // type info to the error.\n\n method.body = Some(Box::new(parser.parse_seq()?));\n\n if parser.at_eof() {\n\n return parser.eof_error(\"End of input inside method\");\n\n }\n\n parser.next_token()?;\n\n if parser.slice() != \"!\" {\n\n return Unwind::error_at(\n\n 
method.body.unwrap().source_location(),\n\n &format!(\"Expected '!' after method {}, got: '{}'\", method.selector, parser.slice()),\n\n );\n\n }\n\n Ok(method)\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 81, "score": 131627.04060361686 }, { "content": "fn parse_block_or_dictionary(parser: &Parser) -> Result<Expr, Unwind> {\n\n let mut source_location = parser.source_location();\n\n assert_eq!(\"{\", parser.slice());\n\n //\n\n // Blocks only (if any of this happens, we're in a block)\n\n //\n\n let mut params = vec![];\n\n let mut rtype = None;\n\n let (token, span) = parser.lookahead()?;\n\n if token == Token::SIGIL && parser.slice_at(span) == \"|\" {\n\n parser.next_token()?;\n\n loop {\n\n let token = parser.next_token()?;\n\n if token == Token::WORD {\n\n params.push(parse_var(parser, false)?);\n\n continue;\n\n }\n\n if token == Token::SIGIL && parser.slice() == \"|\" {\n\n break;\n\n }\n", "file_path": "src/parse.rs", "rank": 82, "score": 131627.04060361686 }, { "content": "fn parse_method_signature(parser: &Parser) -> Result<MethodDefinition, Unwind> {\n\n assert_eq!(parser.slice(), \"method\");\n\n let source_location = parser.source_location();\n\n let mut selector = String::new();\n\n let mut parameters = Vec::new();\n\n let mut prefix = false;\n\n loop {\n\n let token = parser.next_token()?;\n\n selector.push_str(parser.slice());\n\n match token {\n\n Token::WORD => {\n\n assert!(parameters.is_empty());\n\n if \"prefix\" == &selector {\n\n prefix = true;\n\n continue;\n\n }\n\n break;\n\n }\n\n Token::SIGIL => {\n\n assert!(parameters.is_empty());\n", "file_path": "src/parse.rs", "rank": 83, "score": 129610.58206321968 }, { "content": "#[test]\n\nfn test_return_returns() {\n\n let (obj, env) = eval_obj(\n\n \"class Foo {}\n\n method foo\n\n return 1.\n\n 2!\n\n end\n\n Foo new foo\",\n\n );\n\n assert_eq!(obj, env.foo.make_integer(1));\n\n}\n\n\n", "file_path": "src/tests/test_eval.rs", "rank": 84, "score": 129009.94450004454 }, { "content": "pub 
trait Source {\n\n fn source_expr(self, expr: &Expr) -> Self;\n\n fn source(self, source_location: &SourceLocation) -> Self;\n\n fn context(self, source: &str) -> Self;\n\n}\n\n\n\nimpl Source for Eval {\n\n fn source_expr(mut self, expr: &Expr) -> Self {\n\n if let Err(unwind) = &mut self {\n\n unwind.add_source_location(&expr.source_location());\n\n }\n\n self\n\n }\n\n\n\n fn source(mut self, source_location: &SourceLocation) -> Self {\n\n if let Err(unwind) = &mut self {\n\n unwind.add_source_location(source_location);\n\n }\n\n self\n\n }\n", "file_path": "src/objects.rs", "rank": 85, "score": 128929.81524309012 }, { "content": "#[test]\n\nfn test_string_append() {\n\n assert_eq!(\n\n eval_ok(\n\n r#\"\n\n \"foo\" append: \"bar\"\n\n \"#\n\n )\n\n .string_as_str(),\n\n \"foobar\"\n\n );\n\n}\n\n\n", "file_path": "src/tests/test_string.rs", "rank": 86, "score": 128839.37356826337 }, { "content": "#[test]\n\nfn test_empty_string() {\n\n assert_eq!(eval_ok(r#\" \"\" \"#).string_as_str(), \"\");\n\n}\n\n\n", "file_path": "src/tests/test_string.rs", "rank": 87, "score": 128839.37356826337 }, { "content": "#[test]\n\nfn test_string_interpolation1() {\n\n let (object, env) = eval_obj(\n\n r#\"let a = 1.\n\n let b = 3.\n\n \"{a}.{a+1}.{b}.{b+1}\"\n\n \"#,\n\n );\n\n assert_eq!(object, env.foo.make_string(\"1.2.3.4\"));\n\n}\n\n\n", "file_path": "src/tests/test_string.rs", "rank": 88, "score": 128839.37356826337 }, { "content": "#[test]\n\nfn test_newline_escape_string() {\n\n assert_eq!(eval_ok(r#\" \"\\n\" \"#).string_as_str(), \"\\n\");\n\n}\n\n\n", "file_path": "src/tests/test_string.rs", "rank": 89, "score": 126605.62492828895 }, { "content": "fn make_token_table() -> TokenTable {\n\n let mut table: TokenTable = HashMap::new();\n\n let t = &mut table;\n\n use Token::*;\n\n\n\n // Literals should appear in prefix-positions only, hence precedence_invald\n\n ParserSyntax::def(t, HEX_INTEGER, number_prefix, invalid_suffix, precedence_invalid);\n\n ParserSyntax::def(t, 
BIN_INTEGER, number_prefix, invalid_suffix, precedence_invalid);\n\n ParserSyntax::def(t, DEC_INTEGER, number_prefix, invalid_suffix, precedence_invalid);\n\n ParserSyntax::def(t, SINGLE_FLOAT, number_prefix, invalid_suffix, precedence_invalid);\n\n ParserSyntax::def(t, DOUBLE_FLOAT, number_prefix, invalid_suffix, precedence_invalid);\n\n ParserSyntax::def(t, STRING, string_prefix, invalid_suffix, precedence_invalid);\n\n // Comments\n\n ParserSyntax::def(t, COMMENT, ignore_prefix, ignore_suffix, precedence_1000);\n\n ParserSyntax::def(t, BLOCK_COMMENT, ignore_prefix, ignore_suffix, precedence_1000);\n\n // Others\n\n ParserSyntax::def(t, WORD, identifier_prefix, identifier_suffix, identifier_precedence);\n\n ParserSyntax::def(t, SIGIL, operator_prefix, operator_suffix, operator_precedence);\n\n ParserSyntax::def(t, KEYWORD, invalid_prefix, keyword_suffix, precedence_9);\n\n ParserSyntax::def(t, EOF, eof_prefix, eof_suffix, precedence_0);\n\n\n\n table\n\n}\n\n\n\n// KLUDGE: couple of places which don't have convenient access to the table\n\n// need this.\n\nconst PREFIX_PRECEDENCE: usize = 1000;\n\n\n\nconst UNKNOWN_OPERATOR_SYNTAX: ParserSyntax = ParserSyntax::Operator(true, true, 10);\n\n\n", "file_path": "src/parse.rs", "rank": 90, "score": 126196.52317144 }, { "content": "pub fn make_file(path: &Path, env: &Env) -> Object {\n\n File {\n\n path: PathBuf::from(path),\n\n read: false,\n\n truncate: false,\n\n write_mode: WriteMode::None,\n\n }\n\n .object(env)\n\n}\n\n\n", "file_path": "src/classes/file.rs", "rank": 91, "score": 126174.34159339718 }, { "content": "fn keyword_suffix(\n\n parser: &Parser,\n\n left: Expr,\n\n precedence: PrecedenceFunction,\n\n) -> Result<Expr, Unwind> {\n\n let precedence = precedence(parser, parser.span())?;\n\n let mut selector = parser.tokenstring();\n\n let mut args = vec![];\n\n let mut source_location = parser.source_location();\n\n loop {\n\n args.push(parser.parse_expr(precedence)?);\n\n // Two-element lookahead.\n\n let 
(token, _) = parser.lookahead()?;\n\n if token == Token::KEYWORD {\n\n parser.next_token()?;\n\n selector.push_str(parser.slice());\n\n } else {\n\n break;\n\n }\n\n }\n", "file_path": "src/parse.rs", "rank": 92, "score": 125642.97099355902 }, { "content": "fn filestream_offset_from_end(receiver: &Object, args: &[Object], env: &Env) -> Eval {\n\n let mut fileref =\n\n receiver.as_filestream(\"FileStream#offsetFromEnd:\")?.borrow_open(\"#offsetFromEnd:\")?;\n\n let pos = match fileref.seek(SeekFrom::End(args[0].integer())) {\n\n Ok(pos) => pos,\n\n Err(e) => {\n\n return Unwind::error(&format!(\n\n \"Could not set offset for {:?} ({:?})\",\n\n receiver,\n\n e.kind()\n\n ))\n\n }\n\n };\n\n Ok(env.foo.make_integer(pos as i64))\n\n}\n\n\n", "file_path": "src/classes/filestream.rs", "rank": 93, "score": 124021.57229832746 }, { "content": "type TokenTable = HashMap<Token, ParserSyntax>;\n", "file_path": "src/parse.rs", "rank": 94, "score": 123169.97599558553 }, { "content": "pub trait AsFileStream {\n\n fn as_filestream<'a>(&'a self, ctx: &str) -> Result<&'a FileStream, Unwind>;\n\n}\n\n\n\nimpl AsFileStream for Object {\n\n fn as_filestream<'a>(&'a self, ctx: &str) -> Result<&'a FileStream, Unwind> {\n\n match &self.datum {\n\n Datum::FileStream(ref filestream) => Ok(filestream),\n\n _ => Unwind::error(&format!(\"{:?} is not a FileStream in {}\", self, ctx)),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Debug for FileStream {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"#<FileStream {:?}>\", self.path.to_string_lossy())\n\n }\n\n}\n\n\n", "file_path": "src/classes/filestream.rs", "rank": 95, "score": 123086.25287687415 }, { "content": "(defun foolang--current-line ()\n\n (substring-no-properties (thing-at-point 'line)))\n\n\n", "file_path": "elisp/foolang.el", "rank": 96, "score": 123003.6111523466 }, { "content": "#[test]\n\nfn test_parse_keyword_literal_as_argument_to_keyword_message() {\n\n assert_eq!(\n\n parse_expr(\"List collectUsing: 
#doSelectors: from: self\"),\n\n Ok(keyword(\n\n 5..43,\n\n \"collectUsing:from:\",\n\n var(0..4, \"List\"),\n\n vec![selector(19..32, \"doSelectors:\"), var(39..43, \"self\")]\n\n ))\n\n );\n\n}\n\n\n", "file_path": "src/tests/test_parser.rs", "rank": 97, "score": 120727.39585333643 }, { "content": "#[test]\n\nfn test_array_eq() {\n\n assert_eq!(eval_ok(\"[1,2,3] is [1,2,3]\").boolean(), false);\n\n assert_eq!(eval_ok(\"{ |arr| arr is arr } value: [1,2,3]\").boolean(), true);\n\n}\n\n\n", "file_path": "src/tests/test_array.rs", "rank": 98, "score": 117316.09724876621 }, { "content": "#[test]\n\nfn test_parse_on_eof() {\n\n assert_eq!(\n\n eval_ok(\n\n r#\"\n\n let compiler = Compiler new.\n\n compiler parse: \"41 +\" onEof: { |err| err }\n\n \"#\n\n )\n\n .string_as_str(),\n\n \"Unexpected EOF in value position\"\n\n );\n\n}\n", "file_path": "src/tests/test_compiler.rs", "rank": 99, "score": 117316.09724876621 } ]
Rust
crates/lib/kajiya-backend/src/shader_compiler.rs
MrBenj4min/kajiya
fdf3a4e11ef0d4bfd5d177396fc9d674b5b8317a
use crate::file::LoadFile; use anyhow::{anyhow, bail, Context, Result}; use bytes::Bytes; use relative_path::RelativePathBuf; use std::{path::PathBuf, sync::Arc}; use turbosloth::*; pub struct CompiledShader { pub name: String, pub spirv: Bytes, } #[derive(Clone, Hash)] pub struct CompileShader { pub path: PathBuf, pub profile: String, } #[async_trait] impl LazyWorker for CompileShader { type Output = Result<CompiledShader>; async fn run(self, ctx: RunContext) -> Self::Output { let ext = self .path .extension() .map(|s| s.to_string_lossy().to_string()) .unwrap_or_else(|| "".to_string()); let name = self .path .file_stem() .map(|s| s.to_string_lossy().to_string()) .unwrap_or_else(|| "unknown".to_string()); match ext.as_str() { "glsl" => unimplemented!(), "spv" => { let spirv = LoadFile::new(self.path.clone())?.run(ctx).await?; Ok(CompiledShader { name, spirv }) } "hlsl" => { let file_path = self.path.to_str().unwrap().to_owned(); let source = shader_prepper::process_file( &file_path, &mut ShaderIncludeProvider { ctx }, String::new(), ); let source = source.map_err(|err| anyhow!("{}", err))?; let target_profile = format!("{}_6_4", self.profile); let spirv = compile_generic_shader_hlsl_impl(&name, &source, &target_profile)?; Ok(CompiledShader { name, spirv }) } _ => anyhow::bail!("Unrecognized shader file extension: {}", ext), } } } pub struct RayTracingShader { pub name: String, pub spirv: Bytes, } #[derive(Clone, Hash)] pub struct CompileRayTracingShader { pub path: PathBuf, } #[async_trait] impl LazyWorker for CompileRayTracingShader { type Output = Result<RayTracingShader>; async fn run(self, ctx: RunContext) -> Self::Output { let file_path = self.path.to_str().unwrap().to_owned(); let source = shader_prepper::process_file( &file_path, &mut ShaderIncludeProvider { ctx }, String::new(), ); let source = source.map_err(|err| anyhow!("{}", err))?; let ext = self .path .extension() .map(|s| s.to_string_lossy().to_string()) .unwrap_or_else(|| "".to_string()); let name = 
self .path .file_stem() .map(|s| s.to_string_lossy().to_string()) .unwrap_or_else(|| "unknown".to_string()); match ext.as_str() { "glsl" => unimplemented!(), "hlsl" => { let target_profile = "lib_6_4"; let spirv = compile_generic_shader_hlsl_impl(&name, &source, target_profile)?; Ok(RayTracingShader { name, spirv }) } _ => anyhow::bail!("Unrecognized shader file extension: {}", ext), } } } struct ShaderIncludeProvider { ctx: RunContext, } impl<'a> shader_prepper::IncludeProvider for ShaderIncludeProvider { type IncludeContext = String; fn get_include( &mut self, path: &str, parent_file: &Self::IncludeContext, ) -> std::result::Result< (String, Self::IncludeContext), shader_prepper::BoxedIncludeProviderError, > { let resolved_path = if let Some('/') = path.chars().next() { path.to_owned() } else { let mut folder: RelativePathBuf = parent_file.into(); folder.pop(); folder.join(path).as_str().to_string() }; let blob: Arc<Bytes> = smol::block_on( crate::file::LoadFile::new(&resolved_path) .with_context(|| format!("Failed loading shader include {}", path))? 
.into_lazy() .eval(&self.ctx), )?; Ok((String::from_utf8(blob.to_vec())?, resolved_path)) } } pub fn get_cs_local_size_from_spirv(spirv: &[u32]) -> Result<[u32; 3]> { let mut loader = rspirv::dr::Loader::new(); rspirv::binary::parse_words(spirv, &mut loader).unwrap(); let module = loader.module(); for inst in module.global_inst_iter() { if inst.class.opcode as u32 == 16 { let local_size = &inst.operands[2..5]; use rspirv::dr::Operand::LiteralInt32; if let [LiteralInt32(x), LiteralInt32(y), LiteralInt32(z)] = *local_size { return Ok([x, y, z]); } else { bail!("Could not parse the ExecutionMode SPIR-V op"); } } } Err(anyhow!("Could not find a ExecutionMode SPIR-V op")) } fn compile_generic_shader_hlsl_impl( name: &str, source: &[shader_prepper::SourceChunk], target_profile: &str, ) -> Result<Bytes> { let mut source_text = String::new(); for s in source { source_text += &s.source; } let t0 = std::time::Instant::now(); let spirv = hassle_rs::compile_hlsl( name, &source_text, "main", target_profile, &[ "-spirv", "-enable-templates", "-fspv-target-env=vulkan1.2", "-WX", "-Ges", ], &[], ) .map_err(|err| anyhow!("{}", err))?; log::trace!("dxc took {:?} for {}", t0.elapsed(), name,); Ok(spirv.into()) }
use crate::file::LoadFile; use anyhow::{anyhow, bail, Context, Result}; use bytes::Bytes; use relative_path::RelativePathBuf; use std::{path::PathBuf, sync::Arc}; use turbosloth::*; pub struct CompiledShader { pub name: String, pub spirv: Bytes, } #[derive(Clone, Hash)] pub struct CompileShader { pub path: PathBuf, pub profile: String, } #[async_trait] impl LazyWorker for CompileShader { type Output = Result<CompiledShader>;
} pub struct RayTracingShader { pub name: String, pub spirv: Bytes, } #[derive(Clone, Hash)] pub struct CompileRayTracingShader { pub path: PathBuf, } #[async_trait] impl LazyWorker for CompileRayTracingShader { type Output = Result<RayTracingShader>; async fn run(self, ctx: RunContext) -> Self::Output { let file_path = self.path.to_str().unwrap().to_owned(); let source = shader_prepper::process_file( &file_path, &mut ShaderIncludeProvider { ctx }, String::new(), ); let source = source.map_err(|err| anyhow!("{}", err))?; let ext = self .path .extension() .map(|s| s.to_string_lossy().to_string()) .unwrap_or_else(|| "".to_string()); let name = self .path .file_stem() .map(|s| s.to_string_lossy().to_string()) .unwrap_or_else(|| "unknown".to_string()); match ext.as_str() { "glsl" => unimplemented!(), "hlsl" => { let target_profile = "lib_6_4"; let spirv = compile_generic_shader_hlsl_impl(&name, &source, target_profile)?; Ok(RayTracingShader { name, spirv }) } _ => anyhow::bail!("Unrecognized shader file extension: {}", ext), } } } struct ShaderIncludeProvider { ctx: RunContext, } impl<'a> shader_prepper::IncludeProvider for ShaderIncludeProvider { type IncludeContext = String; fn get_include( &mut self, path: &str, parent_file: &Self::IncludeContext, ) -> std::result::Result< (String, Self::IncludeContext), shader_prepper::BoxedIncludeProviderError, > { let resolved_path = if let Some('/') = path.chars().next() { path.to_owned() } else { let mut folder: RelativePathBuf = parent_file.into(); folder.pop(); folder.join(path).as_str().to_string() }; let blob: Arc<Bytes> = smol::block_on( crate::file::LoadFile::new(&resolved_path) .with_context(|| format!("Failed loading shader include {}", path))? 
.into_lazy() .eval(&self.ctx), )?; Ok((String::from_utf8(blob.to_vec())?, resolved_path)) } } pub fn get_cs_local_size_from_spirv(spirv: &[u32]) -> Result<[u32; 3]> { let mut loader = rspirv::dr::Loader::new(); rspirv::binary::parse_words(spirv, &mut loader).unwrap(); let module = loader.module(); for inst in module.global_inst_iter() { if inst.class.opcode as u32 == 16 { let local_size = &inst.operands[2..5]; use rspirv::dr::Operand::LiteralInt32; if let [LiteralInt32(x), LiteralInt32(y), LiteralInt32(z)] = *local_size { return Ok([x, y, z]); } else { bail!("Could not parse the ExecutionMode SPIR-V op"); } } } Err(anyhow!("Could not find a ExecutionMode SPIR-V op")) } fn compile_generic_shader_hlsl_impl( name: &str, source: &[shader_prepper::SourceChunk], target_profile: &str, ) -> Result<Bytes> { let mut source_text = String::new(); for s in source { source_text += &s.source; } let t0 = std::time::Instant::now(); let spirv = hassle_rs::compile_hlsl( name, &source_text, "main", target_profile, &[ "-spirv", "-enable-templates", "-fspv-target-env=vulkan1.2", "-WX", "-Ges", ], &[], ) .map_err(|err| anyhow!("{}", err))?; log::trace!("dxc took {:?} for {}", t0.elapsed(), name,); Ok(spirv.into()) }
async fn run(self, ctx: RunContext) -> Self::Output { let ext = self .path .extension() .map(|s| s.to_string_lossy().to_string()) .unwrap_or_else(|| "".to_string()); let name = self .path .file_stem() .map(|s| s.to_string_lossy().to_string()) .unwrap_or_else(|| "unknown".to_string()); match ext.as_str() { "glsl" => unimplemented!(), "spv" => { let spirv = LoadFile::new(self.path.clone())?.run(ctx).await?; Ok(CompiledShader { name, spirv }) } "hlsl" => { let file_path = self.path.to_str().unwrap().to_owned(); let source = shader_prepper::process_file( &file_path, &mut ShaderIncludeProvider { ctx }, String::new(), ); let source = source.map_err(|err| anyhow!("{}", err))?; let target_profile = format!("{}_6_4", self.profile); let spirv = compile_generic_shader_hlsl_impl(&name, &source, &target_profile)?; Ok(CompiledShader { name, spirv }) } _ => anyhow::bail!("Unrecognized shader file extension: {}", ext), } }
function_block-full_function
[ { "content": "pub fn normalized_path_from_vfs(path: impl Into<PathBuf>) -> anyhow::Result<PathBuf> {\n\n let path = path.into();\n\n\n\n for (mount_point, mounted_path) in VFS_MOUNT_POINTS.lock().iter() {\n\n if let Ok(rel_path) = path.strip_prefix(mount_point) {\n\n return Ok(mounted_path\n\n .join(rel_path)\n\n .normalize()\n\n .with_context(|| {\n\n format!(\n\n \"Mounted parent folder: {:?}. Relative path: {:?}\",\n\n mounted_path, rel_path\n\n )\n\n })?\n\n .as_path()\n\n .to_owned());\n\n }\n\n }\n\n\n\n if path.strip_prefix(\"/\").is_ok() {\n", "file_path": "crates/lib/kajiya-backend/src/file.rs", "rank": 0, "score": 278249.7067215905 }, { "content": "pub fn canonical_path_from_vfs(path: impl Into<PathBuf>) -> anyhow::Result<PathBuf> {\n\n let path = path.into();\n\n\n\n for (mount_point, mounted_path) in VFS_MOUNT_POINTS.lock().iter() {\n\n if let Ok(rel_path) = path.strip_prefix(mount_point) {\n\n return mounted_path\n\n .join(rel_path)\n\n .canonicalize()\n\n .with_context(|| {\n\n format!(\n\n \"Mounted parent folder: {:?}. Relative path: {:?}\",\n\n mounted_path, rel_path\n\n )\n\n })\n\n .with_context(|| format!(\"canonicalize {:?}\", rel_path));\n\n }\n\n }\n\n\n\n if path.strip_prefix(\"/\").is_ok() {\n\n anyhow::bail!(\n\n \"No vfs mount point for {:?}. 
Current mount points: {:#?}\",\n\n path,\n\n VFS_MOUNT_POINTS.lock()\n\n );\n\n }\n\n\n\n Ok(path)\n\n}\n\n\n", "file_path": "crates/lib/kajiya-backend/src/file.rs", "rank": 1, "score": 278249.7067215905 }, { "content": "pub fn set_vfs_mount_point(mount_point: impl Into<String>, path: impl Into<PathBuf>) {\n\n VFS_MOUNT_POINTS\n\n .lock()\n\n .insert(mount_point.into(), path.into());\n\n}\n\n\n", "file_path": "crates/lib/kajiya-backend/src/file.rs", "rank": 2, "score": 273346.3847559596 }, { "content": "pub fn mmapped_asset<T, P: Into<std::path::PathBuf>>(path: P) -> anyhow::Result<&'static T> {\n\n let path = path.into();\n\n let path = kajiya_backend::canonical_path_from_vfs(&path)\n\n .with_context(|| format!(\"Can't mmap asset: file doesn't exist: {:?}\", path))?;\n\n\n\n let mut mmaps = ASSET_MMAPS.lock();\n\n let data: &[u8] = mmaps.entry(path.clone()).or_insert_with(|| {\n\n let file =\n\n File::open(&path).unwrap_or_else(|e| panic!(\"Could not mmap {:?}: {:?}\", path, e));\n\n unsafe { memmap2::MmapOptions::new().map(&file).unwrap() }\n\n });\n\n let asset: &T = unsafe { (data.as_ptr() as *const T).as_ref() }.unwrap();\n\n Ok(asset)\n\n}\n", "file_path": "crates/lib/kajiya/src/mmap.rs", "rank": 3, "score": 196863.80323165015 }, { "content": "pub fn set_standard_vfs_mount_points(kajiya_path: impl Into<PathBuf>) {\n\n let kajiya_path = kajiya_path.into();\n\n set_vfs_mount_point(\"/kajiya\", &kajiya_path);\n\n set_vfs_mount_point(\"/shaders\", kajiya_path.join(\"assets/shaders\"));\n\n set_vfs_mount_point(\n\n \"/rust-shaders-compiled\",\n\n kajiya_path.join(\"assets/rust-shaders-compiled\"),\n\n );\n\n set_vfs_mount_point(\"/images\", kajiya_path.join(\"assets/images\"));\n\n}\n\n\n", "file_path": "crates/lib/kajiya-backend/src/file.rs", "rank": 4, "score": 193231.126365063 }, { "content": "/// Import some glTF 2.0 from the file system.\n\npub fn import<P>(path: P) -> Result<Import>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n import_path(path.as_ref())\n\n}\n", 
"file_path": "crates/lib/kajiya-asset/src/import_gltf.rs", "rank": 6, "score": 183308.87557255174 }, { "content": "pub fn forget_queries(queries: impl Iterator<Item = GpuProfilerQueryId>) {\n\n let mut prof = GPU_PROFILER.lock();\n\n prof.forget_queries(queries);\n\n}\n\n\n", "file_path": "crates/lib/kajiya-backend/src/gpu_profiler.rs", "rank": 7, "score": 163142.7005435258 }, { "content": "fn import_path(path: &Path) -> Result<Import> {\n\n let base = path.parent().unwrap_or_else(|| Path::new(\"./\"));\n\n let file = fs::File::open(path).map_err(Error::Io)?;\n\n let reader = io::BufReader::new(file);\n\n import_impl(Gltf::from_reader(reader)?, Some(base))\n\n}\n\n\n", "file_path": "crates/lib/kajiya-asset/src/import_gltf.rs", "rank": 8, "score": 158295.71930415026 }, { "content": "fn import_impl(Gltf { document, blob }: Gltf, base: Option<&Path>) -> Result<Import> {\n\n let buffer_data = import_buffer_data(&document, base, blob)?;\n\n let image_data = import_image_data(&document, base, &buffer_data)?;\n\n let import = (document, buffer_data, image_data);\n\n Ok(import)\n\n}\n\n\n", "file_path": "crates/lib/kajiya-asset/src/import_gltf.rs", "rank": 9, "score": 147663.78349068196 }, { "content": "pub fn flatten_bytes(writer: &mut impl std::io::Write, data: &[u8]) {\n\n writer.write_all(data).unwrap();\n\n}\n\n\n\npub struct DeferredBlob {\n\n pub fixup_addr: usize, // offset within parent\n\n pub nested: FlattenCtx,\n\n}\n\n\n\n#[derive(Default)]\n\npub struct FlattenCtx {\n\n pub section_idx: Option<usize>,\n\n pub bytes: Vec<u8>,\n\n pub deferred: Vec<DeferredBlob>,\n\n}\n\n\n\nimpl FlattenCtx {\n\n fn allocate_section_indices(&mut self) {\n\n let mut counter = 0;\n\n self.allocate_section_indices_impl(&mut counter);\n", "file_path": "crates/lib/kajiya-asset/src/mesh.rs", "rank": 10, "score": 146704.29215451202 }, { "content": "struct GpuProfiler {\n\n active_queries: HashMap<GpuProfilerQueryId, ActiveQuery>,\n\n frame_query_ids: Vec<GpuProfilerQueryId>,\n\n 
next_query_id: u64,\n\n stats: GpuProfilerStats,\n\n}\n\n\n\nimpl GpuProfiler {\n\n pub fn new() -> Self {\n\n Self {\n\n active_queries: Default::default(),\n\n frame_query_ids: Default::default(),\n\n next_query_id: 0,\n\n stats: Default::default(),\n\n }\n\n }\n\n\n\n fn report_durations_ticks(\n\n &mut self,\n\n ns_per_tick: f32,\n", "file_path": "crates/lib/kajiya-backend/src/gpu_profiler.rs", "rank": 11, "score": 130816.12948751046 }, { "content": "type BufferBytes = Bytes;\n\n\n", "file_path": "crates/lib/kajiya-asset/src/import_gltf.rs", "rank": 12, "score": 126933.70137088172 }, { "content": "fn read_to_end<P>(path: P) -> Result<Vec<u8>>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n use io::Read;\n\n let file = fs::File::open(path.as_ref()).map_err(Error::Io)?;\n\n // Allocate one extra byte so the buffer doesn't need to grow before the\n\n // final `read` call at the end of the file. Don't worry about `usize`\n\n // overflow because reading will fail regardless in that case.\n\n let length = file.metadata().map(|x| x.len() + 1).unwrap_or(0);\n\n let mut reader = io::BufReader::new(file);\n\n let mut data = Vec::with_capacity(length as usize);\n\n reader.read_to_end(&mut data).map_err(Error::Io)?;\n\n Ok(data)\n\n}\n\n\n", "file_path": "crates/lib/kajiya-asset/src/import_gltf.rs", "rank": 13, "score": 122998.89754491804 }, { "content": "struct ActiveQuery {\n\n id: GpuProfilerQueryId,\n\n scope: RenderScopeDesc,\n\n user_id: usize,\n\n}\n\n\n\nimpl GpuProfilerStats {\n\n fn report_duration_nanos(\n\n &mut self,\n\n query_id: GpuProfilerQueryId,\n\n duration: u64,\n\n active_query: ActiveQuery,\n\n ) {\n\n let scope_id = GpuProfilerScopeId::new(active_query.scope.clone(), active_query.user_id);\n\n self.order.push(scope_id.clone());\n\n\n\n let mut entry = self\n\n .scopes\n\n .entry(scope_id)\n\n .or_insert_with(|| GpuProfilerScope::new(active_query.scope));\n", "file_path": "crates/lib/kajiya-backend/src/gpu_profiler.rs", "rank": 14, "score": 120029.85139684423 
}, { "content": "pub fn set_up_logging(default_log_level: log::LevelFilter) -> anyhow::Result<()> {\n\n use fern::colors::{Color, ColoredLevelConfig};\n\n\n\n // configure colors for the whole line\n\n let colors_line = ColoredLevelConfig::new()\n\n .error(Color::Red)\n\n .warn(Color::Yellow)\n\n // we actually don't need to specify the color for debug and info, they are white by default\n\n .info(Color::White)\n\n .debug(Color::White)\n\n // depending on the terminals color scheme, this is the same as the background color\n\n .trace(Color::BrightBlack);\n\n\n\n // configure colors for the name of the level.\n\n // since almost all of them are the some as the color for the whole line, we\n\n // just clone `colors_line` and overwrite our changes\n\n let colors_level = colors_line.info(Color::Green);\n\n // here we set up our fern Dispatch\n\n\n\n let console_out = fern::Dispatch::new()\n", "file_path": "crates/lib/kajiya/src/logging.rs", "rank": 15, "score": 118299.31702755674 }, { "content": "struct TangentCalcContext<'a> {\n\n indices: &'a [u32],\n\n positions: &'a [[f32; 3]],\n\n normals: &'a [[f32; 3]],\n\n uvs: &'a [[f32; 2]],\n\n tangents: &'a mut [[f32; 4]],\n\n}\n\n\n\nimpl<'a> mikktspace::Geometry for TangentCalcContext<'a> {\n\n fn num_faces(&self) -> usize {\n\n self.indices.len() / 3\n\n }\n\n\n\n fn num_vertices_of_face(&self, _face: usize) -> usize {\n\n 3\n\n }\n\n\n\n fn position(&self, face: usize, vert: usize) -> [f32; 3] {\n\n self.positions[self.indices[face * 3 + vert] as usize]\n\n }\n", "file_path": "crates/lib/kajiya-asset/src/mesh.rs", "rank": 16, "score": 117281.11305617107 }, { "content": "pub fn get_stats() -> GpuProfilerStats {\n\n GPU_PROFILER.lock().stats.clone()\n\n}\n\n\n\n#[derive(Clone, PartialEq, Eq, Hash)]\n\npub struct GpuProfilerScopeId(RenderScopeDesc, usize);\n\n\n\nimpl GpuProfilerScopeId {\n\n pub fn new(s: RenderScopeDesc, user_id: usize) -> Self {\n\n Self(s, user_id)\n\n }\n\n}\n\n\n\n// TODO: currently merges multiple 
invocations in a frame into a single bucket, and averages it\n\n// should instead report the count per frame along with correct per-hit timing\n\n#[derive(Clone)]\n\npub struct GpuProfilerScope {\n\n pub scope: RenderScopeDesc,\n\n pub hits: Vec<u64>, // nanoseconds\n\n pub write_head: u32,\n", "file_path": "crates/lib/kajiya-backend/src/gpu_profiler.rs", "rank": 17, "score": 115997.97878843575 }, { "content": "#[derive(DeJson)]\n\nstruct RustShaderCompileResult {\n\n // entry name -> shader path\n\n entry_to_shader_module: Vec<(String, String)>,\n\n}\n\n\n\n#[derive(Clone, Hash)]\n\npub struct CompileRustShaderCrate;\n\n\n\n#[async_trait]\n\nimpl LazyWorker for CompileRustShaderCrate {\n\n type Output = Result<()>;\n\n\n\n async fn run(self, ctx: RunContext) -> Self::Output {\n\n // Unlike regular shader building, this one runs in a separate thread in the background.\n\n //\n\n // The built shaders are cached and checked-in, meaning that\n\n // 1. Devs/users don't need to have Rust-GPU\n\n // 2. 
The previously built shaders can be used at startup without stalling the app\n\n //\n\n // To accomplish such behavior, this function lies to `turbosloth`, immediately claiming success.\n", "file_path": "crates/lib/kajiya-backend/src/rust_shader_compiler.rs", "rank": 18, "score": 115093.45720635187 }, { "content": "pub fn convert_image_type_to_view_type(image_type: ImageType) -> vk::ImageViewType {\n\n match image_type {\n\n ImageType::Tex1d => vk::ImageViewType::TYPE_1D,\n\n ImageType::Tex1dArray => vk::ImageViewType::TYPE_1D_ARRAY,\n\n ImageType::Tex2d => vk::ImageViewType::TYPE_2D,\n\n ImageType::Tex2dArray => vk::ImageViewType::TYPE_2D_ARRAY,\n\n ImageType::Tex3d => vk::ImageViewType::TYPE_3D,\n\n ImageType::Cube => vk::ImageViewType::CUBE,\n\n ImageType::CubeArray => vk::ImageViewType::CUBE_ARRAY,\n\n }\n\n}\n\n\n", "file_path": "crates/lib/kajiya-backend/src/vulkan/image.rs", "rank": 19, "score": 113981.09325723651 }, { "content": "pub trait TypeEquals {\n\n type Other;\n\n fn same(value: Self) -> Self::Other;\n\n}\n\n\n\nimpl<T: Sized> TypeEquals for T {\n\n type Other = Self;\n\n fn same(value: Self) -> Self::Other {\n\n value\n\n }\n\n}\n\n\n\nimpl Default for RenderGraph {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl RenderGraph {\n\n pub fn new() -> Self {\n", "file_path": "crates/lib/kajiya-rg/src/graph.rs", "rank": 20, "score": 113207.29825385148 }, { "content": "#[allow(dead_code)]\n\npub fn as_byte_slice<T>(t: &T) -> &[u8]\n\nwhere\n\n T: Copy,\n\n{\n\n unsafe { std::slice::from_raw_parts(t as *const T as *mut u8, std::mem::size_of::<T>()) }\n\n}\n", "file_path": "crates/lib/kajiya-backend/src/bytes.rs", "rank": 21, "score": 112575.07678264187 }, { "content": "pub fn enumerate_physical_devices(instance: &Arc<Instance>) -> Result<Vec<PhysicalDevice>> {\n\n unsafe {\n\n let pdevices = instance.raw.enumerate_physical_devices()?;\n\n\n\n Ok(pdevices\n\n .into_iter()\n\n .map(|pdevice| {\n\n let properties = 
instance.raw.get_physical_device_properties(pdevice);\n\n /*let properties = PhysicalDeviceProperties {\n\n api_version: properties.api_version,\n\n driver_version: properties.driver_version,\n\n vendor_id: properties.vendor_id,\n\n device_id: properties.device_id,\n\n device_type: properties.device_type,\n\n device_name: CStr::from_ptr(&properties.device_name[0])\n\n .to_str()\n\n .unwrap()\n\n .to_string(),\n\n pipeline_cache_uuid: properties.pipeline_cache_uuid,\n\n limits: properties.limits,\n", "file_path": "crates/lib/kajiya-backend/src/vulkan/physical_device.rs", "rank": 22, "score": 112121.6043982381 }, { "content": "pub fn reference_path_trace(\n\n rg: &mut RenderGraph,\n\n output_img: &mut rg::Handle<Image>,\n\n bindless_descriptor_set: vk::DescriptorSet,\n\n tlas: &rg::Handle<RayTracingAcceleration>,\n\n) {\n\n SimpleRenderPass::new_rt(\n\n rg.add_pass(\"reference pt\"),\n\n ShaderSource::hlsl(\"/shaders/rt/reference_path_trace.rgen.hlsl\"),\n\n [\n\n ShaderSource::hlsl(\"/shaders/rt/gbuffer.rmiss.hlsl\"),\n\n ShaderSource::hlsl(\"/shaders/rt/shadow.rmiss.hlsl\"),\n\n ],\n\n [ShaderSource::hlsl(\"/shaders/rt/gbuffer.rchit.hlsl\")],\n\n )\n\n .write(output_img)\n\n .raw_descriptor_set(1, bindless_descriptor_set)\n\n .trace_rays(tlas, output_img.desc().extent);\n\n}\n", "file_path": "crates/lib/kajiya/src/renderers/reference.rs", "rank": 23, "score": 111512.38396382672 }, { "content": "pub trait GpuViewType {\n\n const IS_WRITABLE: bool;\n\n}\n\nimpl GpuViewType for GpuSrv {\n\n const IS_WRITABLE: bool = false;\n\n}\n\nimpl GpuViewType for GpuUav {\n\n const IS_WRITABLE: bool = true;\n\n}\n\nimpl GpuViewType for GpuRt {\n\n const IS_WRITABLE: bool = true;\n\n}\n", "file_path": "crates/lib/kajiya-rg/src/resource.rs", "rank": 24, "score": 111493.54729237853 }, { "content": "pub fn report_durations_ticks(\n\n ns_per_tick: f32,\n\n durations: impl Iterator<Item = (GpuProfilerQueryId, u64)>,\n\n) {\n\n let mut prof = GPU_PROFILER.lock();\n\n 
prof.report_durations_ticks(ns_per_tick, durations);\n\n}\n\n\n", "file_path": "crates/lib/kajiya-backend/src/gpu_profiler.rs", "rank": 25, "score": 109816.19625062263 }, { "content": "pub fn get_access_info(access_type: AccessType) -> AccessInfo {\n\n match access_type {\n\n AccessType::Nothing => AccessInfo {\n\n stage_mask: vk::PipelineStageFlags::empty(),\n\n access_mask: vk::AccessFlags::empty(),\n\n image_layout: vk::ImageLayout::UNDEFINED,\n\n },\n\n AccessType::CommandBufferReadNVX => AccessInfo {\n\n stage_mask: vk::PipelineStageFlags::COMMAND_PREPROCESS_NV,\n\n access_mask: vk::AccessFlags::COMMAND_PREPROCESS_READ_NV,\n\n image_layout: vk::ImageLayout::UNDEFINED,\n\n },\n\n AccessType::IndirectBuffer => AccessInfo {\n\n stage_mask: vk::PipelineStageFlags::DRAW_INDIRECT,\n\n access_mask: vk::AccessFlags::INDIRECT_COMMAND_READ,\n\n image_layout: vk::ImageLayout::UNDEFINED,\n\n },\n\n AccessType::IndexBuffer => AccessInfo {\n\n stage_mask: vk::PipelineStageFlags::VERTEX_INPUT,\n\n access_mask: vk::AccessFlags::INDEX_READ,\n", "file_path": "crates/lib/kajiya-backend/src/vulkan/barrier.rs", "rank": 26, "score": 107463.94972132955 }, { "content": "pub fn with_stats<F: FnOnce(&GpuProfilerStats)>(f: F) {\n\n f(&GPU_PROFILER.lock().stats);\n\n}\n\n\n", "file_path": "crates/lib/kajiya-backend/src/gpu_profiler.rs", "rank": 27, "score": 107240.96237697247 }, { "content": "// TODO: is access type relevant here at all?\n\npub fn image_aspect_mask_from_access_type_and_format(\n\n access_type: AccessType,\n\n format: vk::Format,\n\n) -> Option<vk::ImageAspectFlags> {\n\n let image_layout = get_access_info(access_type).image_layout;\n\n\n\n match image_layout {\n\n vk::ImageLayout::GENERAL\n\n | vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL\n\n | vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL\n\n | vk::ImageLayout::DEPTH_STENCIL_READ_ONLY_OPTIMAL\n\n | vk::ImageLayout::DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL\n\n | vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL\n\n | 
vk::ImageLayout::TRANSFER_SRC_OPTIMAL\n\n | vk::ImageLayout::TRANSFER_DST_OPTIMAL => Some(image_aspect_mask_from_format(format)),\n\n _ => {\n\n //println!(\"{:?}\", image_layout);\n\n None\n\n }\n\n }\n", "file_path": "crates/lib/kajiya-backend/src/vulkan/barrier.rs", "rank": 28, "score": 105292.37056374385 }, { "content": "#[allow(dead_code)]\n\npub fn into_byte_vec<T>(mut v: Vec<T>) -> Vec<u8>\n\nwhere\n\n T: Copy,\n\n{\n\n unsafe {\n\n let p = v.as_mut_ptr();\n\n let item_sizeof = std::mem::size_of::<T>();\n\n let len = v.len() * item_sizeof;\n\n let cap = v.capacity() * item_sizeof;\n\n std::mem::forget(v);\n\n Vec::from_raw_parts(p as *mut u8, len, cap)\n\n }\n\n}\n\n\n", "file_path": "crates/lib/kajiya-backend/src/bytes.rs", "rank": 29, "score": 105144.11601590761 }, { "content": "type StageDescriptorSetLayouts = HashMap<u32, DescriptorSetLayout>;\n\n\n\npub struct ShaderPipelineCommon {\n\n pub pipeline_layout: vk::PipelineLayout,\n\n pub pipeline: vk::Pipeline,\n\n pub set_layout_info: Vec<HashMap<u32, vk::DescriptorType>>,\n\n pub descriptor_pool_sizes: Vec<vk::DescriptorPoolSize>,\n\n pub descriptor_set_layouts: Vec<vk::DescriptorSetLayout>,\n\n pub pipeline_bind_point: vk::PipelineBindPoint,\n\n}\n\npub struct ComputePipeline {\n\n pub common: ShaderPipelineCommon,\n\n pub group_size: [u32; 3],\n\n}\n\n\n\nimpl std::ops::Deref for ComputePipeline {\n\n type Target = ShaderPipelineCommon;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.common\n", "file_path": "crates/lib/kajiya-backend/src/vulkan/shader.rs", "rank": 30, "score": 101186.67677996015 }, { "content": "pub fn flatten_plain_field<T: Copy + Sized>(writer: &mut impl std::io::Write, data: &T) {\n\n writer\n\n .write_all(unsafe {\n\n std::slice::from_raw_parts(data as *const T as *const u8, std::mem::size_of::<T>())\n\n })\n\n .unwrap();\n\n}\n\n\n", "file_path": "crates/lib/kajiya-asset/src/mesh.rs", "rank": 31, "score": 100921.43893144121 }, { "content": "/// Return type of 
`import`.\n\ntype Import = (Document, Vec<BufferBytes>, Vec<ImageSource>);\n\n\n\n/// Represents the set of URI schemes the importer supports.\n", "file_path": "crates/lib/kajiya-asset/src/import_gltf.rs", "rank": 32, "score": 100420.04179795095 }, { "content": "type DescriptorSetLayout = HashMap<u32, rspirv_reflect::DescriptorInfo>;\n", "file_path": "crates/lib/kajiya-backend/src/vulkan/shader.rs", "rank": 33, "score": 99374.58795057566 }, { "content": "pub fn create_gpu_query(scope: RenderScopeDesc, user_id: usize) -> GpuProfilerQueryId {\n\n GPU_PROFILER.lock().create_gpu_query(scope, user_id)\n\n}\n\n\n", "file_path": "crates/lib/kajiya-backend/src/gpu_profiler.rs", "rank": 34, "score": 99144.86954453913 }, { "content": "pub fn hash_combine2(x: u32, y: u32) -> u32 {\n\n const M: u32 = 1664525;\n\n const C: u32 = 1013904223;\n\n let mut seed = (x * M + y + C) * M;\n\n // Tempering (from Matsumoto)\n\n seed ^= seed >> 11;\n\n seed ^= (seed << 7) & 0x9d2c5680;\n\n seed ^= (seed << 15) & 0xefc60000;\n\n seed ^= seed >> 18;\n\n seed\n\n}\n\n\n", "file_path": "crates/lib/rust-shaders-shared/src/util.rs", "rank": 35, "score": 98571.34325231006 }, { "content": "pub fn load2f(data: &[u32], byte_offset: u32) -> Vec2 {\n\n let offset = (byte_offset >> 2) as usize;\n\n let a = f32::from_bits(data[offset]);\n\n let b = f32::from_bits(data[offset + 1]);\n\n Vec2::new(a, b)\n\n}\n\n\n", "file_path": "crates/lib/rust-shaders-shared/src/util.rs", "rank": 36, "score": 95653.23816548922 }, { "content": "pub fn load4f(data: &[u32], byte_offset: u32) -> Vec4 {\n\n let offset = (byte_offset >> 2) as usize;\n\n let a = f32::from_bits(data[offset]);\n\n let b = f32::from_bits(data[offset + 1]);\n\n let c = f32::from_bits(data[offset + 2]);\n\n let d = f32::from_bits(data[offset + 3]);\n\n Vec4::new(a, b, c, d)\n\n}\n\n\n", "file_path": "crates/lib/rust-shaders-shared/src/util.rs", "rank": 37, "score": 95653.23816548922 }, { "content": "pub fn load3f(data: &[u32], byte_offset: u32) -> 
Vec3 {\n\n let offset = (byte_offset >> 2) as usize;\n\n let a = f32::from_bits(data[offset]);\n\n let b = f32::from_bits(data[offset + 1]);\n\n let c = f32::from_bits(data[offset + 2]);\n\n Vec3::new(a, b, c)\n\n}\n\n\n", "file_path": "crates/lib/rust-shaders-shared/src/util.rs", "rank": 38, "score": 95653.23816548922 }, { "content": "/// Decode mesh vertex from Kajiya (\"core\", position + normal packed together)\n\n/// The returned normal is not normalized (but close).\n\npub fn load_vertex(data: &[u32], byte_offset: u32) -> (Vec3, Vec3) {\n\n let core_offset = (byte_offset >> 2) as usize;\n\n let in_pos = Vec3::new(\n\n f32::from_bits(data[core_offset]),\n\n f32::from_bits(data[core_offset + 1]),\n\n f32::from_bits(data[core_offset + 2]),\n\n );\n\n let in_normal = unpack_unit_direction_11_10_11(data[core_offset + 3]);\n\n (in_pos, in_normal)\n\n}\n\n\n", "file_path": "crates/lib/rust-shaders-shared/src/util.rs", "rank": 39, "score": 91865.27131795073 }, { "content": "pub fn store_vertex(data: &mut [u32], byte_offset: u32, position: Vec3, normal: Vec3) {\n\n let offset = (byte_offset >> 2) as usize;\n\n let packed_normal = pack_unit_direction_11_10_11(normal.x, normal.y, normal.z);\n\n data[offset] = position.x.to_bits();\n\n data[offset + 1] = position.y.to_bits();\n\n data[offset + 2] = position.z.to_bits();\n\n data[offset + 3] = packed_normal;\n\n}\n\n\n", "file_path": "crates/lib/rust-shaders-shared/src/util.rs", "rank": 40, "score": 85450.9685416412 }, { "content": "#[derive(Clone, Copy, Debug)]\n\nstruct Brdf {\n\n spec_albedo: f64,\n\n diffuse_albedo: f64,\n\n}\n\n\n\nimpl Brdf {\n\n fn calculate(&self) -> f64 {\n\n self.spec_albedo + (1.0 - self.spec_albedo) * self.diffuse_albedo\n\n }\n\n\n\n fn with_scaled_albedo(mut self, k: f64) -> Self {\n\n self.spec_albedo *= k;\n\n self.diffuse_albedo *= k;\n\n self\n\n }\n\n}\n\n\n", "file_path": "misc/metalness_norm/src/main.rs", "rank": 41, "score": 80722.39203476343 }, { "content": "#[derive(Default, Clone, 
Copy)]\n\nstruct CascadeScroll {\n\n scroll: [i32; 4],\n\n}\n\n\n\nimpl CascadeScroll {\n\n fn volume_scroll_offset_from(&self, other: &Self) -> [i32; 4] {\n\n array_init::array_init(|i| {\n\n (self.scroll[i] - other.scroll[i]).clamp(-(VOLUME_DIMS as i32), VOLUME_DIMS as i32)\n\n })\n\n }\n\n}\n\n\n\nimpl CsgiRenderer {\n\n pub fn volume_size(gi_scale: f32) -> f32 {\n\n 12.0 * VOLUME_WORLD_SCALE_MULT * gi_scale\n\n }\n\n\n\n pub fn voxel_size(gi_scale: f32) -> f32 {\n\n Self::volume_size(gi_scale) / VOLUME_DIMS as f32\n\n }\n", "file_path": "crates/lib/kajiya/src/renderers/csgi.rs", "rank": 42, "score": 78803.03689640737 }, { "content": "struct ResourceInfo {\n\n _lifetimes: Vec<ResourceLifetime>,\n\n image_usage_flags: Vec<vk::ImageUsageFlags>,\n\n buffer_usage_flags: Vec<vk::BufferUsageFlags>,\n\n}\n\n\n\npub struct RenderGraphExecutionParams<'a> {\n\n pub device: &'a Device,\n\n pub pipeline_cache: &'a mut PipelineCache,\n\n pub frame_descriptor_set: vk::DescriptorSet,\n\n pub frame_constants_layout: FrameConstantsLayout,\n\n pub profiler_data: &'a VkProfilerData,\n\n}\n\n\n\npub struct RenderGraphPipelines {\n\n pub(crate) compute: Vec<ComputePipelineHandle>,\n\n pub(crate) raster: Vec<RasterPipelineHandle>,\n\n pub(crate) rt: Vec<RtPipelineHandle>,\n\n}\n\n\n\npub struct CompiledRenderGraph {\n\n rg: RenderGraph,\n\n resource_info: ResourceInfo,\n\n pipelines: RenderGraphPipelines,\n\n}\n\n\n", "file_path": "crates/lib/kajiya-rg/src/graph.rs", "rank": 43, "score": 78803.03689640737 }, { "content": "#[derive(Copy, Clone)]\n\n#[repr(C)]\n\nstruct GpuMaterial {\n\n base_color_mult: [f32; 4],\n\n maps: [u32; 4],\n\n}\n\n\n\nimpl Default for GpuMaterial {\n\n fn default() -> Self {\n\n Self {\n\n base_color_mult: [0.0f32; 4],\n\n maps: [0; 4],\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/lib/kajiya-asset/src/mesh.rs", "rank": 44, "score": 78803.03689640737 }, { "content": "#[repr(C)]\n\n#[derive(Copy, Clone)]\n\nstruct GpuMesh {\n\n vertex_core_offset: u32,\n\n 
vertex_uv_offset: u32,\n\n vertex_mat_offset: u32,\n\n vertex_aux_offset: u32,\n\n vertex_tangent_offset: u32,\n\n\n\n mat_data_offset: u32,\n\n index_offset: u32,\n\n}\n\n\n\n#[derive(Clone, Copy, Hash, PartialEq, Eq, Debug)]\n\npub struct MeshHandle(pub usize);\n\n\n\n#[derive(Clone, Copy, Hash, PartialEq, Eq, Debug)]\n\npub struct InstanceHandle(pub usize);\n\n\n\nconst MAX_GPU_MESHES: usize = 1024;\n\nconst VERTEX_BUFFER_CAPACITY: usize = 1024 * 1024 * 512;\n\nconst TLAS_PREALLOCATE_BYTES: usize = 1024 * 1024 * 32;\n", "file_path": "crates/lib/kajiya/src/world_renderer.rs", "rank": 45, "score": 78803.03689640737 }, { "content": "#[derive(Debug)]\n\nstruct ResourceLifetime {\n\n //first_access: Option<usize>,\n\n last_access: Option<usize>,\n\n}\n\n\n", "file_path": "crates/lib/kajiya-rg/src/graph.rs", "rank": 46, "score": 78803.03689640737 }, { "content": "struct PendingBufferUpload {\n\n source: Box<dyn BufferDataSource>,\n\n offset: u64,\n\n}\n\n\n\nimpl<T: Copy> BufferDataSource for &'static [T] {\n\n fn as_bytes(&self) -> &[u8] {\n\n unsafe {\n\n std::slice::from_raw_parts(\n\n self.as_ptr() as *const u8,\n\n self.len() * std::mem::size_of::<T>(),\n\n )\n\n }\n\n }\n\n\n\n fn alignment(&self) -> u64 {\n\n std::mem::align_of::<T>() as u64\n\n }\n\n}\n\n\n", "file_path": "crates/lib/kajiya/src/buffer_builder.rs", "rank": 47, "score": 77903.56055593884 }, { "content": "struct GfxResources {\n\n //imgui_render_pass: RenderPass,\n\n pub imgui_render_pass: vk::RenderPass,\n\n pub imgui_framebuffer: vk::Framebuffer,\n\n pub imgui_texture: Arc<Image>,\n\n}\n\n\n\npub struct ImGuiBackendInner {\n\n imgui_renderer: ash_imgui::Renderer,\n\n gfx: Option<GfxResources>,\n\n}\n\n\n\npub struct ImGuiBackend {\n\n inner: Arc<Mutex<ImGuiBackendInner>>,\n\n device: Arc<Device>,\n\n imgui_platform: WinitPlatform,\n\n}\n\n\n\nimpl ImGuiBackend {\n\n pub fn new(\n", "file_path": "crates/lib/kajiya-imgui/src/imgui_backend.rs", "rank": 48, "score": 77903.56055593884 }, { 
"content": "struct PendingDebugPass {\n\n img: Handle<Image>,\n\n}\n\n\n\nimpl RenderGraph {\n\n pub fn add_pass<'s>(&'s mut self, name: &str) -> PassBuilder<'s> {\n\n let pass_idx = self.passes.len();\n\n\n\n PassBuilder {\n\n rg: self,\n\n pass_idx,\n\n pass: Some(RecordedPass::new(name, pass_idx)),\n\n }\n\n }\n\n\n\n fn calculate_resource_info(&self) -> ResourceInfo {\n\n let mut lifetimes: Vec<ResourceLifetime> = self\n\n .resources\n\n .iter()\n\n .map(|res| match res {\n", "file_path": "crates/lib/kajiya-rg/src/graph.rs", "rank": 49, "score": 77903.56055593884 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct DlssOptimalSettings {\n\n optimal_render_extent: [u32; 2],\n\n max_render_extent: [u32; 2],\n\n min_render_extent: [u32; 2],\n\n}\n\n\n\nimpl DlssOptimalSettings {\n\n fn for_target_resolution_at_quality(\n\n ngx_params: *mut NVSDK_NGX_Parameter,\n\n target_resolution: [u32; 2],\n\n value: NVSDK_NGX_PerfQuality_Value,\n\n ) -> Self {\n\n let mut optimal_render_extent = [0, 0];\n\n let mut max_render_extent = [0, 0];\n\n let mut min_render_extent = [0, 0];\n\n\n\n unsafe {\n\n let mut get_optimal_settings_fn = ptr::null_mut();\n\n ngx_checked!(NVSDK_NGX_Parameter_GetVoidPointer(\n\n ngx_params,\n", "file_path": "crates/lib/kajiya/src/renderers/dlss.rs", "rank": 50, "score": 77903.56055593884 }, { "content": "struct KeyMapState {\n\n map: KeyMap,\n\n activation: f32,\n\n}\n\n\n\npub struct KeyboardMap {\n\n bindings: Vec<(VirtualKeyCode, KeyMapState)>,\n\n}\n\n\n\nimpl Default for KeyboardMap {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl KeyboardMap {\n\n pub fn new() -> Self {\n\n Self {\n\n bindings: Default::default(),\n\n }\n", "file_path": "crates/lib/kajiya-simple/src/input.rs", "rank": 51, "score": 77903.56055593884 }, { "content": "#[repr(C)]\n\n#[derive(Clone, Debug, Copy)]\n\nstruct GeometryInstance {\n\n transform: [f32; 12],\n\n instance_id_and_mask: u32,\n\n instance_sbt_offset_and_flags: u32,\n\n blas_address: 
vk::DeviceAddress,\n\n}\n\n\n\nimpl GeometryInstance {\n\n fn new(\n\n transform: [f32; 12],\n\n id: u32,\n\n mask: u8,\n\n sbt_offset: u32,\n\n flags: ash::vk::GeometryInstanceFlagsKHR,\n\n blas_address: vk::DeviceAddress,\n\n ) -> Self {\n\n let mut instance = GeometryInstance {\n\n transform,\n\n instance_id_and_mask: 0,\n\n instance_sbt_offset_and_flags: 0,\n", "file_path": "crates/lib/kajiya-backend/src/vulkan/ray_tracing.rs", "rank": 52, "score": 77040.93026588068 }, { "content": "struct MainLoopOptional {\n\n #[cfg(feature = \"dear-imgui\")]\n\n imgui_backend: ImGuiBackend,\n\n\n\n #[cfg(feature = \"dear-imgui\")]\n\n imgui: imgui::Context,\n\n\n\n #[cfg(feature = \"puffin-server\")]\n\n _puffin_server: puffin_http::Server,\n\n}\n\n\n\npub enum WindowScale {\n\n Exact(f32),\n\n\n\n // Follow resolution scaling preferences in the OS\n\n SystemNative,\n\n}\n\n\n\npub enum FullscreenMode {\n\n Borderless,\n", "file_path": "crates/lib/kajiya-simple/src/main_loop.rs", "rank": 53, "score": 77040.93026588068 }, { "content": "struct RtPipelineCacheEntry {\n\n lazy_handle: Lazy<CompiledPipelineShaders>,\n\n desc: RayTracingPipelineDesc,\n\n pipeline: Option<Arc<RayTracingPipeline>>,\n\n}\n\n\n\npub struct PipelineCache {\n\n lazy_cache: Arc<LazyCache>,\n\n\n\n compute_entries: HashMap<ComputePipelineHandle, ComputePipelineCacheEntry>,\n\n raster_entries: HashMap<RasterPipelineHandle, RasterPipelineCacheEntry>,\n\n rt_entries: HashMap<RtPipelineHandle, RtPipelineCacheEntry>,\n\n\n\n compute_shader_to_handle: HashMap<ShaderSource, ComputePipelineHandle>,\n\n raster_shaders_to_handle: HashMap<Vec<PipelineShaderDesc>, RasterPipelineHandle>,\n\n rt_shaders_to_handle: HashMap<Vec<PipelineShaderDesc>, RtPipelineHandle>,\n\n}\n\n\n\nimpl PipelineCache {\n\n pub fn new(lazy_cache: &Arc<LazyCache>) -> Self {\n", "file_path": "crates/lib/kajiya-backend/src/pipeline_cache.rs", "rank": 55, "score": 76212.92743101165 }, { "content": "struct RasterPipelineCacheEntry {\n\n 
lazy_handle: Lazy<CompiledPipelineShaders>,\n\n desc: RasterPipelineDesc,\n\n pipeline: Option<Arc<RasterPipeline>>,\n\n}\n\n\n", "file_path": "crates/lib/kajiya-backend/src/pipeline_cache.rs", "rank": 56, "score": 76212.92743101165 }, { "content": "struct ComputePipelineCacheEntry {\n\n lazy_handle: Lazy<CompiledShader>,\n\n desc: ComputePipelineDesc,\n\n pipeline: Option<Arc<ComputePipeline>>,\n\n}\n\n\n\n#[derive(Clone, Copy, Hash, Eq, PartialEq)]\n\npub struct RasterPipelineHandle(usize);\n\n\n\n#[derive(Clone, Copy, Hash, Eq, PartialEq)]\n\npub struct RtPipelineHandle(usize);\n\n\n\npub struct CompiledPipelineShaders {\n\n shaders: Vec<PipelineShader<Arc<CompiledShader>>>,\n\n}\n\n\n\n#[derive(Clone, Hash)]\n\npub struct CompilePipelineShaders {\n\n shader_descs: Vec<PipelineShaderDesc>,\n\n}\n", "file_path": "crates/lib/kajiya-backend/src/pipeline_cache.rs", "rank": 57, "score": 76212.92743101165 }, { "content": "# Using `kajiya` as a crate\n\n\n\n_Please note that the project is experimental. Shipping games/apps is not one of its current goals, and is not actively supported._\n\n\n\n`kajiya` is not currently published on `crates.io`, and doesn't have an asset packaging system. For those reasons, using it as a crate is a bit fiddly. It's possible though.\n\n\n\nDocumentation is currently scarce, meaning that it's best to follow examples (see [`crates/bin/hello`](../crates/bin/hello)).\n\n\n\n## VFS\n\n\n\nThe renderer has a basic virtual file system used for loading assets (models, textures, shaders). 
That makes it possible to work on a game/app while pointing it at the assets in a separately synced `kajiya` repository.\n\n\n\n```rust\n\n// Point `kajiya` to standard assets and shaders in the parent directory\n\nset_standard_vfs_mount_points(\"../kajiya\");\n\n\n\n// Game-specific assets in the current directory\n\nset_vfs_mount_point(\"/baked\", \"./baked\");\n\n```\n\n\n\n## Cargo patches\n\n\n\nFor a standalone project to compile, please copy the `[patch.crates-io]` section from the top-level [`Cargo.toml`](../Cargo.toml)\n\n\n\n# Larger examples\n\n\n\n* [Cornell McRay t'Racing](https://github.com/h3r2tic/cornell-mcray) -- a simple racing game\n", "file_path": "docs/using-kajiya.md", "rank": 58, "score": 74958.91922440892 }, { "content": "## Using DLSS\n\n\n\nDLSS is supported on Nvidia RTX GPUs, and `kajiya` can currently use it when running on Windows.\n\n\n\n#### SDK\n\n\n\nNvidia's DLSS EULA prohibits distribution of the DLSS SDK, so you will have to obtain it yourself. The stand-alone SDK currently requires an NVIDIA Developer Program membership, _however_ the Unreal Enigine 5 plugin does not, yet it contains the necessary files.\n\n\n\nTherefore, the easiest way to get DLSS into `kajiya` is to [download the UE5 DLSS plugin](https://developer.nvidia.com/dlss-getting-started#ue-version), and extract the following:\n\n\n\n* Copy `DLSS/Binaries/ThirdParty/Win64/nvngx_dlss.dll` to the root `kajiya` folder (where this README resides).\n\n* Copy the entire `DLSS/Source/ThirdParty/NGX` folder to `crates/lib/ngx_dlss/NGX`\n\n\n\n#### Rust bindings\n\n\n\nPlease make sure you can run `bindgen`, which is necessary to generate a Rust binding to the SDK. Here's the official [installation instructions and requirements page](https://rust-lang.github.io/rust-bindgen/requirements.html). 
If `cargo` complains about `libclang.dll`, it's probably this.\n\n\n\n#### Usage\n\n\n\nWhen building `kajiya`, use the `dlss` Cargo feature, and specify temporal upsampling, e.g.:\n\n\n\n```\n\ncargo run --bin view --release --features dlss -- --scene battle --no-debug --temporal-upsampling 1.5 --width 1920 --height 1080\n\n```\n\n\n\nThis will run DLSS _Quality_ mode. `--temporal-upsampling 2.0` corresponds to _Performance_.\n\n\n\nPlease note that while DLSS works well for AAA-style content, it currently struggles with flat colors and smooth gradients. The built-in `kajiya` TAA and its temporal upsampling tends to look better there.\n", "file_path": "docs/using-dlss.md", "rank": 59, "score": 74958.4145268592 }, { "content": "#[allow(non_camel_case_types)]\n\ntype PFN_NVSDK_NGX_DLSS_GetOptimalSettingsCallback =\n\n extern \"cdecl\" fn(*mut NVSDK_NGX_Parameter) -> NVSDK_NGX_Result;\n", "file_path": "crates/lib/kajiya/src/renderers/dlss.rs", "rank": 60, "score": 74335.00059725236 }, { "content": "struct TempListInner<T> {\n\n payload: arrayvec::ArrayVec<[T; 8]>,\n\n next: Option<TempList<T>>,\n\n}\n\n\n\nimpl<T> Default for TempListInner<T> {\n\n fn default() -> Self {\n\n Self {\n\n payload: Default::default(),\n\n next: None,\n\n }\n\n }\n\n}\n\n\n\npub struct TempList<T>(UnsafeCell<Box<TempListInner<T>>>);\n\n\n\nimpl<T> Default for TempList<T> {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n", "file_path": "crates/lib/kajiya-backend/src/chunky_list.rs", "rank": 61, "score": 73449.707263836 }, { "content": "pub trait LookThroughCamera {\n\n fn through(self, lens: &CameraLens) -> CameraMatrices;\n\n}\n\n\n\nimpl<T> LookThroughCamera for T\n\nwhere\n\n T: IntoCameraBodyMatrices,\n\n{\n\n fn through(self, lens: &CameraLens) -> CameraMatrices {\n\n let body = self.into_camera_body_matrices();\n\n let lens = lens.calc_matrices();\n\n CameraMatrices {\n\n view_to_clip: lens.view_to_clip,\n\n clip_to_view: lens.clip_to_view,\n\n world_to_view: 
body.world_to_view,\n\n view_to_world: body.view_to_world,\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/lib/kajiya/src/camera.rs", "rank": 62, "score": 71933.80755539102 }, { "content": "pub trait Resource {\n\n type Desc: ResourceDesc;\n\n\n\n fn borrow_resource(res: &AnyRenderResource) -> &Self;\n\n}\n\n\n\nimpl Resource for Image {\n\n type Desc = ImageDesc;\n\n\n\n fn borrow_resource(res: &AnyRenderResource) -> &Self {\n\n match res.borrow() {\n\n AnyRenderResourceRef::Image(img) => img,\n\n _ => unimplemented!(),\n\n }\n\n }\n\n}\n\n\n\nimpl Resource for Buffer {\n\n type Desc = BufferDesc;\n\n\n", "file_path": "crates/lib/kajiya-rg/src/resource.rs", "rank": 63, "score": 71933.80755539102 }, { "content": "#[spirv(compute(threads(8, 8)))]\n\npub fn ssgi_cs(\n\n #[spirv(descriptor_set = 0, binding = 0)] gbuffer_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 1)] half_depth_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 2)] view_normal_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 3)] prev_radiance_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 4)] reprojection_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 5)] output_tex: &Image!(2D, type=f32, sampled=false),\n\n #[spirv(uniform, descriptor_set = 0, binding = 6)] constants: &SsgiConstants,\n\n #[spirv(uniform, descriptor_set = 2, binding = 0)] frame_constants: &FrameConstants,\n\n #[spirv(global_invocation_id)] px: UVec3,\n\n) {\n\n /* Settings */\n\n\n\n let uv = get_uv_u(px.xy(), constants.output_tex_size);\n\n\n\n let depth: Vec4 = half_depth_tex.fetch(px.xy());\n\n let depth = depth.x;\n\n if depth == 0.0 {\n\n unsafe {\n\n output_tex.write(px.xy(), vec4(0.0, 0.0, 0.0, 1.0));\n", "file_path": "crates/lib/rust-shaders/src/ssgi.rs", "rank": 64, "score": 71052.40218080992 }, { "content": "#[spirv(compute(threads(8, 8)))]\n\npub 
fn upsample_cs(\n\n #[spirv(descriptor_set = 0, binding = 0)] ssgi_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 1)] depth_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 2)] gbuffer_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 3)] output_tex: &Image!(2D, type=f32, sampled=false),\n\n #[spirv(global_invocation_id)] px: IVec3,\n\n) {\n\n let mut result;\n\n let mut w_sum = 0.0f32;\n\n\n\n let center_depth: Vec4 = depth_tex.fetch(px.xy());\n\n let center_depth = center_depth.x;\n\n\n\n if center_depth != 0.0 {\n\n let center_normal: Vec4 = gbuffer_tex.fetch(px.xy());\n\n let center_normal = unpack_normal_11_10_11(center_normal.y);\n\n\n\n let center_ssgi = Vec4::ZERO;\n\n w_sum = 0.0f32;\n\n result = center_ssgi;\n", "file_path": "crates/lib/rust-shaders/src/ssgi.rs", "rank": 65, "score": 71052.40218080992 }, { "content": "#[spirv(compute(threads(64, 1, 1)))] // 64 == GROUP_WIDTH\n\npub fn blur_cs(\n\n #[spirv(descriptor_set = 0, binding = 0)] input_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 1)] output_tex: &Image!(2D, type=f32, sampled=false),\n\n #[spirv(workgroup)] vblur_out: &mut [Vec4; VBLUR_WINDOW_SIZE], // groupshared float4 vblur_out[VBLUR_WINDOW_SIZE];\n\n #[spirv(global_invocation_id)] px: UVec3,\n\n #[spirv(local_invocation_id)] px_within_group: UVec3,\n\n #[spirv(workgroup_id)] group_id: UVec3,\n\n) {\n\n let px = px.truncate();\n\n let group_id = group_id.truncate();\n\n let mut xfetch = px_within_group.x;\n\n while xfetch < VBLUR_WINDOW_SIZE as u32 {\n\n vblur_into_shmem(input_tex, vblur_out, px.as_ivec2(), xfetch as i32, group_id);\n\n xfetch += GROUP_WIDTH;\n\n }\n\n\n\n // GroupMemoryBarrierWithGroupSync();\n\n unsafe {\n\n control_barrier::<\n\n { Scope::Workgroup as u32 },\n", "file_path": "crates/lib/rust-shaders/src/blur.rs", "rank": 66, "score": 71052.29173420752 }, { "content": 
"#[allow(clippy::too_many_arguments)]\n\npub fn light_gbuffer(\n\n rg: &mut RenderGraph,\n\n gbuffer_depth: &GbufferDepth,\n\n shadow_mask: &rg::Handle<Image>,\n\n ssgi: &rg::Handle<Image>,\n\n rtr: &rg::Handle<Image>,\n\n rtdgi: &rg::Handle<Image>,\n\n temporal_output: &mut rg::Handle<Image>,\n\n output: &mut rg::Handle<Image>,\n\n csgi_volume: &super::csgi::CsgiVolume,\n\n sky_cube: &rg::Handle<Image>,\n\n convolved_sky_cube: &rg::Handle<Image>,\n\n bindless_descriptor_set: vk::DescriptorSet,\n\n debug_shading_mode: usize,\n\n) {\n\n SimpleRenderPass::new_compute(rg.add_pass(\"light gbuffer\"), \"/shaders/light_gbuffer.hlsl\")\n\n .read(&gbuffer_depth.gbuffer)\n\n .read_aspect(&gbuffer_depth.depth, vk::ImageAspectFlags::DEPTH)\n\n .read(shadow_mask)\n\n .read(ssgi)\n", "file_path": "crates/lib/kajiya/src/renderers/deferred.rs", "rank": 67, "score": 71047.2597491177 }, { "content": "pub trait ConstBlob {\n\n fn push_self(\n\n self: Box<Self>,\n\n dynamic_constants: &mut dynamic_constants::DynamicConstants,\n\n ) -> u32;\n\n}\n\n\n\nimpl<T> ConstBlob for T\n\nwhere\n\n T: Copy + 'static,\n\n{\n\n fn push_self(\n\n self: Box<Self>,\n\n dynamic_constants: &mut dynamic_constants::DynamicConstants,\n\n ) -> u32 {\n\n dynamic_constants.push(self.as_ref())\n\n }\n\n}\n\n\n", "file_path": "crates/lib/kajiya-rg/src/hl.rs", "rank": 68, "score": 71047.2597491177 }, { "content": "pub fn post_process(\n\n rg: &mut RenderGraph,\n\n input: &rg::Handle<Image>,\n\n //debug_input: &rg::Handle<Image>,\n\n bindless_descriptor_set: vk::DescriptorSet,\n\n ev_shift: f32,\n\n) -> rg::Handle<Image> {\n\n let blur_pyramid = blur_pyramid(rg, input);\n\n let rev_blur_pyramid = rev_blur_pyramid(rg, &blur_pyramid);\n\n\n\n let mut output = rg.create(input.desc().format(vk::Format::B10G11R11_UFLOAT_PACK32));\n\n\n\n //let blurred_luminance = edge_preserving_filter_luminance(rg, input);\n\n\n\n SimpleRenderPass::new_compute_rust(\n\n rg.add_pass(\"post combine\"),\n\n 
\"post_combine::post_combine_cs\",\n\n )\n\n /*SimpleRenderPass::new_compute(\n\n rg.add_pass(\"post combine\"),\n", "file_path": "crates/lib/kajiya/src/renderers/post.rs", "rank": 69, "score": 71047.2597491177 }, { "content": "// Integrate scattering over a ray for a single directional light source.\n\n// Also return the transmittance for the same ray as we are already calculating the optical depth anyway.\n\npub fn integrate_scattering(\n\n mut ray_start: Vec3,\n\n ray_dir: Vec3,\n\n mut ray_length: f32,\n\n light_dir: Vec3,\n\n light_color: Vec3,\n\n transmittance: &mut Vec3,\n\n) -> Vec3 {\n\n // We can reduce the number of atmospheric samples required to converge by spacing them exponentially closer to the camera.\n\n // This breaks space view however, so let's compensate for that with an exponent that \"fades\" to 1 as we leave the atmosphere.\n\n // let ray_height = atmosphere_height(ray_start);\n\n //float sample_distribution_exponent = 1 + saturate(1 - ray_height / ATMOSPHERE_HEIGHT) * 8; // Slightly arbitrary max exponent of 9\n\n //float sample_distribution_exponent = 1 + 8 * abs(ray_dir.y);\n\n let sample_distribution_exponent: f32 = 5.0;\n\n\n\n let intersection: Vec2 = atmosphere_intersection(ray_start, ray_dir);\n\n\n\n ray_length = ray_length.min(intersection.y);\n\n if intersection.x > 0.0 {\n\n // Advance ray to the atmosphere entry point\n", "file_path": "crates/lib/rust-shaders/src/atmosphere.rs", "rank": 70, "score": 71047.2597491177 }, { "content": "pub trait IntoCameraBodyMatrices {\n\n fn into_camera_body_matrices(self) -> CameraBodyMatrices;\n\n}\n\n\n\nimpl IntoCameraBodyMatrices for CameraBodyMatrices {\n\n fn into_camera_body_matrices(self) -> CameraBodyMatrices {\n\n self\n\n }\n\n}\n\n\n\nimpl IntoCameraBodyMatrices for (Vec3, Quat) {\n\n fn into_camera_body_matrices(self) -> CameraBodyMatrices {\n\n CameraBodyMatrices::from_position_rotation(self.0, self.1)\n\n }\n\n}\n\n\n", "file_path": "crates/lib/kajiya/src/camera.rs", "rank": 71, 
"score": 71047.2597491177 }, { "content": "#[spirv(compute(threads(8, 8)))]\n\npub fn spatial_filter_cs(\n\n #[spirv(descriptor_set = 0, binding = 0)] ssgi_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 1)] depth_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 2)] normal_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 3)] output_tex: &Image!(2D, type=f32, sampled=false),\n\n #[spirv(global_invocation_id)] px: IVec3,\n\n) {\n\n let mut result;\n\n let mut w_sum = 0.0f32;\n\n\n\n let center_depth: Vec4 = depth_tex.fetch(px.xy());\n\n let center_depth = center_depth.x;\n\n\n\n if center_depth != 0.0 {\n\n let center_normal: Vec4 = normal_tex.fetch(px.xy());\n\n let center_normal = center_normal.xyz();\n\n\n\n let center_ssgi: Vec4 = ssgi_tex.fetch(px.xy());\n\n w_sum = 1.0f32;\n\n result = center_ssgi;\n", "file_path": "crates/lib/rust-shaders/src/ssgi.rs", "rank": 72, "score": 70202.17082188654 }, { "content": "#[spirv(compute(threads(8, 8)))]\n\npub fn velocity_reduce_x(\n\n #[spirv(descriptor_set = 0, binding = 0)] input_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 1)] output_tex: &Image!(2D, type=f32, sampled=false),\n\n #[spirv(global_invocation_id)] px: UVec3,\n\n) {\n\n let mut largest_velocity = Vec3::ZERO;\n\n\n\n for x in 0..16 {\n\n let v: Vec4 = input_tex.fetch(px.xy() * uvec2(16, 1) + uvec2(x, 0));\n\n let v = v.xy();\n\n let m2 = v.dot(v);\n\n largest_velocity = if m2 > largest_velocity.z {\n\n v.extend(m2)\n\n } else {\n\n largest_velocity\n\n };\n\n }\n\n\n\n unsafe {\n\n output_tex.write(px.truncate(), largest_velocity.xy());\n\n }\n\n}\n\n\n", "file_path": "crates/lib/rust-shaders/src/motion_blur.rs", "rank": 73, "score": 70202.17082188654 }, { "content": "#[spirv(compute(threads(8, 8)))]\n\npub fn velocity_dilate(\n\n #[spirv(descriptor_set = 0, binding = 0)] input_tex: &Image!(2D, type=f32, 
sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 1)] output_tex: &Image!(2D, type=f32, sampled=false),\n\n #[spirv(global_invocation_id)] px: IVec3,\n\n) {\n\n let mut largest_velocity = Vec3::ZERO;\n\n let dilate_amount = 2i32;\n\n\n\n for x in -dilate_amount..=dilate_amount {\n\n for y in -dilate_amount..=dilate_amount {\n\n let v: Vec4 = input_tex.fetch(px.xy() + IVec2::new(x, y));\n\n let v = v.xy();\n\n let m2 = v.dot(v);\n\n largest_velocity = if m2 > largest_velocity.z {\n\n v.extend(m2)\n\n } else {\n\n largest_velocity\n\n };\n\n }\n\n }\n\n\n\n unsafe {\n\n output_tex.write(px.truncate(), largest_velocity.xy());\n\n }\n\n}\n", "file_path": "crates/lib/rust-shaders/src/motion_blur.rs", "rank": 74, "score": 70202.17082188654 }, { "content": "#[spirv(compute(threads(8, 8)))]\n\npub fn motion_blur(\n\n #[spirv(descriptor_set = 0, binding = 0)] input_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 1)] velocity_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 2)] tile_velocity_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 3)] depth_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 4)] output_tex: &Image!(2D, type=f32, sampled=false),\n\n #[spirv(descriptor_set = 0, binding = 32)] sampler_lnc: &Sampler,\n\n #[spirv(descriptor_set = 0, binding = 33)] sampler_nnc: &Sampler,\n\n //#[spirv(descriptor_set = 0, binding = 33)] sampler_nnc: &Sampler,\n\n #[spirv(uniform, descriptor_set = 0, binding = 5)] constants: &Constants,\n\n #[spirv(uniform, descriptor_set = 2, binding = 0)] frame_constants: &FrameConstants,\n\n #[spirv(global_invocation_id)] px: UVec3,\n\n) {\n\n let uv = get_uv_u(px.xy(), constants.output_tex_size);\n\n let blur_scale = 0.5 * constants.motion_blur_scale;\n\n let depth_tex_size = constants.depth_tex_size.xy();\n\n let output_tex_size = constants.output_tex_size.xy();\n\n\n\n // Scramble tile 
coordinates to diffuse the tile quantization in noise\n\n let mut noise1: i32;\n", "file_path": "crates/lib/rust-shaders/src/motion_blur.rs", "rank": 75, "score": 70202.17082188654 }, { "content": "#[spirv(compute(threads(8, 8)))]\n\npub fn velocity_reduce_y(\n\n #[spirv(descriptor_set = 0, binding = 0)] input_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 1)] output_tex: &Image!(2D, type=f32, sampled=false),\n\n #[spirv(global_invocation_id)] px: UVec3,\n\n) {\n\n let mut largest_velocity = Vec3::ZERO;\n\n\n\n for y in 0..16 {\n\n let v: Vec4 = input_tex.fetch(px.xy() * uvec2(1, 16) + uvec2(0, y));\n\n let v = v.xy();\n\n let m2 = v.dot(v);\n\n largest_velocity = if m2 > largest_velocity.z {\n\n v.extend(m2)\n\n } else {\n\n largest_velocity\n\n };\n\n }\n\n\n\n unsafe {\n\n output_tex.write(px.truncate(), largest_velocity.xy());\n\n }\n\n}\n\n\n", "file_path": "crates/lib/rust-shaders/src/motion_blur.rs", "rank": 76, "score": 70202.17082188654 }, { "content": "#[spirv(compute(threads(8, 8)))]\n\npub fn temporal_filter_cs(\n\n #[spirv(descriptor_set = 0, binding = 0)] input_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 1)] history_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 2)] reprojection_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 3)] output_tex: &Image!(2D, type=f32, sampled=false),\n\n #[spirv(uniform, descriptor_set = 0, binding = 4)] output_tex_size: &Vec4,\n\n #[spirv(descriptor_set = 0, binding = 32)] sampler_lnc: &Sampler,\n\n #[spirv(global_invocation_id)] px: UVec3,\n\n) {\n\n let uv = get_uv_u(px.xy(), *output_tex_size);\n\n let center: Vec4 = input_tex.fetch(px.xy());\n\n\n\n let reproj: Vec4 = reprojection_tex.fetch(px.xy());\n\n let history: Vec4 = history_tex.sample_by_lod(*sampler_lnc, uv + reproj.xy(), 0.0);\n\n\n\n let mut vsum = Vec4::ZERO;\n\n let mut vsum2 = Vec4::ZERO;\n\n let mut wsum = 
0.0;\n\n\n\n let k = 2i32;\n", "file_path": "crates/lib/rust-shaders/src/ssgi.rs", "rank": 77, "score": 70202.17082188654 }, { "content": "pub fn raster_meshes(\n\n rg: &mut RenderGraph,\n\n render_pass: Arc<RenderPass>,\n\n gbuffer_depth: &mut GbufferDepth,\n\n velocity_img: &mut rg::Handle<Image>,\n\n mesh_data: RasterMeshesData<'_>,\n\n) {\n\n let mut pass = rg.add_pass(\"raster simple\");\n\n\n\n let pipeline = pass.register_raster_pipeline(\n\n &[\n\n PipelineShaderDesc::builder(ShaderPipelineStage::Vertex)\n\n // .rust_source(\"raster_simple::raster_simple_vs\")\n\n .hlsl_source(\"/shaders/raster_simple_vs.hlsl\")\n\n .build()\n\n .unwrap(),\n\n PipelineShaderDesc::builder(ShaderPipelineStage::Pixel)\n\n // .rust_source(\"raster_simple::raster_simple_fs\")\n\n .hlsl_source(\"/shaders/raster_simple_ps.hlsl\")\n\n .build()\n", "file_path": "crates/lib/kajiya/src/renderers/raster_meshes.rs", "rank": 78, "score": 70197.02839019432 }, { "content": "pub fn raster_sdf(\n\n rg: &mut RenderGraph,\n\n render_pass: Arc<RenderPass>,\n\n depth_img: &mut Handle<Image>,\n\n color_img: &mut Handle<Image>,\n\n raster_sdf_data: RasterSdfData<'_>,\n\n) {\n\n let mut pass = rg.add_pass();\n\n\n\n let pipeline = pass.register_raster_pipeline(\n\n &[\n\n RasterPipelineShader {\n\n code: \"/shaders/raster_simple_vs.hlsl\",\n\n desc: RasterShaderDesc::builder(RasterStage::Vertex)\n\n .build()\n\n .unwrap(),\n\n },\n\n RasterPipelineShader {\n\n code: \"/shaders/raster_simple_ps.hlsl\",\n\n desc: RasterShaderDesc::builder(RasterStage::Pixel)\n", "file_path": "crates/lib/kajiya/src/renderers/old/sdf.rs", "rank": 79, "score": 70197.02839019432 }, { "content": "pub fn calculate_reprojection_map(\n\n rg: &mut rg::TemporalRenderGraph,\n\n gbuffer_depth: &GbufferDepth,\n\n velocity_img: &rg::Handle<Image>,\n\n) -> rg::Handle<Image> {\n\n //let mut output_tex = rg.create(depth.desc().format(vk::Format::R16G16B16A16_SFLOAT));\n\n //let mut output_tex = 
rg.create(depth.desc().format(vk::Format::R32G32B32A32_SFLOAT));\n\n let mut output_tex = rg.create(\n\n gbuffer_depth\n\n .depth\n\n .desc()\n\n .format(vk::Format::R16G16B16A16_SNORM),\n\n );\n\n\n\n let mut prev_depth = rg\n\n .get_or_create_temporal(\n\n \"reprojection.prev_depth\",\n\n gbuffer_depth\n\n .depth\n\n .desc()\n", "file_path": "crates/lib/kajiya/src/renderers/reprojection.rs", "rank": 80, "score": 70197.02839019432 }, { "content": "pub fn motion_blur(\n\n rg: &mut RenderGraph,\n\n input: &rg::Handle<Image>,\n\n depth: &rg::Handle<Image>,\n\n reprojection_map: &rg::Handle<Image>,\n\n) -> rg::Handle<Image> {\n\n const VELOCITY_TILE_SIZE: u32 = 16;\n\n\n\n let mut velocity_reduced_x = rg.create(\n\n reprojection_map\n\n .desc()\n\n .div_up_extent([VELOCITY_TILE_SIZE, 1, 1])\n\n .format(vk::Format::R16G16_SFLOAT),\n\n );\n\n\n\n SimpleRenderPass::new_compute_rust(\n\n rg.add_pass(\"velocity reduce x\"),\n\n \"motion_blur::velocity_reduce_x\",\n\n )\n\n .read(reprojection_map)\n", "file_path": "crates/lib/kajiya/src/renderers/motion_blur.rs", "rank": 81, "score": 70197.02839019432 }, { "content": "pub trait BufferDataSource {\n\n fn as_bytes(&self) -> &[u8];\n\n fn alignment(&self) -> u64;\n\n}\n\n\n", "file_path": "crates/lib/kajiya/src/buffer_builder.rs", "rank": 82, "score": 70197.02839019432 }, { "content": "#[spirv(compute(threads(8, 8, 1)))]\n\npub fn comp_sky_cube_cs(\n\n #[spirv(descriptor_set = 0, binding = 0)] output_tex: &Image!(2D, format=rgba16f, sampled=false, arrayed=true),\n\n #[spirv(uniform, descriptor_set = 2, binding = 0)] frame_constants: &FrameConstants,\n\n #[spirv(global_invocation_id)] px: UVec3,\n\n) {\n\n let face = px.z;\n\n let uv = (Vec2::new(px.x as f32 + 0.5, px.y as f32 + 0.5)) / 32.0;\n\n let dir = CUBE_MAP_FACE_ROTATIONS[face as usize] * (uv * 2.0 - Vec2::ONE).extend(-1.0);\n\n\n\n let output = atmosphere_default(dir, frame_constants.sun_direction.truncate());\n\n unsafe {\n\n output_tex.write(px, 
output.extend(1.0));\n\n }\n\n}\n", "file_path": "crates/lib/rust-shaders/src/sky.rs", "rank": 83, "score": 69386.0692038508 }, { "content": "#[spirv(compute(threads(8, 8)))]\n\npub fn rev_blur_cs(\n\n #[spirv(descriptor_set = 0, binding = 0)] input_tail_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 1)] input_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 2)] output_tex: &Image!(2D, type=f32, sampled=false),\n\n #[spirv(descriptor_set = 0, binding = 32)] sampler_lnc: &Sampler,\n\n #[spirv(uniform, descriptor_set = 0, binding = 3)] constants: &Constants,\n\n #[spirv(global_invocation_id)] px: UVec3,\n\n) {\n\n let pyramid_col: Vec4 = input_tail_tex.fetch(px.truncate());\n\n\n\n let mut self_col: Vec4;\n\n let inv_size = Vec2::ONE\n\n / Vec2::new(\n\n constants.output_extent_x as f32,\n\n constants.output_extent_y as f32,\n\n );\n\n if true {\n\n // TODO: do a small Gaussian blur instead of this nonsense\n\n\n\n const K: i32 = 1;\n", "file_path": "crates/lib/rust-shaders/src/rev_blur.rs", "rank": 84, "score": 69386.0692038508 }, { "content": "#[spirv(compute(threads(8, 8, 1)))]\n\npub fn convolve_cube_cs(\n\n #[spirv(descriptor_set = 0, binding = 0)] input_tex: &Image!(cube, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 1)] output_tex: &Image!(2D, format=rgba16f, sampled=false, arrayed=true),\n\n #[spirv(uniform, descriptor_set = 0, binding = 2)] constants: &Constants,\n\n #[spirv(descriptor_set = 0, binding = 33)] sampler_llr: &Sampler,\n\n #[spirv(global_invocation_id)] px: UVec3,\n\n) {\n\n let face = px.z as usize;\n\n let uv = (Vec2::new(px.x as f32 + 0.5, px.y as f32 + 0.5)) / constants.face_width as f32;\n\n\n\n let output_dir =\n\n (CUBE_MAP_FACE_ROTATIONS[face] * (uv * 2.0 - Vec2::ONE).extend(-1.0)).normalize();\n\n let basis: Mat3 = build_orthonormal_basis(output_dir);\n\n\n\n let sample_count: u32 = 256;\n\n\n\n let mut result: Vec4 = Vec4::ZERO;\n\n let mut i = 
0;\n\n while i < sample_count {\n\n let urand: Vec2 = hammersley(i, sample_count);\n", "file_path": "crates/lib/rust-shaders/src/convolve_cube.rs", "rank": 85, "score": 69386.0692038508 }, { "content": "#[spirv(compute(threads(8, 8)))]\n\npub fn copy_depth_to_r_cs(\n\n #[spirv(descriptor_set = 0, binding = 0)] input_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 1)] output_tex: &Image!(2D, type=f32, sampled=false),\n\n #[spirv(global_invocation_id)] id: UVec3,\n\n) {\n\n let color: Vec4 = input_tex.fetch(id.truncate());\n\n unsafe {\n\n output_tex.write(id.truncate(), color);\n\n }\n\n}\n", "file_path": "crates/lib/rust-shaders/src/copy_depth_to_r.rs", "rank": 86, "score": 69386.0692038508 }, { "content": "#[spirv(compute(threads(8, 8)))]\n\npub fn post_combine_cs(\n\n #[spirv(descriptor_set = 0, binding = 0)] input_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 2)] rev_blur_pyramid_tex: &Image!(2D, type=f32, sampled=true),\n\n #[spirv(descriptor_set = 0, binding = 3)] output_tex: &Image!(2D, type=f32, sampled=false),\n\n #[spirv(descriptor_set = 1, binding = 2)] bindless_textures: &RuntimeArray<\n\n Image!(2D, type=f32, sampled=true),\n\n >,\n\n\n\n #[spirv(descriptor_set = 0, binding = 32)] sampler_lnc: &Sampler,\n\n #[spirv(uniform, descriptor_set = 0, binding = 4)] constants: &Constants,\n\n #[spirv(uniform, descriptor_set = 2, binding = 0)] frame_constants: &FrameConstants,\n\n #[spirv(global_invocation_id)] px: UVec3,\n\n) {\n\n let uv = get_uv_u(px.truncate(), constants.output_tex_size);\n\n\n\n let glare_vec4: Vec4 = rev_blur_pyramid_tex.sample_by_lod(*sampler_lnc, uv, 0.0);\n\n let glare: Vec3 = glare_vec4.truncate();\n\n\n\n let col: Vec4 = input_tex.fetch(px.truncate());\n\n let mut col: Vec3 = col.truncate();\n", "file_path": "crates/lib/rust-shaders/src/post_combine.rs", "rank": 87, "score": 69386.0692038508 }, { "content": "pub trait BindRgRef {\n\n fn bind(&self) -> 
RenderPassBinding;\n\n}\n\n\n\nimpl BindRgRef for Ref<Image, GpuSrv> {\n\n fn bind(&self) -> RenderPassBinding {\n\n self.bind_view(ImageViewDescBuilder::default())\n\n }\n\n}\n\n\n\nimpl Ref<Image, GpuSrv> {\n\n pub fn bind_view(&self, view_desc: ImageViewDescBuilder) -> RenderPassBinding {\n\n RenderPassBinding::Image(RenderPassImageBinding {\n\n handle: self.handle,\n\n view_desc: view_desc.build().unwrap(),\n\n image_layout: vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,\n\n })\n\n }\n\n}\n\n\n", "file_path": "crates/lib/kajiya-rg/src/pass_api.rs", "rank": 88, "score": 69380.92677215856 }, { "content": "pub fn allocate_surfels(\n\n rg: &mut rg::TemporalRenderGraph,\n\n bent_normals: &rg::Handle<Image>,\n\n gbuffer_depth: &GbufferDepth,\n\n) -> SurfelGiRenderState {\n\n let gbuffer_desc = gbuffer_depth.gbuffer.desc();\n\n\n\n let mut state = SurfelGiRenderState {\n\n // 0: hash grid cell count\n\n // 1: surfel count\n\n surfel_meta_buf: temporal_storage_buffer(\n\n rg,\n\n \"surfel_gi.surfel_meta_buf\",\n\n size_of::<u32>() * 8,\n\n ),\n\n surfel_hash_key_buf: temporal_storage_buffer(\n\n rg,\n\n \"surfel_gi.surfel_hash_key_buf\",\n\n size_of::<u32>() * MAX_SURFEL_CELLS,\n\n ),\n", "file_path": "crates/lib/kajiya/src/renderers/old/surfel_gi.rs", "rank": 89, "score": 69380.92677215856 }, { "content": "pub fn trace_sun_shadow_mask(\n\n rg: &mut RenderGraph,\n\n gbuffer_depth: &GbufferDepth,\n\n tlas: &rg::Handle<RayTracingAcceleration>,\n\n bindless_descriptor_set: vk::DescriptorSet,\n\n) -> rg::Handle<Image> {\n\n let mut output_img = rg.create(gbuffer_depth.depth.desc().format(vk::Format::R8_UNORM));\n\n\n\n SimpleRenderPass::new_rt(\n\n rg.add_pass(\"trace shadow mask\"),\n\n ShaderSource::hlsl(\"/shaders/rt/trace_sun_shadow_mask.rgen.hlsl\"),\n\n [\n\n // Duplicated because `rt.hlsl` hardcodes miss index to 1\n\n ShaderSource::hlsl(\"/shaders/rt/shadow.rmiss.hlsl\"),\n\n ShaderSource::hlsl(\"/shaders/rt/shadow.rmiss.hlsl\"),\n\n ],\n\n std::iter::empty(),\n\n 
)\n\n .read_aspect(&gbuffer_depth.depth, vk::ImageAspectFlags::DEPTH)\n\n .read(&gbuffer_depth.geometric_normal)\n\n .write(&mut output_img)\n\n .raw_descriptor_set(1, bindless_descriptor_set)\n\n .trace_rays(tlas, output_img.desc().extent);\n\n\n\n output_img\n\n}\n", "file_path": "crates/lib/kajiya/src/renderers/shadows.rs", "rank": 90, "score": 69380.92677215856 }, { "content": "pub fn apply_bilinear_custom_weights(\n\n s00: Vec4,\n\n s10: Vec4,\n\n s01: Vec4,\n\n s11: Vec4,\n\n w: Vec4,\n\n normalize: bool,\n\n) -> Vec4 {\n\n let r = s00 * w.x + s10 * w.y + s01 * w.z + s11 * w.w;\n\n if normalize {\n\n r * w.dot(Vec4::ONE).recip()\n\n } else {\n\n r\n\n }\n\n}\n", "file_path": "crates/lib/rust-shaders/src/bilinear.rs", "rank": 91, "score": 69380.92677215856 }, { "content": "/// Import the image data referenced by a glTF document.\n\npub fn import_image_data(\n\n document: &Document,\n\n base: Option<&Path>,\n\n buffer_data: &[Bytes],\n\n) -> Result<Vec<ImageSource>> {\n\n let mut images = Vec::new();\n\n\n\n for image in document.images() {\n\n match image.source() {\n\n image::Source::Uri { uri, mime_type: _ } if base.is_some() => {\n\n let uri = urlencoding::decode(uri).map_err(|_| Error::UnsupportedScheme)?;\n\n let uri = uri.as_ref();\n\n\n\n match Scheme::parse(uri) {\n\n Scheme::Data(Some(_mime_type), base64) => {\n\n let bytes = base64::decode(&base64).map_err(Error::Base64)?;\n\n images.push(ImageSource::Memory(Bytes::from(bytes)));\n\n }\n\n Scheme::Data(None, ..) 
=> return Err(Error::ExternalReferenceInSliceImport),\n\n Scheme::Unsupported => return Err(Error::UnsupportedScheme),\n", "file_path": "crates/lib/kajiya-asset/src/import_gltf.rs", "rank": 92, "score": 69380.92677215856 }, { "content": "/// Import the buffer data referenced by a glTF document.\n\npub fn import_buffer_data(\n\n document: &Document,\n\n base: Option<&Path>,\n\n mut blob: Option<Vec<u8>>,\n\n) -> Result<Vec<Bytes>> {\n\n let mut buffers = Vec::new();\n\n for buffer in document.buffers() {\n\n let mut data = match buffer.source() {\n\n buffer::Source::Uri(uri) => Scheme::read(base, uri),\n\n buffer::Source::Bin => blob.take().ok_or(Error::MissingBlob),\n\n }?;\n\n if data.len() < buffer.length() {\n\n return Err(Error::BufferLength {\n\n buffer: buffer.index(),\n\n expected: buffer.length(),\n\n actual: data.len(),\n\n });\n\n }\n\n while data.len() % 4 != 0 {\n\n data.push(0);\n\n }\n\n buffers.push(Bytes::from(data));\n\n }\n\n Ok(buffers)\n\n}\n\n\n", "file_path": "crates/lib/kajiya-asset/src/import_gltf.rs", "rank": 93, "score": 69380.92677215856 }, { "content": "pub fn create_raster_pipeline(\n\n device: &Device,\n\n shaders: &[PipelineShader<Bytes>],\n\n desc: &RasterPipelineDesc,\n\n) -> anyhow::Result<RasterPipeline> {\n\n let stage_layouts = shaders\n\n .iter()\n\n .map(|shader| {\n\n rspirv_reflect::Reflection::new_from_spirv(&shader.code)\n\n .unwrap()\n\n .get_descriptor_sets()\n\n .unwrap()\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n let (descriptor_set_layouts, set_layout_info) = super::shader::create_descriptor_set_layouts(\n\n device,\n\n &merge_shader_stage_layouts(stage_layouts),\n\n vk::ShaderStageFlags::ALL_GRAPHICS,\n\n //desc.descriptor_set_layout_flags.unwrap_or(&[]), // TODO: merge flags\n", "file_path": "crates/lib/kajiya-backend/src/vulkan/shader.rs", "rank": 94, "score": 69380.92677215856 }, { "content": "pub fn create_compute_pipeline(\n\n device: &Device,\n\n spirv: &[u8],\n\n desc: &ComputePipelineDesc,\n\n) -> 
ComputePipeline {\n\n let (descriptor_set_layouts, set_layout_info) = super::shader::create_descriptor_set_layouts(\n\n device,\n\n &rspirv_reflect::Reflection::new_from_spirv(spirv)\n\n .unwrap()\n\n .get_descriptor_sets()\n\n .unwrap(),\n\n vk::ShaderStageFlags::COMPUTE,\n\n &desc.descriptor_set_opts,\n\n );\n\n\n\n // dbg!(&set_layout_info);\n\n\n\n let mut layout_create_info =\n\n vk::PipelineLayoutCreateInfo::builder().set_layouts(&descriptor_set_layouts);\n\n\n", "file_path": "crates/lib/kajiya-backend/src/vulkan/shader.rs", "rank": 95, "score": 69380.92677215856 }, { "content": "pub trait ImportExportToRenderGraph\n\nwhere\n\n Self: Resource + Sized,\n\n{\n\n fn import(\n\n self: Arc<Self>,\n\n rg: &mut RenderGraph,\n\n access_type_at_import_time: vk_sync::AccessType,\n\n ) -> Handle<Self>;\n\n\n\n fn export(\n\n resource: Handle<Self>,\n\n rg: &mut RenderGraph,\n\n access_type: vk_sync::AccessType,\n\n ) -> ExportedHandle<Self>;\n\n}\n\n\n\nimpl ImportExportToRenderGraph for Image {\n\n fn import(\n\n self: Arc<Self>,\n", "file_path": "crates/lib/kajiya-rg/src/graph.rs", "rank": 96, "score": 69380.92677215856 }, { "content": "struct VecBlob<T>(Vec<T>);\n\n\n\nimpl<T> ConstBlob for VecBlob<T>\n\nwhere\n\n T: Copy + 'static,\n\n{\n\n fn push_self(\n\n self: Box<Self>,\n\n dynamic_constants: &mut dynamic_constants::DynamicConstants,\n\n ) -> u32 {\n\n dynamic_constants.push_from_iter(self.0.into_iter())\n\n }\n\n}\n\n\n\npub struct SimpleRenderPassState<RgPipelineHandle> {\n\n pipeline: RgPipelineHandle,\n\n bindings: Vec<RenderPassBinding>,\n\n const_blobs: Vec<(usize, Box<dyn ConstBlob>)>,\n\n raw_descriptor_sets: Vec<(u32, vk::DescriptorSet)>,\n\n}\n", "file_path": "crates/lib/kajiya-rg/src/hl.rs", "rank": 97, "score": 69032.16463389076 }, { "content": "pub trait PhysicalDeviceList {\n\n fn with_presentation_support(self, surface: &Surface) -> Self;\n\n}\n\n\n\nimpl PhysicalDeviceList for Vec<PhysicalDevice> {\n\n fn with_presentation_support(self, surface: 
&Surface) -> Self {\n\n self.into_iter()\n\n .filter_map(|mut pdevice| {\n\n pdevice.presentation_requested = true;\n\n\n\n let supports_presentation =\n\n pdevice\n\n .queue_families\n\n .iter()\n\n .enumerate()\n\n .any(|(queue_index, info)| unsafe {\n\n info.properties\n\n .queue_flags\n\n .contains(vk::QueueFlags::GRAPHICS)\n\n && surface\n", "file_path": "crates/lib/kajiya-backend/src/vulkan/physical_device.rs", "rank": 98, "score": 68596.9402905876 }, { "content": "pub fn create_descriptor_set_layouts(\n\n device: &Device,\n\n descriptor_sets: &StageDescriptorSetLayouts,\n\n stage_flags: vk::ShaderStageFlags,\n\n set_opts: &[Option<(u32, DescriptorSetLayoutOpts)>; MAX_DESCRIPTOR_SETS],\n\n) -> (\n\n Vec<vk::DescriptorSetLayout>,\n\n Vec<HashMap<u32, vk::DescriptorType>>,\n\n) {\n\n // dbg!(&descriptor_sets);\n\n\n\n // Make a vector of Option<ref> to the original entries\n\n let mut set_opts = set_opts\n\n .iter()\n\n .map(|item| item.as_ref())\n\n .collect::<Vec<_>>();\n\n\n\n let samplers = TempList::new();\n\n\n\n // Find the number of sets in `descriptor_sets`\n", "file_path": "crates/lib/kajiya-backend/src/vulkan/shader.rs", "rank": 99, "score": 68596.9402905876 } ]
Rust
physx/src/aggregate.rs
FredrikNoren/physx-rs
92a09a98f3825d52c84d487d0fc575fb8cf04feb
use crate::{ actor::{Actor, ActorMap}, articulation::Articulation, articulation_base::ArticulationBase, articulation_link::ArticulationLink, articulation_reduced_coordinate::ArticulationReducedCoordinate, base::Base, bvh_structure::BvhStructure, owner::Owner, rigid_actor::RigidActor, rigid_dynamic::RigidDynamic, rigid_static::RigidStatic, traits::Class, }; use std::{marker::PhantomData, ptr::null}; use physx_sys::{ PxAggregate_addActor_mut, PxAggregate_addArticulation_mut, PxAggregate_getActors, PxAggregate_getMaxNbActors, PxAggregate_getNbActors, PxAggregate_getSelfCollision, PxAggregate_release_mut, PxAggregate_removeActor_mut, PxAggregate_removeArticulation_mut, }; #[repr(transparent)] pub struct PxAggregate<L, S, D, T, C> where L: ArticulationLink, S: RigidStatic, D: RigidDynamic, T: Articulation, C: ArticulationReducedCoordinate, { obj: physx_sys::PxAggregate, phantom_user_data: PhantomData<(*const L, *const S, *const D, *const T, *const C)>, } impl<L, S, D, T, C> Drop for PxAggregate<L, S, D, T, C> where L: ArticulationLink, S: RigidStatic, D: RigidDynamic, T: Articulation, C: ArticulationReducedCoordinate, { fn drop(&mut self) { unsafe { PxAggregate_release_mut(self.as_mut_ptr()); } } } unsafe impl<L, S, D, T, C> Send for PxAggregate<L, S, D, T, C> where L: ArticulationLink + Send, S: RigidStatic + Send, D: RigidDynamic + Send, T: Articulation + Send, C: ArticulationReducedCoordinate + Send, { } unsafe impl<L, S, D, T, C> Sync for PxAggregate<L, S, D, T, C> where L: ArticulationLink + Sync, S: RigidStatic + Sync, D: RigidDynamic + Sync, T: Articulation + Sync, C: ArticulationReducedCoordinate + Sync, { } unsafe impl<P, L, S, D, T, C> Class<P> for PxAggregate<L, S, D, T, C> where physx_sys::PxAggregate: Class<P>, L: ArticulationLink, S: RigidStatic, D: RigidDynamic, T: Articulation, C: ArticulationReducedCoordinate, { fn as_ptr(&self) -> *const P { self.obj.as_ptr() } fn as_mut_ptr(&mut self) -> *mut P { self.obj.as_mut_ptr() } } impl<L, S, D, T, C> Aggregate 
for PxAggregate<L, S, D, T, C> where L: ArticulationLink, S: RigidStatic, D: RigidDynamic, T: Articulation, C: ArticulationReducedCoordinate, { type ActorMap = ActorMap<L, S, D>; type ArticulationLink = L; type RigidStatic = S; type RigidDynamic = D; type Articulation = T; type ArticulationReducedCoordinate = C; } pub trait Aggregate: Class<physx_sys::PxAggregate> + Base { type ActorMap: RigidActor; type ArticulationLink: ArticulationLink; type RigidStatic: RigidStatic; type RigidDynamic: RigidDynamic; type Articulation: Articulation; type ArticulationReducedCoordinate: ArticulationReducedCoordinate; unsafe fn from_raw(ptr: *mut physx_sys::PxAggregate) -> Option<Owner<Self>> { Owner::from_raw(ptr as *mut Self) } fn add_articulation_link( &mut self, actor: &mut Self::ArticulationLink, bvh: Option<&BvhStructure>, ) -> bool { unsafe { PxAggregate_addActor_mut( self.as_mut_ptr(), actor.as_mut_ptr(), bvh.map_or(null(), Class::as_ptr), ) } } fn add_rigid_static( &mut self, actor: &mut Self::RigidStatic, bvh: Option<&BvhStructure>, ) -> bool { unsafe { PxAggregate_addActor_mut( self.as_mut_ptr(), actor.as_mut_ptr(), bvh.map_or(null(), Class::as_ptr), ) } } fn add_rigid_dynamic( &mut self, actor: &mut Self::RigidDynamic, bvh: Option<&BvhStructure>, ) -> bool { unsafe { PxAggregate_addActor_mut( self.as_mut_ptr(), actor.as_mut_ptr(), bvh.map_or(null(), Class::as_ptr), ) } } fn add_articulation(&mut self, articulation: &mut Self::Articulation) -> bool { unsafe { PxAggregate_addArticulation_mut(self.as_mut_ptr(), articulation.as_mut_ptr()) } } fn add_articulation_reduced_coordinate( &mut self, articulation: &mut Self::ArticulationReducedCoordinate, ) -> bool { unsafe { PxAggregate_addArticulation_mut(self.as_mut_ptr(), articulation.as_mut_ptr()) } } fn get_actors(&mut self) -> Vec<&mut Self::ActorMap> { let capacity = self.get_nb_actors(); let mut buffer: Vec<&mut Self::ActorMap> = Vec::with_capacity(capacity as usize); unsafe { let len = PxAggregate_getActors( 
self.as_mut_ptr(), buffer.as_mut_ptr() as *mut *mut physx_sys::PxActor, capacity, 0, ); buffer.set_len(len as usize); } buffer } fn get_max_nb_actors(&self) -> u32 { unsafe { PxAggregate_getMaxNbActors(self.as_ptr()) } } fn get_nb_actors(&self) -> u32 { unsafe { PxAggregate_getNbActors(self.as_ptr()) } } fn get_self_collision(&self) -> bool { unsafe { PxAggregate_getSelfCollision(self.as_ptr()) } } fn remove_actor(&mut self, actor: &mut impl Actor) -> bool { unsafe { PxAggregate_removeActor_mut(self.as_mut_ptr(), actor.as_mut_ptr()) } } fn remove_articulation(&mut self, articulation: &mut impl ArticulationBase) -> bool { unsafe { PxAggregate_removeArticulation_mut(self.as_mut_ptr(), articulation.as_mut_ptr()) } } }
use crate::{ actor::{Actor, ActorMap}, articulation::Articulation, articulation_base::ArticulationBase, articulation_link::ArticulationLink, articulation_reduced_coordinate::ArticulationReducedCoordinate, base::Base, bvh_structure::BvhStructure, owner::Owner, rigid_actor::RigidActor, rigid_dynamic::RigidDynamic, rigid_static::RigidStatic, traits::Class, }; use std::{marker::PhantomData, ptr::null}; use physx_sys::{ PxAggregate_addActor_mut, PxAggregate_addArticulation_mut, PxAggregate_getActors, PxAggregate_getMaxNbActors, PxAggregate_getNbActors, PxAggregate_getSelfCollision, PxAggregate_release_mut, PxAggregate_removeActor_mut, PxAggregate_removeArticulation_mut, }; #[repr(transparent)] pub struct PxAggregate<L, S, D, T, C> where L: ArticulationLink, S: RigidStatic, D: RigidDynamic, T: Articulation, C: ArticulationReducedCoordinate, { obj: physx_sys::PxAggregate, phantom_user_data: PhantomData<(*const L, *const S, *const D, *const T, *const C)>, } impl<L, S, D, T, C> Drop for PxAggregate<L, S, D, T, C> where L: ArticulationLink, S: RigidStatic, D: RigidDynamic, T: Articulation, C: ArticulationReducedCoordinate, { fn drop(&mut self) { unsafe { PxAggregate_release_mut(self.as_mut_ptr()); } } } unsafe impl<L, S, D, T, C> Send for PxAggregate<L, S, D, T, C> where L: ArticulationLink + Send, S: RigidStatic + Send, D: RigidDynamic + Send, T: Articulation + Send, C: ArticulationReducedCoordinate + Send, { } unsafe impl<L, S, D, T, C> Sync for PxAggregate<L, S, D, T, C> where L: ArticulationLink + Sync, S: RigidStatic + Sync, D: RigidDynamic + Sync, T: Articulation + Sync, C: ArticulationReducedCoordinate + Sync, { } unsafe impl<P, L, S, D, T, C> Class<P> for PxAggregate<L, S, D, T, C> where physx_sys::PxAggregate: Class<P>, L: ArticulationLink, S: RigidStatic, D: RigidDynamic, T: Articulation, C: ArticulationReducedCoordinate, { fn as_ptr(&self) -> *const P { self.obj.as_ptr() } fn as_mut_ptr(&mut self) -> *mut P { self.obj.as_mut_ptr() } } impl<L, S, D, T, C> Aggregate 
for PxAggregate<L, S, D, T, C> where L: ArticulationLink, S: RigidStatic, D: RigidDynamic, T: Articulation, C: ArticulationReducedCoordinate, { type ActorMap = ActorMap<L, S, D>; type ArticulationLink = L; type RigidStatic = S; type RigidDynamic = D; type Articulation = T; type ArticulationReducedCoordinate = C; } pub trait Aggregate: Class<physx_sys::PxAggregate> + Base { type ActorMap: RigidActor; type ArticulationLink: ArticulationLink; type RigidStatic: RigidStatic; type RigidDynamic: RigidDynamic; type Articulation: Articulation; type ArticulationReducedCoordinate: ArticulationReducedCoordinate; unsafe fn from_raw(ptr: *mut physx_sys::PxAggregate) -> Option<Owner<Self>> { Owner::from_raw(ptr as *mut Self) } fn add_articulation_link( &mut self, actor: &mut Self::ArticulationLink, bvh: Option<&BvhStructure>, ) -> bool { unsafe { PxAggregate_addActor_mut( self.as_mut_ptr(), actor.as_mut_ptr(), bvh.map_or(null(), Class::as_ptr), ) } } fn add_rigid_static( &mut self, actor: &mut Self::RigidStatic, bvh: Option<&BvhStructure>, ) -> bool { unsafe { PxAggregate_addActor_mut( self.as_mut_ptr(), actor.as_mut_ptr(), bvh.map_or(null(), Class::as_ptr), ) } } fn add_rigid_dynamic( &mut self, actor: &mut Self::RigidDynamic, bvh: Option<&BvhStructure>, ) -> bool { unsafe { PxAggregate_addActor_mut( self.as_mut_ptr(), actor.as_mut_ptr(), bvh.map_or(null(), Class::as_ptr), ) } } fn add_articulation(&mut self, articulation: &mut Self::Articulation) -> bool { unsafe { PxAggregate_addArticulation_mut(self.as_mut_ptr(), articulation.as_mut_ptr()) } } fn add_articulation_reduced_coordinate( &mut self, articulation: &mut Self::ArticulationReducedCoordinate, ) -> bool { unsafe { PxAggregate_addArticulation_mut(self.as_mut_ptr(), articulation.as_mut_ptr()) } } fn get_actors(&mut self) -> Vec<&mut Self::ActorMap> { let capacity = self.get_nb_actors(); let mut buffer: Vec<&mut Self::ActorMap> = Vec::with_capacity(capacity as usize);
fn get_max_nb_actors(&self) -> u32 { unsafe { PxAggregate_getMaxNbActors(self.as_ptr()) } } fn get_nb_actors(&self) -> u32 { unsafe { PxAggregate_getNbActors(self.as_ptr()) } } fn get_self_collision(&self) -> bool { unsafe { PxAggregate_getSelfCollision(self.as_ptr()) } } fn remove_actor(&mut self, actor: &mut impl Actor) -> bool { unsafe { PxAggregate_removeActor_mut(self.as_mut_ptr(), actor.as_mut_ptr()) } } fn remove_articulation(&mut self, articulation: &mut impl ArticulationBase) -> bool { unsafe { PxAggregate_removeArticulation_mut(self.as_mut_ptr(), articulation.as_mut_ptr()) } } }
unsafe { let len = PxAggregate_getActors( self.as_mut_ptr(), buffer.as_mut_ptr() as *mut *mut physx_sys::PxActor, capacity, 0, ); buffer.set_len(len as usize); } buffer }
function_block-function_prefix_line
[ { "content": "pub fn PxAggregate_addActor_mut(self_: *mut PxAggregate, actor: *mut PxActor, bvhStructure: *const PxBVHStructure, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 0, "score": 550958.7046268866 }, { "content": "pub fn PxAggregate_removeArticulation_mut(self_: *mut PxAggregate, articulation: *mut PxArticulationBase, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 1, "score": 501889.2459904133 }, { "content": "pub fn PxAggregate_addArticulation_mut(self_: *mut PxAggregate, articulation: *mut PxArticulationBase, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 2, "score": 501889.24599041324 }, { "content": "pub fn PxAggregate_removeActor_mut(self_: *mut PxAggregate, actor: *mut PxActor, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 3, "score": 496729.67092005536 }, { "content": "pub fn PxArticulationBase_getAggregate(self_: *const PxArticulationBase, ) -> *mut PxAggregate;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 4, "score": 469221.06721299223 }, { "content": "pub fn PxArticulationBase_isSleeping(self_: *const PxArticulationBase, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 5, "score": 441468.6666180843 }, { "content": "pub fn PxScene_addActor_mut(self_: *mut PxScene, actor: *mut PxActor, bvhStructure: *const PxBVHStructure, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 6, "score": 431233.69435716857 }, { "content": "pub fn PxActor_getAggregate(self_: *const PxActor, ) -> *mut PxAggregate;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 7, "score": 426376.0400166149 }, { "content": "pub fn PxArticulationBase_setName_mut(self_: *mut PxArticulationBase, name: *const i8, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 8, "score": 417348.53192675754 }, { "content": "pub fn PxAggregate_getActors(self_: *const PxAggregate, userBuffer: *mut *mut PxActor, 
bufferSize: u32, startIndex: u32, ) -> u32;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 9, "score": 417198.81081105856 }, { "content": "pub fn PxArticulationBase_getScene(self_: *const PxArticulationBase, ) -> *mut PxScene;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 10, "score": 415147.02556914045 }, { "content": "pub fn PxArticulationBase_getLinks(self_: *const PxArticulationBase, userBuffer: *mut *mut PxArticulationLink, bufferSize: u32, startIndex: u32, ) -> u32;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 11, "score": 412117.62687757984 }, { "content": "pub fn PxArticulationLink_getArticulation(self_: *const PxArticulationLink, ) -> *mut PxArticulationBase;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 12, "score": 407910.94599354407 }, { "content": "pub fn PxArticulationJointBase_getChildArticulationLink(self_: *const PxArticulationJointBase, ) -> *mut PxArticulationLink;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 13, "score": 404509.83352487587 }, { "content": "pub fn PxArticulationJointBase_getParentArticulationLink(self_: *const PxArticulationJointBase, ) -> *mut PxArticulationLink;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 14, "score": 404509.8335248759 }, { "content": "pub fn PxCollection_contains(self_: *const PxCollection, object: *mut PxBase, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 15, "score": 404254.5311990761 }, { "content": "pub fn PxScene_removeArticulation_mut(self_: *mut PxScene, articulation: *mut PxArticulationBase, wakeOnLostTouch: bool, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 16, "score": 402322.84430369065 }, { "content": "pub fn PxScene_removeActors_mut(self_: *mut PxScene, actors: *const *mut PxActor, nbActors: u32, wakeOnLostTouch: bool, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 17, "score": 401586.3342960847 }, { "content": "pub fn 
PxAggregate_getSelfCollision(self_: *const PxAggregate, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 18, "score": 399811.70583312766 }, { "content": "pub fn PxArticulationJointBase_setChildPose_mut(self_: *mut PxArticulationJointBase, pose: *const PxTransform, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 19, "score": 398467.5030523247 }, { "content": "pub fn PxArticulationJointBase_setParentPose_mut(self_: *mut PxArticulationJointBase, pose: *const PxTransform, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 20, "score": 398467.5030523247 }, { "content": "pub fn PxSerializer_registerReferences(self_: *const PxSerializer, obj: *mut PxBase, s: *mut PxSerializationContext, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 21, "score": 398311.0719437384 }, { "content": "pub fn PxScene_getArticulations(self_: *const PxScene, userBuffer: *mut *mut PxArticulationBase, bufferSize: u32, startIndex: u32, ) -> u32;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 22, "score": 397451.13857403304 }, { "content": "pub fn PxRigidActor_setGlobalPose_mut(self_: *mut PxRigidActor, pose: *const PxTransform, autowake: bool, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 23, "score": 397068.8682802273 }, { "content": "pub fn PxArticulationBase_createLink_mut(self_: *mut PxArticulationBase, parent: *mut PxArticulationLink, pose: *const PxTransform, ) -> *mut PxArticulationLink;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 24, "score": 396945.0254735558 }, { "content": "pub fn PxScene_getActors(self_: *const PxScene, types: PxActorTypeFlags, userBuffer: *mut *mut PxActor, bufferSize: u32, startIndex: u32, ) -> u32;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 25, "score": 396942.72272228537 }, { "content": "pub fn PxScene_removeActor_mut(self_: *mut PxScene, actor: *mut PxActor, wakeOnLostTouch: bool, ) -> ();\n", "file_path": 
"physx-sys/src/physx_generated.rs", "rank": 26, "score": 396601.7199579143 }, { "content": "pub fn PxBase_isReleasable(self_: *const PxBase, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 27, "score": 395970.93060980557 }, { "content": "pub fn PxArticulationLink_getInboundJoint(self_: *const PxArticulationLink, ) -> *mut PxArticulationJointBase;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 28, "score": 393331.66944830475 }, { "content": "pub fn PxArticulationBase_wakeUp_mut(self_: *mut PxArticulationBase, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 29, "score": 389004.6566938267 }, { "content": "pub fn PxArticulationBase_createArticulationJoint_mut(self_: *mut PxArticulationBase, parent: *mut PxArticulationLink, parentFrame: *const PxTransform, child: *mut PxArticulationLink, childFrame: *const PxTransform, ) -> *mut PxArticulationJointBase;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 30, "score": 388867.7735166121 }, { "content": "pub fn PxArticulationReducedCoordinate_teleportRootLink_mut(self_: *mut PxArticulationReducedCoordinate, pose: *const PxTransform, autowake: bool, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 31, "score": 388348.8297973342 }, { "content": "pub fn PxCooking_cookBVHStructure(self_: *const PxCooking, desc: *const PxBVHStructureDesc, stream: *mut PxOutputStream, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 32, "score": 385051.6671270296 }, { "content": "pub fn PxPvd_isConnected_mut(self_: *mut PxPvd, useCachedStatus: bool, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 33, "score": 384333.02935105766 }, { "content": "pub fn PxArticulationBase_putToSleep_mut(self_: *mut PxArticulationBase, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 34, "score": 384147.7544062311 }, { "content": "pub fn PxArticulationBase_getImpl_mut(self_: *mut PxArticulationBase, ) -> *mut 
PxArticulationImpl;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 35, "score": 382863.3663451217 }, { "content": "pub fn PxArticulationBase_getName(self_: *const PxArticulationBase, ) -> *const i8;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 36, "score": 379815.7551921457 }, { "content": "pub fn PxArticulationBase_releaseArticulationJoint_mut(self_: *mut PxArticulationBase, joint: *mut PxArticulationJointBase, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 37, "score": 379204.2343419524 }, { "content": "pub fn PxArticulationBase_getImpl(self_: *const PxArticulationBase, ) -> *const PxArticulationImpl;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 38, "score": 378834.3635582336 }, { "content": "pub fn PxArticulationBase_getSolverIterationCounts(self_: *const PxArticulationBase, minPositionIters: *mut u32, minVelocityIters: *mut u32, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 39, "score": 377670.38026085 }, { "content": "pub fn PxAggregate_release_mut(self_: *mut PxAggregate, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 40, "score": 377327.7649047071 }, { "content": "pub fn PxCollection_find(self_: *const PxCollection, id: usize, ) -> *mut PxBase;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 41, "score": 377314.1668365584 }, { "content": "pub fn PxBVHStructureDesc_isValid_1(self_: *const PxBVHStructureDesc, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 42, "score": 375735.25076122803 }, { "content": "pub fn PxBVHStructureDesc_isValid(self_: *const PxBVHStructureDesc, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 43, "score": 375735.25076122803 }, { "content": "pub fn PxActor_setName_mut(self_: *mut PxActor, name: *const i8, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 44, "score": 373281.8386482859 }, { "content": "pub fn 
PxRigidActor_attachShape_mut(self_: *mut PxRigidActor, shape: *mut PxShape, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 45, "score": 372469.4275100417 }, { "content": "pub fn PxRenderBuffer_append_mut(self_: *mut PxRenderBuffer, other: *const PxRenderBuffer, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 46, "score": 372371.67931540153 }, { "content": "pub fn PxScene_getRenderBuffer_mut(self_: *mut PxScene, ) -> *const PxRenderBuffer;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 47, "score": 371697.2144480006 }, { "content": "pub fn PxArticulationJointBase_getImpl_mut(self_: *mut PxArticulationJointBase, ) -> *mut PxArticulationJointImpl;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 48, "score": 371359.7010469617 }, { "content": "pub fn PxScene_removeAggregate_mut(self_: *mut PxScene, aggregate: *mut PxAggregate, wakeOnLostTouch: bool, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 49, "score": 370857.58082314895 }, { "content": "pub fn PxArticulationJoint_getSwingLimitEnabled(self_: *const PxArticulationJoint, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 50, "score": 370824.37498141476 }, { "content": "pub fn PxArticulationJoint_getTwistLimitEnabled(self_: *const PxArticulationJoint, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 51, "score": 370824.3749814149 }, { "content": "pub fn PxActor_getScene(self_: *const PxActor, ) -> *mut PxScene;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 52, "score": 370772.0755998202 }, { "content": "pub fn PxScene_addActors_mut(self_: *mut PxScene, actors: *const *mut PxActor, nbActors: u32, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 53, "score": 369226.4173860276 }, { "content": "pub fn PxArticulationJoint_setTwistLimitEnabled_mut(self_: *mut PxArticulationJoint, enabled: bool, ) -> ();\n", "file_path": 
"physx-sys/src/physx_generated.rs", "rank": 54, "score": 368625.88986154296 }, { "content": "pub fn PxArticulationJoint_setSwingLimitEnabled_mut(self_: *mut PxArticulationJoint, enabled: bool, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 55, "score": 368625.88986154296 }, { "content": "pub fn PxScene_getAggregates(self_: *const PxScene, userBuffer: *mut *mut PxAggregate, bufferSize: u32, startIndex: u32, ) -> u32;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 56, "score": 368581.77823663526 }, { "content": "pub fn PxArticulationReducedCoordinate_computeLambda(self_: *const PxArticulationReducedCoordinate, cache: *mut PxArticulationCache, initialState: *mut PxArticulationCache, jointTorque: *const f32, maxIter: u32, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 57, "score": 367034.51760823966 }, { "content": "pub fn PxArticulationJointBase_getImpl(self_: *const PxArticulationJointBase, ) -> *const PxArticulationJointImpl;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 58, "score": 366754.46513882536 }, { "content": "pub fn PxScene_addArticulation_mut(self_: *mut PxScene, articulation: *mut PxArticulationBase, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 59, "score": 366392.2375007854 }, { "content": "pub fn PxShape_getActor(self_: *const PxShape, ) -> *mut PxRigidActor;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 60, "score": 365992.9393208418 }, { "content": "pub fn PxController_setPosition_mut(self_: *mut PxController, position: *const PxExtendedVec3, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 61, "score": 365714.4832694147 }, { "content": "pub fn PxAggregate_getConcreteTypeName(self_: *const PxAggregate, ) -> *const i8;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 62, "score": 365287.74730408867 }, { "content": "pub fn PxArticulationLink_getChildren(self_: *const PxArticulationLink, userBuffer: *mut 
*mut PxArticulationLink, bufferSize: u32, startIndex: u32, ) -> u32;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 63, "score": 365265.1767447151 }, { "content": "pub fn PxBase_getConcreteTypeName(self_: *const PxBase, ) -> *const i8;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 64, "score": 365154.5273390926 }, { "content": "pub fn PxConstraintConnector_updatePvdProperties(self_: *const PxConstraintConnector, pvdConnection: *mut PvdDataStream, c: *const PxConstraint, updateType: PxPvdUpdateType::Enum, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 65, "score": 364731.58397011936 }, { "content": "pub fn PxPhysics_createAggregate_mut(self_: *mut PxPhysics, maxSize: u32, enableSelfCollision: bool, ) -> *mut PxAggregate;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 66, "score": 364286.9951688743 }, { "content": "pub fn PxConstraint_getActors(self_: *const PxConstraint, actor0: *mut *mut PxRigidActor, actor1: *mut *mut PxRigidActor, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 67, "score": 363359.0891001708 }, { "content": "pub fn PxJoint_getActors(self_: *const PxJoint, actor0: *mut *mut PxRigidActor, actor1: *mut *mut PxRigidActor, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 68, "score": 363359.0891001708 }, { "content": "pub fn PxScene_flushSimulation_mut(self_: *mut PxScene, sendPendingReports: bool, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 69, "score": 362677.4070340594 }, { "content": "pub fn PxController_setFootPosition_mut(self_: *mut PxController, position: *const PxExtendedVec3, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 70, "score": 361602.4524991737 }, { "content": "pub fn PxPhysics_getBVHStructures(self_: *const PxPhysics, userBuffer: *mut *mut PxBVHStructure, bufferSize: u32, startIndex: u32, ) -> u32;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 71, "score": 
360749.6821704473 }, { "content": "pub fn PxRigidActor_getConstraints(self_: *const PxRigidActor, userBuffer: *mut *mut PxConstraint, bufferSize: u32, startIndex: u32, ) -> u32;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 72, "score": 360654.51512776746 }, { "content": "pub fn PxRigidActor_getShapes(self_: *const PxRigidActor, userBuffer: *mut *mut PxShape, bufferSize: u32, startIndex: u32, ) -> u32;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 73, "score": 360654.51512776746 }, { "content": "pub fn PxScene_resetFiltering_mut_1(self_: *mut PxScene, actor: *mut PxRigidActor, shapes: *const *mut PxShape, shapeCount: u32, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 74, "score": 359544.96600564977 }, { "content": "pub fn PxActor_getType(self_: *const PxActor, ) -> PxActorType::Enum;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 75, "score": 359309.8913257295 }, { "content": "pub fn PxPlane_contains(self_: *const PxPlane, p: *const PxVec3, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 76, "score": 358204.6507621214 }, { "content": "pub fn PxControllerFilterCallback_filter_mut(self_: *mut PxControllerFilterCallback, a: *const PxController, b: *const PxController, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 77, "score": 358050.9883230879 }, { "content": "pub fn PxArticulationBase_getStabilizationThreshold(self_: *const PxArticulationBase, ) -> f32;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 78, "score": 357504.7143646085 }, { "content": "pub fn PxArticulationBase_getNbLinks(self_: *const PxArticulationBase, ) -> u32;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 79, "score": 357504.7143646085 }, { "content": "pub fn PxArticulationBase_getSleepThreshold(self_: *const PxArticulationBase, ) -> f32;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 80, "score": 357504.7143646085 }, { "content": "pub fn 
PxArticulationBase_getWakeCounter(self_: *const PxArticulationBase, ) -> f32;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 81, "score": 357504.7143646085 }, { "content": "pub fn PxArticulationBase_setStabilizationThreshold_mut(self_: *mut PxArticulationBase, threshold: f32, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 82, "score": 357185.82303648384 }, { "content": "pub fn PxArticulationBase_setSleepThreshold_mut(self_: *mut PxArticulationBase, threshold: f32, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 83, "score": 357185.82303648384 }, { "content": "pub fn PxArticulationReducedCoordinate_createCache(self_: *const PxArticulationReducedCoordinate, ) -> *mut PxArticulationCache;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 84, "score": 356490.60846276814 }, { "content": "pub fn PxBVHStructure_getConcreteTypeName(self_: *const PxBVHStructure, ) -> *const i8;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 85, "score": 356300.05381079577 }, { "content": "pub fn PxArticulationJoint_getConcreteTypeName(self_: *const PxArticulationJoint, ) -> *const i8;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 86, "score": 356005.5997320798 }, { "content": "pub fn PxArticulationLink_getConcreteTypeName(self_: *const PxArticulationLink, ) -> *const i8;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 87, "score": 356005.5997320798 }, { "content": "pub fn PxArticulationJoint_setTargetOrientation_mut(self_: *mut PxArticulationJoint, orientation: *const PxQuat, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 88, "score": 355794.16075074224 }, { "content": "pub fn PxArticulationJoint_setTargetVelocity_mut(self_: *mut PxArticulationJoint, velocity: *const PxVec3, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 89, "score": 355794.16075074224 }, { "content": "pub fn PxShapeExt_overlap_mut(shape: *const PxShape, actor: *const 
PxRigidActor, otherGeom: *const PxGeometry, otherGeomPose: *const PxTransform, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 90, "score": 355590.88288325653 }, { "content": "pub fn PxRigidActor_detachShape_mut(self_: *mut PxRigidActor, shape: *mut PxShape, wakeOnLostTouch: bool, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 91, "score": 355105.6043983034 }, { "content": "pub fn PxScene_fetchResultsStart_mut(self_: *mut PxScene, contactPairs: *mut *const PxContactPairHeader, nbContactPairs: *mut u32, block: bool, ) -> bool;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 92, "score": 354701.599571326 }, { "content": "pub fn PxActor_setActorFlag_mut(self_: *mut PxActor, flag: PxActorFlag::Enum, value: bool, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 93, "score": 354465.4486108105 }, { "content": "pub fn PxBase_setBaseFlag_mut(self_: *mut PxBase, flag: PxBaseFlag::Enum, value: bool, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 94, "score": 354435.4520553234 }, { "content": "pub trait Actor: Class<PxActor> + Base {\n\n /*\n\n fixme[tolsson]: when I tried implementing these it had no effect, and it\n\n introduces a risk of memory leaks since physx does not copy the data, so\n\n we need to convert a string into an OsString and then not recover it\n\n until some later point. 
For types that define a userData, it is much\n\n safer to define it there.\n\n\n\n pub fn setName(&mut self, name: &str) {PxActor_setName_mut(self.as_mut_ptr(), name) }\n\n pub fn getName(&self) -> *const i8 {PxActor_getName(self.as_ptr())}\n\n */\n\n /// Get the concrete type of the actor\n\n fn get_type(&self) -> ActorType {\n\n unsafe { PxActor_getType(self.as_ptr()).into() }\n\n }\n\n\n\n /// Get the world bounds of this actor\n\n fn get_world_bounds(&self, inflation: f32) -> PxBounds3 {\n\n unsafe { PxActor_getWorldBounds(self.as_ptr(), inflation).into() }\n\n }\n", "file_path": "physx/src/actor.rs", "rank": 95, "score": 353803.5686976869 }, { "content": "pub fn PxCollection_getObjects(self_: *const PxCollection, userBuffer: *mut *mut PxBase, bufferSize: u32, startIndex: u32, ) -> u32;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 96, "score": 353545.84707494464 }, { "content": "pub fn PxPruningStructure_getRigidActors(self_: *const PxPruningStructure, userBuffer: *mut *mut PxRigidActor, bufferSize: u32, startIndex: u32, ) -> u32;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 97, "score": 353266.68181959016 }, { "content": "pub fn PxScene_resetFiltering_mut(self_: *mut PxScene, actor: *mut PxActor, ) -> ();\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 98, "score": 352989.52917843673 }, { "content": "pub fn PxAggregate_getNbActors(self_: *const PxAggregate, ) -> u32;\n", "file_path": "physx-sys/src/physx_generated.rs", "rank": 99, "score": 352749.7071675557 } ]
Rust
crates/aleph-target-build/src/platform.rs
nathanvoglsam/aleph
00f12548f7f50ade0f60343b6c5001bc0ddde6cd
#[derive(Copy, Clone, PartialEq, Debug)] pub enum Platform { UniversalWindowsGNU, UniversalWindowsMSVC, WindowsGNU, WindowsMSVC, Linux, Android, Unknown, } impl Platform { pub fn print_host_cargo_cfg(self) { match self { Platform::UniversalWindowsGNU => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_gnu"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_universal_windows"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_windows"); } Platform::UniversalWindowsMSVC => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_msvc"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_universal_windows"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_windows"); } Platform::WindowsGNU => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_gnu"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_windows"); } Platform::WindowsMSVC => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_msvc"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_windows"); } Platform::Linux => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_linux"); } Platform::Android => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_android"); } Platform::Unknown => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_unknown"); } } } pub fn print_target_cargo_cfg(self) { match self { Platform::UniversalWindowsGNU => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_gnu"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_windows"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_universal_windows"); } Platform::UniversalWindowsMSVC => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_msvc"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_windows"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_universal_windows"); } Platform::WindowsGNU => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_gnu"); 
println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_windows"); } Platform::WindowsMSVC => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_msvc"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_windows"); } Platform::Linux => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_linux"); } Platform::Android => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_android"); } Platform::Unknown => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_unknown"); } } } pub const fn name(self) -> &'static str { match self { Platform::UniversalWindowsGNU => "uwp-gnu", Platform::UniversalWindowsMSVC => "uwp-msvc", Platform::WindowsGNU => "windows-gnu", Platform::WindowsMSVC => "windows-msvc", Platform::Linux => "linux", Platform::Android => "android", Platform::Unknown => "unknown", } } pub const fn pretty_name(self) -> &'static str { match self { Platform::UniversalWindowsGNU => "Universal Windows GNU", Platform::UniversalWindowsMSVC => "Universal Windows MSVC", Platform::WindowsGNU => "Windows GNU", Platform::WindowsMSVC => "Windows MSVC", Platform::Linux => "Linux", Platform::Android => "Android", Platform::Unknown => "Unknown", } } pub const fn is_win32(self) -> bool { match self { Platform::WindowsMSVC | Platform::WindowsGNU => true, _ => false, } } pub const fn is_windows(self) -> bool { match self { Platform::WindowsMSVC | Platform::WindowsGNU | Platform::UniversalWindowsGNU | Platform::UniversalWindowsMSVC => true, _ => false, } } pub const fn is_uwp(self) -> bool { match self { Platform::UniversalWindowsGNU | Platform::UniversalWindowsMSVC => true, _ => false, } } pub const fn is_linux(self) -> bool { match self { Platform::Linux => true, _ => false, } } pub const fn is_msvc(self) -> bool { match self { Platform::WindowsMSVC => true, Platform::UniversalWindowsMSVC => true, _ => false, } } pub const fn is_gnu(self) -> bool { match self { Platform::WindowsGNU => true, Platform::UniversalWindowsGNU => true, _ => false, } } pub const fn 
is_android(self) -> bool { match self { Platform::Android => true, _ => false, } } pub const fn is_unknown(self) -> bool { match self { Platform::Unknown => true, _ => false, } } } #[inline] pub fn get_platform_from(triple: &str) -> Platform { let target = triple; if target.contains("pc-windows") { if target.contains("msvc") { Platform::WindowsMSVC } else if target.contains("gnu") { Platform::WindowsGNU } else { Platform::Unknown } } else if target.contains("uwp-windows") { if target.contains("msvc") { Platform::UniversalWindowsMSVC } else if target.contains("gnu") { Platform::UniversalWindowsGNU } else { Platform::Unknown } } else if target.contains("android") { Platform::Android } else if target.contains("linux") { Platform::Linux } else { Platform::Unknown } }
#[derive(Copy, Clone, PartialEq, Debug)] pub enum Platform { UniversalWindowsGNU, UniversalWindowsMSVC, WindowsGNU, WindowsMSVC, Linux, Android, Unknown, } impl Platform { pub fn print_host_cargo_cfg(self) { match self { Platform::UniversalWindowsGNU => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_gnu"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_universal_windows"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_windows"); } Platform::UniversalWindowsMSVC => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_msvc"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_universal_windows"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_windows"); } Platform::WindowsGNU => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_gnu"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_windows"); } Platform::WindowsMSVC => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_msvc"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_windows"); } Platform::Linux => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_linux"); } Platform::Android => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_android"); } Platform::Unknown => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_TARGET_is_unknown"); } } } pub fn print_target_cargo_cfg(self) { match self { Platform::UniversalWindowsGNU => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_gnu"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_windows"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_universal_windows"); } Platform::UniversalWindowsMSVC => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_msvc"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_windows"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_universal_windows"); } Platform::WindowsGNU => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_gnu"); 
println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_windows"); } Platform::WindowsMSVC => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_msvc"); println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_windows"); } Platform::Linux => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_linux"); } Platform::Android => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_android"); } Platform::Unknown => { println!("cargo:rustc-cfg=ALEPH_BUILD_PLATFORM_HOST_is_unknown"); } } } pub const fn name(self) -> &'static str { match self { Platform::UniversalWindowsGNU => "uwp-gnu", Platform::UniversalWindowsMSVC => "uwp-msvc", Platform::WindowsGNU => "windows-gnu", Platform::WindowsMSVC => "windows-msvc", Platform::Linux => "linux", Platform::Android => "android", Platform::Unknown => "unknown", } } pub const fn pretty_name(self) -> &'static str { match self { Platform::UniversalWindowsGNU => "Universal Windows GNU", Platform::UniversalWindowsMSVC => "Universal Windows MSVC", Platform::WindowsGNU => "Windows GNU", Platform::WindowsMSVC => "Windows MSVC", Platform::Linux => "Linux", Platform::Android => "Android", Platform::Unknown => "Unknown", } } pub const fn is_win32(self) -> bool { match self { Platform::WindowsMSVC | Platform::WindowsGNU => true, _ => false, } } pub const fn is_windows(self) -> bool { match self { Platform::WindowsMSVC | Platform::WindowsGNU | Platform::UniversalWindowsGNU | Platform::UniversalWindowsMSVC => true, _ => false, } } pub const fn is_uwp(self) -> bool { match self { Platform::UniversalWindowsGNU | Platform::UniversalWindowsMSVC => true, _ => false, } } pub const fn is_linux(self) -> bool { match self { Platform::Linux => true, _ => false, } } pub const fn is_msvc(self) -> bool { match self { Platform::WindowsMSVC => true, Platform::UniversalWindowsMSVC => true, _ => false, } } pub cons
} else { Platform::Unknown } } else if target.contains("android") { Platform::Android } else if target.contains("linux") { Platform::Linux } else { Platform::Unknown } }
t fn is_gnu(self) -> bool { match self { Platform::WindowsGNU => true, Platform::UniversalWindowsGNU => true, _ => false, } } pub const fn is_android(self) -> bool { match self { Platform::Android => true, _ => false, } } pub const fn is_unknown(self) -> bool { match self { Platform::Unknown => true, _ => false, } } } #[inline] pub fn get_platform_from(triple: &str) -> Platform { let target = triple; if target.contains("pc-windows") { if target.contains("msvc") { Platform::WindowsMSVC } else if target.contains("gnu") { Platform::WindowsGNU } else { Platform::Unknown } } else if target.contains("uwp-windows") { if target.contains("msvc") { Platform::UniversalWindowsMSVC } else if target.contains("gnu") { Platform::UniversalWindowsGNU
random
[ { "content": "///\n\n/// Gets the vendor string for the current CPU\n\n///\n\n/// # Warning\n\n///\n\n/// At the moment this only works on x86 and x86_64. Otherwise it will just return an \"Unknown CPU\n\n/// Vendor\" string.\n\n///\n\npub fn cpu_vendor() -> &'static str {\n\n CPU_VENDOR_STRING.as_str()\n\n}\n\n\n", "file_path": "crates/aleph-sys-info/src/lib.rs", "rank": 1, "score": 276792.73189338157 }, { "content": "///\n\n/// Gets the brand string for the current CPU\n\n///\n\n/// # Warning\n\n///\n\n/// At the moment this only works on x86 and x86_64 that support an extended part of the __cpuid\n\n/// instruction. Otherwise it will just return an \"Unknown CPU\" string.\n\n///\n\npub fn cpu_brand() -> &'static str {\n\n CPU_BRAND_STRING.as_str()\n\n}\n\n\n", "file_path": "crates/aleph-sys-info/src/lib.rs", "rank": 2, "score": 276792.574677607 }, { "content": "///\n\n/// This interface should be used by plugins that wish to register themselves as the engine's window\n\n/// provider. 
Anything that implements this should correctly handle creating and destroying an OS\n\n/// window, and should be able to give out an `AnyArc<IWindow>` to allow others to retrieve\n\n/// information about and manipulate the window.\n\n///\n\npub trait IWindowProvider: IAny + 'static {\n\n ///\n\n /// Returns an `AnyArc` that holds a window interface.\n\n ///\n\n /// This will always return the same `IWindow` instance as `IWindowProvider` only supports\n\n /// handling a single OS window.\n\n ///\n\n /// A return value of `None` should signal that the functionality is not supported.\n\n ///\n\n fn get_window(&self) -> Option<AnyArc<dyn IWindow>>;\n\n}\n\n\n", "file_path": "crates/aleph-interfaces/src/platform/window.rs", "rank": 3, "score": 244062.0224633542 }, { "content": "fn message_type_string(mtype: DebugUtilsMessageTypeFlagsEXT) -> &'static str {\n\n if mtype == DebugUtilsMessageTypeFlagsEXT::GENERAL_EXT {\n\n \"GENERAL\"\n\n } else if mtype == DebugUtilsMessageTypeFlagsEXT::VALIDATION_EXT {\n\n \"VALIDATION\"\n\n } else if mtype == DebugUtilsMessageTypeFlagsEXT::PERFORMANCE_EXT {\n\n \"PERFORMANCE\"\n\n } else {\n\n \"NONE\"\n\n }\n\n}\n\n\n\nunsafe fn print_message(callback_data: &DebugUtilsMessengerCallbackDataEXT, level: Level) {\n\n let message = CStr::from_ptr(callback_data.p_message).to_str().unwrap();\n\n let message = console::style(message).italic();\n\n\n\n let message_header = console::style(\"Message\").cyan().bold();\n\n log!(level, \"================{}=================\", message_header);\n\n log!(level, \"{}\", message);\n\n}\n", "file_path": "crates/aleph-vulkan-core/src/debug/messenger.rs", "rank": 4, "score": 234607.43620755995 }, { "content": "#[inline]\n\npub fn host_platform() -> Platform {\n\n get_platform_from(&env::var(\"HOST\").unwrap())\n\n}\n\n\n\n///\n\n/// Returns the target platform (operating system)\n\n///\n\n/// # Warning\n\n///\n\n/// Only works in a build script\n\n///\n", "file_path": 
"crates/aleph-target-build/src/build.rs", "rank": 5, "score": 228178.68029957786 }, { "content": "#[inline]\n\npub fn target_platform() -> Platform {\n\n get_platform_from(&env::var(\"TARGET\").unwrap())\n\n}\n", "file_path": "crates/aleph-target-build/src/build.rs", "rank": 6, "score": 228178.68029957786 }, { "content": "///\n\n/// This interface should be implemented as the interface to an OS window. Ideally access to an\n\n/// `IWindow` instance will be provided from an `IWindowProvider`.\n\n///\n\npub trait IWindow: IAny + HasRawWindowHandle + Send + Sync + 'static {\n\n ///\n\n /// Returns whether the window has been resized since the last time this function was called.\n\n ///\n\n /// # Info\n\n ///\n\n /// This interface was created to provide a very simple, one shot function that can be called\n\n /// once per frame to check if the window has been resized since last time it was checked.\n\n ///\n\n /// If the window has been resized then this will return true once, and only once, until the\n\n /// window is resized again.\n\n ///\n\n /// # Warning\n\n ///\n\n /// This API will probably be useless to anyone other than the core engine implementers as the\n\n /// function will only yield the true result once per frame. 
The intended use for this API is\n\n /// for triggering a swap chain rebuild and this consumption based model makes the most sense\n\n /// for that use case.\n\n ///\n\n /// If you're using the engine, and not implementing it, then you should look at the\n", "file_path": "crates/aleph-interfaces/src/platform/window.rs", "rank": 7, "score": 226679.67923521835 }, { "content": "///\n\n/// Gets the name of the dll/so file that will need to be copied around\n\n///\n\nfn dll_name() -> &'static str {\n\n match target::build::target_platform() {\n\n Platform::WindowsGNU\n\n | Platform::WindowsMSVC\n\n | Platform::UniversalWindowsGNU\n\n | Platform::UniversalWindowsMSVC => \"SDL2.dll\",\n\n Platform::Linux | Platform::Android => \"libSDL2.so\",\n\n Platform::Unknown => panic!(\"Unsupported Platform\"),\n\n }\n\n}\n\n\n", "file_path": "plugins/aleph-sdl2/build.rs", "rank": 8, "score": 224211.5018062627 }, { "content": "#[inline]\n\npub fn optional_ref_to_ptr<T>(option: Option<&T>) -> *const T {\n\n option.map(|v| v as *const T).unwrap_or(std::ptr::null())\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
flags_bitwise_impl {\n\n ($t:ident) => {\n\n impl $t {\n\n #[inline]\n\n pub fn is_single_flag(&self) -> bool {\n\n self.0.count_ones() == 1\n\n }\n\n\n\n #[inline]\n\n pub fn intersects(&self, other: &Self) -> bool {\n\n (*self | *other).0 != 0\n\n }\n\n\n\n #[inline]\n\n pub fn is_subset_of(&self, other: &Self) -> bool {\n", "file_path": "crates/aleph-windows/src/utils.rs", "rank": 9, "score": 218913.92042308513 }, { "content": "///\n\n/// Embedded bytes of CascadiaCode.ttf\n\n///\n\npub fn cascadia_code() -> &'static [u8] {\n\n include_bytes!(\"../fonts/CascadiaCode.ttf\")\n\n}\n\n\n", "file_path": "plugins/aleph-egui/src/fonts.rs", "rank": 10, "score": 208675.28398412632 }, { "content": "#[inline]\n\npub fn optional_slice_to_num_ptr_pair<T>(slice: Option<&[T]>) -> (u32, *const T) {\n\n if let Some(slice) = slice {\n\n if slice.is_empty() {\n\n let num = 0;\n\n let ptr = std::ptr::null();\n\n (num, ptr)\n\n } else {\n\n let num = slice.len() as u32;\n\n let ptr = slice.as_ptr();\n\n (num, ptr)\n\n }\n\n } else {\n\n let num = 0;\n\n let ptr = std::ptr::null();\n\n (num, ptr)\n\n }\n\n}\n\n\n", "file_path": "crates/aleph-windows/src/utils.rs", "rank": 11, "score": 206496.12093338024 }, { "content": "#[allow(unused)]\n\npub fn standard_vert_shader() -> &'static [u8] {\n\n include_bytes!(\"../shaders/compiled/standard/standard.vert.dxil\")\n\n}\n\n\n\n///\n\n/// Gets the raw dxil bytes for the standard vertex shader\n\n///\n\n//#[allow(unused)]\n\n//pub fn tonemapping_frag_shader() -> (&'static [u8], &'static [u32]) {\n\n// include_bytes!(\"../shaders/compiled/postprocess/tonemapping.frag.dxil\")\n\n//}\n\n\n\n///\n\n/// Gets the raw dxil bytes for the standard vertex shader\n\n///\n", "file_path": "plugins/aleph-render/src/shaders.rs", "rank": 12, "score": 205718.43962365482 }, { "content": "///\n\n/// Embedded bytes of NotoSans-Regular.ttf\n\n///\n\npub fn noto_sans_regular() -> &'static [u8] {\n\n include_bytes!(\"../fonts/NotoSans-Regular.ttf\")\n\n}\n", 
"file_path": "plugins/aleph-egui/src/fonts.rs", "rank": 13, "score": 205718.43962365482 }, { "content": "#[allow(unused)]\n\npub fn egui_vert_shader() -> &'static [u8] {\n\n include_bytes!(\"../shaders/compiled/egui/egui.vert.dxil\")\n\n}\n\n\n\n///\n\n/// Gets the raw dxil bytes for the standard fragment shader\n\n///\n", "file_path": "plugins/aleph-render/src/shaders.rs", "rank": 14, "score": 205718.43962365482 }, { "content": "///\n\n/// Embedded bytes of JetBrainsMono-Regular.ttf\n\n///\n\npub fn jetbrains_mono_regular() -> &'static [u8] {\n\n include_bytes!(\"../fonts/JetBrainsMono-Regular.ttf\")\n\n}\n\n\n", "file_path": "plugins/aleph-egui/src/fonts.rs", "rank": 15, "score": 205718.43962365482 }, { "content": "#[allow(unused)]\n\npub fn egui_frag_shader() -> &'static [u8] {\n\n include_bytes!(\"../shaders/compiled/egui/egui.frag.dxil\")\n\n}\n\n\n\n///\n\n/// Gets the raw dxil bytes for the egui vertex shader\n\n///\n", "file_path": "plugins/aleph-render/src/shaders.rs", "rank": 16, "score": 205718.43962365482 }, { "content": "#[allow(unused)]\n\npub fn standard_frag_shader() -> &'static [u8] {\n\n include_bytes!(\"../shaders/compiled/standard/standard.frag.dxil\")\n\n}\n\n\n\n///\n\n/// Gets the raw dxil bytes for the standard vertex shader\n\n///\n", "file_path": "plugins/aleph-render/src/shaders.rs", "rank": 17, "score": 205718.43962365482 }, { "content": "///\n\n/// Embedded bytes of NotoEmoji-Regular.ttf\n\n///\n\npub fn noto_emoji_regular() -> &'static [u8] {\n\n include_bytes!(\"../fonts/NotoEmoji-Regular.ttf\")\n\n}\n\n\n", "file_path": "plugins/aleph-egui/src/fonts.rs", "rank": 18, "score": 205718.43962365482 }, { "content": "///\n\n/// Embedded bytes of emoji-icon-font.ttf\n\n///\n\npub fn emoji_icon_font() -> &'static [u8] {\n\n include_bytes!(\"../fonts/emoji-icon-font.ttf\")\n\n}\n\n\n", "file_path": "plugins/aleph-egui/src/fonts.rs", "rank": 19, "score": 205718.43962365482 }, { "content": "pub fn print_error(source: &str, error: 
ParseError<&str>) {\n\n // Get the error\n\n let err_pos = error.position.0 as usize - source.as_ptr() as usize;\n\n\n\n let (pos, highlight) = highlight_code(source, err_pos).unwrap();\n\n\n\n println!(\"Error: Error on line {}, column {}\", pos.line, pos.column);\n\n print!(\"{}\", highlight);\n\n\n\n for error in error.errors.into_iter() {\n\n match error {\n\n Error::Unexpected(info) => match info {\n\n Info::Token(token) => {\n\n if token.is_control() || token.is_ascii_control() {\n\n println!(\" - Unexpected token '{}'\", token.escape_default())\n\n } else {\n\n println!(\" - Unexpected token '{}'\", token)\n\n }\n\n }\n\n Info::Range(range) => println!(\" - Unexpected range '{}'\", range),\n", "file_path": "crates/aleph-combine-utils/src/lib.rs", "rank": 20, "score": 204646.16170220764 }, { "content": "#[allow(unused)]\n\npub fn fullscreen_quad_vert_shader() -> &'static [u8] {\n\n include_bytes!(\"../shaders/compiled/fullscreen_quad/fullscreen_quad.vert.dxil\")\n\n}\n", "file_path": "plugins/aleph-render/src/shaders.rs", "rank": 21, "score": 202887.80849640403 }, { "content": "#[inline]\n\npub fn get_architecture_from(triple: &str) -> Architecture {\n\n let target = triple;\n\n\n\n if target.contains(\"x86_64\") {\n\n Architecture::X8664\n\n } else if target.contains(\"aarch64\") {\n\n Architecture::AARCH64\n\n } else {\n\n Architecture::Unknown\n\n }\n\n}\n", "file_path": "crates/aleph-target-build/src/architecture.rs", "rank": 22, "score": 198034.35141905604 }, { "content": "#[inline]\n\npub fn is_format_prohibited(format: VkFormat) -> bool {\n\n match format {\n\n VkFormat::R8_USCALED\n\n | VkFormat::R8_SSCALED\n\n | VkFormat::R8G8_USCALED\n\n | VkFormat::R8G8_SSCALED\n\n | VkFormat::R8G8B8_USCALED\n\n | VkFormat::R8G8B8_SSCALED\n\n | VkFormat::B8G8R8_USCALED\n\n | VkFormat::B8G8R8_SSCALED\n\n | VkFormat::R8G8B8A8_USCALED\n\n | VkFormat::R8G8B8A8_SSCALED\n\n | VkFormat::B8G8R8A8_USCALED\n\n | VkFormat::B8G8R8A8_SSCALED\n\n | 
VkFormat::A8B8G8R8_UNORM_PACK32\n\n | VkFormat::A8B8G8R8_SNORM_PACK32\n\n | VkFormat::A8B8G8R8_USCALED_PACK32\n\n | VkFormat::A8B8G8R8_SSCALED_PACK32\n\n | VkFormat::A8B8G8R8_UINT_PACK32\n\n | VkFormat::A8B8G8R8_SINT_PACK32\n", "file_path": "crates/aleph-ktx/src/format/support.rs", "rank": 23, "score": 195460.69744017342 }, { "content": "#[inline]\n\npub fn is_format_unsupported(format: VkFormat) -> bool {\n\n match format {\n\n VkFormat::UNDEFINED => true,\n\n _ => false,\n\n }\n\n}\n\n\n\n///\n\n/// A list of all formats allowed by the KTX2 spec\n\n///\n\npub const ALLOWED_FORMATS: [VkFormat; 176] = [\n\n VkFormat::UNDEFINED,\n\n VkFormat::R4G4_UNORM_PACK8,\n\n VkFormat::R4G4B4A4_UNORM_PACK16,\n\n VkFormat::B4G4R4A4_UNORM_PACK16,\n\n VkFormat::R5G6B5_UNORM_PACK16,\n\n VkFormat::B5G6R5_UNORM_PACK16,\n\n VkFormat::R5G5B5A1_UNORM_PACK16,\n\n VkFormat::B5G5R5A1_UNORM_PACK16,\n\n VkFormat::A1R5G5B5_UNORM_PACK16,\n", "file_path": "crates/aleph-ktx/src/format/support.rs", "rank": 24, "score": 195460.69744017342 }, { "content": "#[test]\n\npub fn test_valid_enum() {\n\n let mut first_variant = EnumVariant::default();\n\n first_variant\n\n .fields\n\n .push(FieldType::Primitive(PrimitiveType::I16).into());\n\n\n\n let mut second_variant = EnumVariant::default();\n\n second_variant\n\n .fields\n\n .push(FieldType::Primitive(PrimitiveType::I32).into());\n\n second_variant\n\n .fields\n\n .push(FieldType::Primitive(PrimitiveType::F64).into());\n\n\n\n let mut static_mesh_enum = Enum::default();\n\n static_mesh_enum.attributes.push(\n\n sexpr::ast::ListBuilder::new()\n\n .add_word(\"!doc\", None)\n\n .add_string(\" A static mesh\", None)\n\n .build(),\n", "file_path": "crates/aleph-schema/src/tests.rs", "rank": 25, "score": 194469.16467629373 }, { "content": "#[inline]\n\npub fn is_format_rgbds_ordered(format: VkFormat) -> bool {\n\n match format {\n\n VkFormat::R4G4_UNORM_PACK8\n\n | VkFormat::R4G4B4A4_UNORM_PACK16\n\n | VkFormat::R5G6B5_UNORM_PACK16\n\n | 
VkFormat::R5G5B5A1_UNORM_PACK16\n\n | VkFormat::A1R5G5B5_UNORM_PACK16\n\n | VkFormat::R8_UNORM\n\n | VkFormat::R8_SNORM\n\n | VkFormat::R8_UINT\n\n | VkFormat::R8_SINT\n\n | VkFormat::R8_SRGB\n\n | VkFormat::R8G8_UNORM\n\n | VkFormat::R8G8_SNORM\n\n | VkFormat::R8G8_UINT\n\n | VkFormat::R8G8_SINT\n\n | VkFormat::R8G8_SRGB\n\n | VkFormat::R8G8B8_UNORM\n\n | VkFormat::R8G8B8_SNORM\n\n | VkFormat::R8G8B8_UINT\n", "file_path": "crates/aleph-ktx/src/format/component_order.rs", "rank": 26, "score": 190359.16023720655 }, { "content": "#[inline]\n\npub fn get_build_type_from(profile: &str) -> BuildType {\n\n if profile == \"release\" {\n\n BuildType::Release\n\n } else if profile == \"debug\" {\n\n BuildType::Debug\n\n } else {\n\n BuildType::Unknown\n\n }\n\n}\n", "file_path": "crates/aleph-target-build/src/build_type.rs", "rank": 27, "score": 190219.6520492456 }, { "content": "///\n\n/// The generic interface expected of types that can be used as a label. A label is just a generic\n\n/// identifier or name that can be used in some context to identify something.\n\n///\n\n/// For example, the scheduler uses labels to identify execution stages.\n\n///\n\n/// A dynamic, generic system is more friendly to FFI bindings where special FFI friendly\n\n/// implementations of [`Label`] can be created while rust friendly interfaces can be used by pure\n\n/// rust code.\n\n///\n\npub trait Label: DynHash + Debug + Send + Sync + 'static {\n\n #[doc(hidden)]\n\n fn dyn_clone(&self) -> Box<dyn Label>;\n\n}\n\n\n\nimpl PartialEq for dyn Label {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.dyn_eq(other.as_dyn_eq())\n\n }\n\n}\n\n\n\nimpl Eq for dyn Label {}\n\n\n\nimpl Hash for dyn Label {\n\n fn hash<H: Hasher>(&self, state: &mut H) {\n\n self.dyn_hash(state);\n\n }\n\n}\n\n\n\nimpl Clone for Box<dyn Label> {\n", "file_path": "crates/aleph-label/src/lib.rs", "rank": 28, "score": 189822.461567678 }, { "content": "#[inline]\n\npub fn is_format_alpha_first_ordered(format: VkFormat) -> 
bool {\n\n match format {\n\n VkFormat::A1R5G5B5_UNORM_PACK16\n\n | VkFormat::A2R10G10B10_UNORM_PACK32\n\n | VkFormat::A2R10G10B10_SNORM_PACK32\n\n | VkFormat::A2R10G10B10_UINT_PACK32\n\n | VkFormat::A2R10G10B10_SINT_PACK32\n\n | VkFormat::A2B10G10R10_UNORM_PACK32\n\n | VkFormat::A2B10G10R10_SNORM_PACK32\n\n | VkFormat::A2B10G10R10_UINT_PACK32\n\n | VkFormat::A2B10G10R10_SINT_PACK32 => true,\n\n _ => false,\n\n }\n\n}\n\n\n\n///\n\n/// This returns if the format is laid out in RGBDS order.\n\n///\n\n/// # Info\n\n///\n\n/// This refers to how the name describes the format, not necessarily how it is laid out in memory.\n\n/// That is, R4G4B4A4_UNORM_PACK16 and R8G8B8_UNORM are considered rgb ordered because of the name\n\n/// regardless of data layout.\n\n///\n\n/// This also handles ordering of formats where depth is before stencil.\n\n///\n", "file_path": "crates/aleph-ktx/src/format/component_order.rs", "rank": 29, "score": 187958.8654207763 }, { "content": "///\n\n/// This interface should be used by plugins that wish to register themselves as the engine's\n\n/// event provider. 
Anything that implements this should correctly handle creating and\n\n/// destroying whatever is needed to access the system's event queue, and should be able to give out\n\n/// an `AnyArc<IEvents>` to allow others to interface with the events system.\n\n///\n\npub trait IEventsProvider: IAny + 'static {\n\n ///\n\n /// Returns an `AnyArc` that holds an `IEvents` interface.\n\n ///\n\n /// This will always return the same `IEvents` instance as `IEventsProvider` only supports\n\n /// handling a single events instance.\n\n ///\n\n /// A return value of `None` should signal that the functionality is not supported.\n\n ///\n\n fn get_events(&self) -> Option<AnyArc<dyn IEvents>>;\n\n}\n\n\n", "file_path": "crates/aleph-interfaces/src/platform/events.rs", "rank": 30, "score": 186081.24511014594 }, { "content": "///\n\n/// This interface should be used by plugins that wish to register themselves as the engine's mouse\n\n/// provider. Anything that implements this should correctly handle creating and destroying whatever\n\n/// is needed to access the system's mouse, and should be able to give out an `AnyArc<IMouse>` to\n\n/// allow others to retrieve information about and manipulate the mouse.\n\n///\n\npub trait IMouseProvider: IAny + 'static {\n\n ///\n\n /// Returns an `AnyArc` that holds an `IMouse` interface.\n\n ///\n\n /// This will always return the same `IMouse` instance as `IMouseProvider` only supports\n\n /// handling a single mouse device.\n\n ///\n\n /// A return value of `None` should signal that the functionality is not supported.\n\n ///\n\n fn get_mouse(&self) -> Option<AnyArc<dyn IMouse>>;\n\n}\n\n\n", "file_path": "crates/aleph-interfaces/src/platform/mouse.rs", "rank": 31, "score": 186081.24511014594 }, { "content": "///\n\n/// This interface should be used by plugins that wish to register themselves as the engine's\n\n/// keyboard provider. 
Anything that implements this should correctly handle creating and destroying\n\n/// whatever is needed to access the system's keyboard, and should be able to give out an\n\n/// `AnyArc<IKeyboard>` to allow others to retrieve information about and manipulate the keyboard.\n\n///\n\npub trait IKeyboardProvider: IAny + 'static {\n\n ///\n\n /// Returns an `AnyArc` that holds an `IKeyboard` interface.\n\n ///\n\n /// This will always return the same `IKeyboard` instance as `IKeyboardProvider` only supports\n\n /// handling a single keyboard device.\n\n ///\n\n /// A return value of `None` should signal that the functionality is not supported.\n\n ///\n\n fn get_keyboard(&self) -> Option<AnyArc<dyn IKeyboard>>;\n\n}\n\n\n", "file_path": "crates/aleph-interfaces/src/platform/keyboard.rs", "rank": 32, "score": 186081.24511014594 }, { "content": "///\n\n/// This interface should be used by plugins that wish to register themselves as the engine's\n\n/// clipboard provider. Anything that implements this should correctly handle creating and\n\n/// destroying whatever is needed to access the system's clipboard, and should be able to give out\n\n/// an `AnyArc<IClipboard>` to allow others to retrieve information about and manipulate the\n\n/// clipboard.\n\n///\n\npub trait IClipboardProvider: IAny + 'static {\n\n ///\n\n /// Returns an `AnyArc` that holds an `IClipboard` interface.\n\n ///\n\n /// This will always return the same `IClipboard` instance as `IClipboardProvider` only supports\n\n /// handling a single mouse device.\n\n ///\n\n /// A return value of `None` should signal that the functionality is not supported.\n\n ///\n\n fn get_clipboard(&self) -> Option<AnyArc<dyn IClipboard>>;\n\n}\n\n\n", "file_path": "crates/aleph-interfaces/src/platform/clipboard.rs", "rank": 33, "score": 186081.24511014594 }, { "content": "///\n\n/// Checks for the minimum required functions for vma allocator\n\n///\n\npub fn allocator_functions_valid(funcs: &raw::VmaVulkanFunctions) -> 
bool {\n\n if funcs.vkGetPhysicalDeviceProperties.is_none() {\n\n return false;\n\n }\n\n if funcs.vkGetPhysicalDeviceMemoryProperties.is_none() {\n\n return false;\n\n }\n\n if funcs.vkAllocateMemory.is_none() {\n\n return false;\n\n }\n\n if funcs.vkFreeMemory.is_none() {\n\n return false;\n\n }\n\n if funcs.vkMapMemory.is_none() {\n\n return false;\n\n }\n\n if funcs.vkUnmapMemory.is_none() {\n\n return false;\n\n }\n\n if funcs.vkFlushMappedMemoryRanges.is_none() {\n", "file_path": "crates/aleph-vulkan-alloc/src/utils.rs", "rank": 34, "score": 183300.57274235156 }, { "content": "pub trait IFrameTimerProvider: IAny + 'static {\n\n fn get_frame_timer(&self) -> Option<AnyArc<dyn IFrameTimer>>;\n\n}\n\n\n", "file_path": "crates/aleph-interfaces/src/platform/frame_timer.rs", "rank": 35, "score": 180288.1140731824 }, { "content": "///\n\n/// This interface is used to provide access to the list of window events for the current frame.\n\n///\n\n/// Some implementations may need to lock a mutex or read/write lock to provide access to the list\n\n/// safely so this interface is passed to wrap the lock guard\n\n///\n\npub trait IWindowEventsLock {\n\n fn events(&self) -> &[WindowEvent];\n\n}\n", "file_path": "crates/aleph-interfaces/src/platform/window.rs", "rank": 36, "score": 177069.37304964918 }, { "content": "///\n\n/// This interface represents the API expected of something that gives the engine access to a\n\n/// device's mouse.\n\n///\n\npub trait IMouse: IAny + Send + Sync + 'static {\n\n ///\n\n /// Get the current state of the mouse, last updated at the beginning of the frame\n\n ///\n\n fn get_state(&self) -> MouseState;\n\n\n\n ///\n\n /// Set the position of the mouse\n\n ///\n\n fn set_pos(&self, x: i32, y: i32);\n\n\n\n ///\n\n /// Sets the mouse cursor\n\n ///\n\n fn set_cursor(&self, cursor: Cursor);\n\n\n\n ///\n\n /// Makes the cursor visible\n\n ///\n\n fn show_cursor(&self);\n", "file_path": "crates/aleph-interfaces/src/platform/mouse.rs", "rank": 37, 
"score": 176250.87301040822 }, { "content": "///\n\n/// This interface represents the API expected of something that gives the engine access to a\n\n/// device's keyboard.\n\n///\n\npub trait IKeyboard: IAny + Send + Sync + 'static {\n\n ///\n\n /// Get the current state of the mouse, last updated at the beginning of the frame\n\n ///\n\n /// # Warning\n\n ///\n\n /// This will likely lock an RwLock so trying to hold on to this between frames will deadlock\n\n /// the engine.\n\n ///\n\n fn get_state<'a>(&'a self) -> Box<dyn IKeyboardStateLock + 'a>;\n\n\n\n ///\n\n /// Get read only access to this frame's list of mouse events.\n\n ///\n\n /// # Warning\n\n ///\n\n /// This will likely lock an RwLock so trying to hold on to this between frames will deadlock\n\n /// the engine.\n\n ///\n\n fn events<'a>(&'a self) -> Box<dyn IKeyboardEventsLock + 'a>;\n\n}\n\n\n", "file_path": "crates/aleph-interfaces/src/platform/keyboard.rs", "rank": 38, "score": 176250.87301040822 }, { "content": "///\n\n/// This interface represents the API expected of something that gives the engine access to a\n\n/// device's clipboard.\n\n///\n\npub trait IClipboard: IAny + Send + Sync + 'static {\n\n ///\n\n /// Gets the current clipboard text, if there is some.\n\n ///\n\n fn get(&self) -> Option<String>;\n\n\n\n ///\n\n /// Gets the current clipboard text, if there is some. 
This will still allocate, but the null\n\n /// terminator will be preserved in case it is needed.\n\n ///\n\n fn get_null_terminated(&self) -> Option<CString>;\n\n\n\n ///\n\n /// Sets the current clipboard text\n\n ///\n\n fn set(&self, value: &str);\n\n\n\n ///\n\n /// Sets the current clipboard text with an already null terminated string.\n\n ///\n\n /// This could potentially save on an allocation if needed\n\n ///\n\n fn set_null_terminated(&self, value: &CStr);\n\n}\n", "file_path": "crates/aleph-interfaces/src/platform/clipboard.rs", "rank": 39, "score": 176250.87301040822 }, { "content": "///\n\n/// This interface represents the API expected of something that gives the engine access to a\n\n/// device's event queue.\n\n///\n\npub trait IEvents: IAny + Send + Sync + 'static {\n\n fn get<'a>(&'a self) -> Box<dyn IEventsLock + 'a>;\n\n}\n\n\n", "file_path": "crates/aleph-interfaces/src/platform/events.rs", "rank": 40, "score": 176250.87301040822 }, { "content": "fn main() {\n\n if std::env::var(\"TARGET\").unwrap() != \"x86_64-uwp-windows-msvc\" {\n\n return;\n\n }\n\n\n\n let dir = std::env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n\n\n\n println!(\n\n \"cargo:rustc-link-search=native={}\",\n\n std::path::Path::new(&dir).join(\"lib\").display()\n\n );\n\n}\n", "file_path": "crates/aleph-windows-x86_64_msvc_uwp/build.rs", "rank": 41, "score": 171954.07930515107 }, { "content": "pub trait IFrameTimer: IAny + Send + Sync + 'static {\n\n fn delta_time(&self) -> f64;\n\n\n\n fn elapsed_time(&self) -> f64;\n\n}\n", "file_path": "crates/aleph-interfaces/src/platform/frame_timer.rs", "rank": 42, "score": 170702.49510940776 }, { "content": "///\n\n/// This parser will parse the first token after an optional prefix in a number literal. 
The first\n\n/// token after a prefix is special as it must not be an '_' token.\n\n///\n\npub fn number_first<Input: MyStream>() -> impl Parser<Input, Output = char> {\n\n digit()\n\n}\n\n\n", "file_path": "crates/aleph-schema/src/parser/parsers/number.rs", "rank": 43, "score": 170207.5503554045 }, { "content": "///\n\n/// This parser will parse a single token which is valid to find as part of a number after the\n\n/// optional prefix and the first number part.\n\n///\n\n/// This parser will yield the next digit, skipping over any '_' tokens which are considered\n\n/// thousands separators. The '_' tokens are discarded as they are purely visual aids for humans\n\n/// and have no meaning.\n\n///\n\npub fn number_rest<Input: MyStream>() -> impl Parser<Input, Output = char> {\n\n skip_many(token(char::thousands_separator()))\n\n .and(digit())\n\n .map(|v| v.1)\n\n}\n", "file_path": "crates/aleph-schema/src/parser/parsers/number.rs", "rank": 44, "score": 170207.5503554045 }, { "content": "///\n\n/// A parser that parses out an entire decimal number. 
This includes a negation prefix, the whole\n\n/// number part, the decimal point and the fractional part.\n\n///\n\npub fn decimal<Input: MyStream>() -> impl Parser<Input, Output = CompactString> {\n\n // A parser that parses the fractional part of the float, taking a decimal point and optionally\n\n // after another string of digits\n\n let fractional = token(char::decimal_point()).and(optional(whole_number_body()));\n\n\n\n // Combine all the parsers\n\n whole_number()\n\n .and(fractional)\n\n .map(\n\n |(mut first, (_, rest)): (CompactString, (_, Option<CompactString>))| {\n\n first.push(char::decimal_point());\n\n if let Some(rest) = rest {\n\n first.push_str(rest.as_str());\n\n }\n\n first\n\n },\n\n )\n\n .expected(\"decimal number\")\n\n}\n\n\n", "file_path": "crates/aleph-schema/src/parser/parsers/number.rs", "rank": 45, "score": 170207.5503554045 }, { "content": "///\n\n/// A parser that parses out an entire number, including a negation prefix and all the numbers of\n\n/// a base10 whole number.\n\n///\n\npub fn whole_number<Input: MyStream>() -> impl Parser<Input, Output = CompactString> {\n\n whole_number_prefix()\n\n .and(whole_number_body())\n\n .map(|(prefix, body)| {\n\n if let Some(prefix) = prefix {\n\n let mut out = CompactString::new();\n\n out.push(prefix);\n\n out.push_str(&body);\n\n out\n\n } else {\n\n body\n\n }\n\n })\n\n .expected(\"whole number\")\n\n}\n\n\n", "file_path": "crates/aleph-schema/src/parser/parsers/number.rs", "rank": 46, "score": 167987.09746463745 }, { "content": "#[inline]\n\npub fn blob_to_shader(blob: &[u8]) -> D3D12_SHADER_BYTECODE {\n\n D3D12_SHADER_BYTECODE {\n\n pShaderBytecode: blob.as_ptr() as _,\n\n BytecodeLength: blob.len() as _,\n\n }\n\n}\n\n\n", "file_path": "crates/aleph-windows/src/utils.rs", "rank": 47, "score": 167936.24907259113 }, { "content": "///\n\n/// A parser that parses out a base10 whole number.\n\n///\n\n/// # Note\n\n///\n\n/// This parser does not handle a negation prefix and will fail if it 
encounters one.\n\n///\n\npub fn whole_number_body<Input: MyStream>() -> impl Parser<Input, Output = CompactString> {\n\n number_first().and(optional(many(number_rest()))).map(\n\n |(first, rest): (char, Option<CompactString>)| {\n\n let mut out = CompactString::new();\n\n out.push(first);\n\n if let Some(rest) = rest {\n\n out.push_str(&rest);\n\n }\n\n out\n\n },\n\n )\n\n}\n\n\n", "file_path": "crates/aleph-schema/src/parser/parsers/number.rs", "rank": 48, "score": 165849.10538342764 }, { "content": "#[inline]\n\npub fn blob_to_cached_pso(blob: &[u8]) -> D3D12_CACHED_PIPELINE_STATE {\n\n D3D12_CACHED_PIPELINE_STATE {\n\n pCachedBlob: blob.as_ptr() as _,\n\n CachedBlobSizeInBytes: blob.len() as _,\n\n }\n\n}\n\n\n", "file_path": "crates/aleph-windows/src/utils.rs", "rank": 49, "score": 163041.02745730605 }, { "content": "///\n\n/// This parser handles an optional negation prefix for a number. It is intended to be used to parse\n\n/// a leading prefix for a number literal\n\n///\n\npub fn whole_number_prefix<Input: MyStream>() -> impl Parser<Input, Output = Option<char>> {\n\n optional(token(char::negation_prefix()))\n\n}\n\n\n", "file_path": "crates/aleph-schema/src/parser/parsers/number.rs", "rank": 50, "score": 163017.1259055496 }, { "content": "#[inline]\n\npub fn optional_blob_to_shader(blob: Option<&[u8]>) -> D3D12_SHADER_BYTECODE {\n\n match blob {\n\n None => D3D12_SHADER_BYTECODE {\n\n pShaderBytecode: std::ptr::null_mut(),\n\n BytecodeLength: 0,\n\n },\n\n Some(blob) => blob_to_shader(blob),\n\n }\n\n}\n\n\n", "file_path": "crates/aleph-windows/src/utils.rs", "rank": 51, "score": 160076.34663397702 }, { "content": "///\n\n/// This trait represents the core trait for the aleph interface system.\n\n///\n\n/// The is the core interface that all valid interface objects must implement. It provides the\n\n/// central function `__query_interface` that is used to cast one interface to another.\n\n///\n\n/// You should not have to implement this trait directly. 
Instead use the `declare_interfaces!`\n\n/// macro provided by this crate.\n\n///\n\npub trait IAny: 'static {\n\n ///\n\n /// The `query_interface` function that should only be accessed through the `AnyRef` wrapper.\n\n ///\n\n /// This function should return a `TraitObject` for the given `TypeId` if, and only if, the\n\n /// concrete type behind the `IAny` implements (or actually is) the given type.\n\n ///\n\n /// This is *very* unsafe to implement manually, so don't. Unless there's a *very* good reason,\n\n /// just use `declare_interfaces!`.\n\n ///\n\n fn __query_interface(&self, _target: TypeId) -> Option<TraitObject> {\n\n None\n\n }\n\n}\n\n\n", "file_path": "crates/aleph-any/src/any.rs", "rank": 52, "score": 156541.3898645183 }, { "content": "#[inline]\n\npub fn optional_blob_to_cached_pso(blob: Option<&[u8]>) -> D3D12_CACHED_PIPELINE_STATE {\n\n match blob {\n\n None => D3D12_CACHED_PIPELINE_STATE {\n\n pCachedBlob: std::ptr::null_mut(),\n\n CachedBlobSizeInBytes: 0,\n\n },\n\n Some(blob) => blob_to_cached_pso(blob),\n\n }\n\n}\n\n\n", "file_path": "crates/aleph-windows/src/utils.rs", "rank": 53, "score": 155551.59103577244 }, { "content": "///\n\n/// Initializes whatever log backend is being used for the platform\n\n///\n\npub fn init() {\n\n init_internal();\n\n}\n", "file_path": "crates/aleph-logger/src/lib.rs", "rank": 54, "score": 152423.1686775753 }, { "content": "#[test]\n\npub fn arc_test_1() {\n\n // Our counter for running the test\n\n let counter = Arc::new(AtomicUsize::default());\n\n\n\n // Wrap our counter in an AnyArc\n\n let test = Test(counter.clone());\n\n let mut test = AnyArc::new(test);\n\n\n\n // Get our interface casted to another interface\n\n let mut test_other = test.query_interface::<dyn ITestOther>().unwrap();\n\n\n\n test.test_fn();\n\n assert_eq!(counter.load(Ordering::Relaxed), 1);\n\n\n\n test_other.test_fn_other();\n\n assert_eq!(counter.load(Ordering::Relaxed), 6);\n\n\n\n test.test_fn();\n\n 
assert_eq!(counter.load(Ordering::Relaxed), 7);\n\n\n", "file_path": "crates/aleph-any/src/tests.rs", "rank": 55, "score": 152417.31910815468 }, { "content": "#[test]\n\npub fn box_test_1() {\n\n // Our counter for running the test\n\n let counter = Arc::new(AtomicUsize::default());\n\n\n\n // Wrap our counter in an AnyArc\n\n let test = Test(counter.clone());\n\n let test = Box::new(test);\n\n\n\n test.test_fn();\n\n assert_eq!(counter.load(Ordering::Relaxed), 1);\n\n\n\n test.test_fn_other();\n\n assert_eq!(counter.load(Ordering::Relaxed), 6);\n\n\n\n test.test_fn();\n\n assert_eq!(counter.load(Ordering::Relaxed), 7);\n\n\n\n test.test_fn_other();\n\n assert_eq!(counter.load(Ordering::Relaxed), 12);\n\n\n", "file_path": "crates/aleph-any/src/tests.rs", "rank": 56, "score": 152417.31910815468 }, { "content": "pub fn translate_mouse_event(\n\n event: &MouseEvent,\n\n modifiers: &egui::Modifiers,\n\n) -> Option<egui::Event> {\n\n match event {\n\n MouseEvent::MouseMotion(e) => {\n\n let pos = egui::Pos2::new(e.x as f32, e.y as f32);\n\n let event = egui::Event::PointerMoved(pos);\n\n Some(event)\n\n }\n\n MouseEvent::MouseButtonDown(e) => {\n\n let pos = egui::Pos2::new(e.x as f32, e.y as f32);\n\n let button = translate_mouse_button(&e.button)?;\n\n let event = egui::Event::PointerButton {\n\n pos,\n\n button,\n\n pressed: true,\n\n modifiers: modifiers.clone(),\n\n };\n\n Some(event)\n", "file_path": "plugins/aleph-egui/src/utils.rs", "rank": 57, "score": 148006.2822806884 }, { "content": "#[test]\n\npub fn test_valid_struct() {\n\n let f32_field: Field<'_> = FieldType::Primitive(PrimitiveType::F32).into();\n\n let mut vector2 = Struct::default();\n\n vector2.fields.push((\"x\".into(), f32_field.clone()));\n\n vector2.fields.push((\"y\".into(), f32_field.clone()));\n\n vector2.attributes.push(\n\n sexpr::ast::ListBuilder::new()\n\n .add_word(\"!doc\", None)\n\n .add_string(\" A two component vector of floats\", None)\n\n .build(),\n\n );\n\n\n\n let mut aleph = 
Module::default();\n\n aleph.children.push((\"Vector2\".into(), vector2.into()));\n\n\n\n let mut expected = Module::default();\n\n expected.children.push((\"aleph\".into(), aleph.into()));\n\n\n\n test_valid_file(\"./schemas/valid_struct.schema\", expected);\n\n}\n\n\n", "file_path": "crates/aleph-schema/src/tests.rs", "rank": 58, "score": 148006.2822806884 }, { "content": "///\n\n/// Takes the source string and an offset within the source string and constructs a string of the\n\n/// form:\n\n/// ```ignore\n\n/// 4| )\n\n/// 5| (def-struct Vector3\n\n/// 6| (field x f32 (default 0.0!))\n\n/// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^\n\n/// ```\n\n///\n\n/// This function is intended to be used to highlight a position inside the source string, typically\n\n/// for error reporting.\n\n///\n\n/// The function returns the line number\n\n///\n\npub fn highlight_code(\n\n source: &str,\n\n position: usize,\n\n) -> Result<(SourcePosition, String), std::fmt::Error> {\n\n use std::fmt::Write;\n\n\n\n // The string to write into and output\n\n let mut output = String::new();\n\n\n\n // Identify which line the error is on\n\n let line_history = produce_error_line_history(source, position);\n\n\n\n // Get the width in characters of the widest line number so we know how many characters to\n\n // justify to when printing line numbers\n\n let width: usize = line_history.iter().fold(1, |width, v| {\n\n if let Some((line_number, _, _)) = v {\n\n let new_width = 1 + (*line_number as f64).log10() as usize;\n\n if width < new_width {\n\n new_width\n\n } else {\n", "file_path": "crates/aleph-combine-utils/src/lib.rs", "rank": 59, "score": 148006.2822806884 }, { "content": "pub fn get_egui_input(\n\n window: &dyn IWindow,\n\n mouse: &dyn IMouse,\n\n keyboard: &dyn IKeyboard,\n\n frame_timer: &dyn IFrameTimer,\n\n events: &dyn IEvents,\n\n) -> egui::RawInput {\n\n let window_size = window.size();\n\n\n\n let scroll_delta = get_egui_scroll_delta(mouse);\n\n\n\n let screen_rect = 
egui::Pos2::new(window_size.0 as f32, window_size.1 as f32);\n\n let screen_rect = Some(egui::Rect::from_min_max(Default::default(), screen_rect));\n\n\n\n // TODO: Integrate with SDL2 hdpi stuff\n\n let pixels_per_point = Some(1.0);\n\n\n\n let time = Some(frame_timer.elapsed_time());\n\n\n\n let predicted_dt = 1.0 / window.refresh_rate() as f32;\n", "file_path": "plugins/aleph-egui/src/utils.rs", "rank": 60, "score": 148006.2822806884 }, { "content": "#[test]\n\npub fn test_valid_table() {\n\n let mut transform_field: Field<'_> = FieldType::StructRef(\"Transform\".into()).into();\n\n transform_field.attributes.push(\n\n sexpr::ast::ListBuilder::new()\n\n .add_word(\"!doc\", None)\n\n .add_string(\" The human\\'s position in world space\", None)\n\n .build(),\n\n );\n\n\n\n let mut string_field: Field<'_> = FieldType::StructRef(\"string\".into()).into();\n\n string_field.attributes.push(\n\n sexpr::ast::ListBuilder::new()\n\n .add_word(\"!doc\", None)\n\n .add_string(\" The human\\'s name\", None)\n\n .build(),\n\n );\n\n\n\n let mut f32_field: Field<'_> = FieldType::Primitive(PrimitiveType::F32).into();\n\n f32_field.attributes.push(\n\n sexpr::ast::ListBuilder::new()\n", "file_path": "crates/aleph-schema/src/tests.rs", "rank": 61, "score": 148006.2822806884 }, { "content": "#[test]\n\npub fn test_invalid_duplicate_field() {\n\n test_invalid_file(\n\n \"./schemas/invalid_duplicate_field.schema\",\n\n Error::DuplicateEntity {\n\n span: 81..82,\n\n duplicate: 59..60,\n\n },\n\n );\n\n}\n\n\n", "file_path": "crates/aleph-schema/src/tests.rs", "rank": 62, "score": 145948.27457962552 }, { "content": "#[test]\n\npub fn test_valid_nested_modules() {\n\n let mesh = Module::default();\n\n\n\n let mut aleph = Module::default();\n\n aleph.children.push((\"mesh\".into(), mesh.into()));\n\n\n\n let mut expected = Module::default();\n\n expected.children.push((\"aleph\".into(), aleph.into()));\n\n\n\n test_valid_file(\"./schemas/valid_nested_modules.schema\", 
expected);\n\n}\n\n\n", "file_path": "crates/aleph-schema/src/tests.rs", "rank": 63, "score": 145948.27457962552 }, { "content": "///\n\n/// The interface used by plugins to manipulate their initialization and execution order.\n\n///\n\n/// The methods declared directly on this trait are not meant to be used directly. There are wrapper\n\n/// functions declared that make them easier to use. This level of indirection is required to make\n\n/// this trait object safe.\n\n///\n\n/// The methods on this trait are wrapped with generic functions that ask for generic type\n\n/// parameters instead of the raw `TypeId`. Example wrapper:\n\n///\n\n/// ```ignore\n\n/// use std::any::TypeId;\n\n/// use aleph_interfaces::any::IAny;\n\n///\n\n/// pub trait IPluginRegistrar {\n\n/// /// Object safe implementation\n\n/// fn __depends_on(&mut self, dependency: TypeId);\n\n/// }\n\n/// impl dyn IPluginRegistrar {\n\n/// /// Generic wrapper\n\n/// pub fn depends_on<T: IAny>(&mut self) {\n\n/// self.__depends_on(TypeId::of::<T>())\n\n/// }\n\n/// }\n\n/// ```\n\n///\n\n/// The `TypeId`/type parameter can either be a concrete type, such as a specific plugin\n\n/// implementation, or an abstract interface like `IWindowProvider`. This way it is possible for a\n\n/// plugin to depend on both specific plugins (i.e `WindowProviderSDL2`) or they can declare a\n\n/// dependency that is generic over arbitrary plugins that provide an abstract interface\n\n/// (i.e `IWindowProvider`) implementation.\n\n///\n\npub trait IPluginRegistrar: 'static {\n\n /// Object safe implementation of `depends_on`. See wrapper for more info.\n\n fn __depends_on(&mut self, dependency: TypeId);\n\n\n\n /// Object safe implementation of `provides_interface`. See wrapper for more info.\n\n fn __provides_interface(&mut self, provides: TypeId);\n\n\n\n /// Object safe implementation of `must_init_after`. 
See wrapper for more info.\n\n fn __must_init_after(&mut self, requires: TypeId);\n\n\n\n /// Object safe implementation of `must_update_after`. See wrapper for more info.\n\n fn __must_update_after(&mut self, requires: TypeId);\n\n\n\n /// Register that the plugin should have their update function called.\n\n fn should_update(&mut self);\n\n}\n\n\n\nimpl dyn IPluginRegistrar {\n\n /// Declares that the plugin depends on the existence of another plugin given by the type\n\n /// parameter. This can be used to declare that one plugin requires another plugin, or another\n", "file_path": "crates/aleph-interfaces/src/plugin/mod.rs", "rank": 64, "score": 145208.62904704822 }, { "content": "///\n\n/// An abstract interface over any potential concrete implementation of an accessor into the plugin\n\n/// registry. This can be used to retrieve interface implementations, request the main loop exit,\n\n/// etc.\n\n///\n\npub trait IRegistryAccessor: 'static {\n\n /// Object safe implementation of `get_interface`. See wrapper for more info.\n\n fn __get_interface(&self, interface: TypeId) -> Option<AnyArc<dyn IAny>>;\n\n\n\n /// Registry quit handle which can be freely sent to other threads. 
The object is used to\n\n /// request the engine/plugin registry to exit.\n\n fn quit_handle(&self) -> AnyArc<dyn IQuitHandle>;\n\n}\n\n\n\nimpl dyn IRegistryAccessor {\n\n /// Get a reference counted handle to the interface with the type given by the `T` type\n\n /// parameter.\n\n pub fn get_interface<T: IAny + ?Sized>(&self) -> Option<AnyArc<T>> {\n\n self.__get_interface(TypeId::of::<T>())\n\n .map(|v| v.query_interface::<T>().unwrap())\n\n }\n\n}\n\n\n", "file_path": "crates/aleph-interfaces/src/plugin/mod.rs", "rank": 65, "score": 145187.09203017873 }, { "content": "/// A thread safe cell that is used to pass ownership of a [Schedule] around to different users.\n\n///\n\n/// # Implementor Note\n\n///\n\n/// It is assumed that an implementation of this interface *does not* use a lock\n\n/// (Mutex, RwLock, etc). This interface was designed with an implementation backed by an AtomicCell\n\n/// which can hand a pointer sized object between threads in a thread safe way without locks.\n\n///\n\n/// It would be best to respect this expectation for performance reasons.\n\npub trait IScheduleCell: 'static {\n\n /// Take ownership of the schedule and remove it from the cell, leaving the cell empty.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Will panic if [IScheduleCell::take] is called while the cell is empty.\n\n fn take(&self) -> Box<Schedule>;\n\n\n\n /// Return ownership of the schedule to the cell, placing the given schedule back into the cell.\n\n ///\n\n /// # Warning\n\n ///\n\n /// While technically a different schedule can be placed back into an empty cell, it is likely a\n\n /// very bad idea to do so.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Will panic if the cell is not empty.\n\n fn store(&self, schedule: Box<Schedule>);\n\n\n", "file_path": "crates/aleph-interfaces/src/schedule/mod.rs", "rank": 66, "score": 145187.09203017873 }, { "content": "///\n\n/// A generic interface expected of a type that describes the components and resources accessed by\n\n/// 
something.\n\n///\n\n/// A task would use this interface to declare the resources it accesses and the scheduler will then\n\n/// use the declared accesses to schedule tasks in parallel so their access conditions are met.\n\n///\n\npub trait AccessDescriptor: 'static {\n\n /// Caller uses this to declare a shared/read access to the given component type\n\n fn reads_component_with_id(&mut self, component: ComponentTypeId);\n\n\n\n /// Caller uses this to declare a exclusive/write access to the given component type\n\n fn writes_component_with_id(&mut self, component: ComponentTypeId);\n\n\n\n /// Caller uses this to declare a shared/read access to the given resource\n\n fn reads_resource_with_id(&mut self, resource: ResourceId);\n\n\n\n /// Caller uses this to declare a exclusive/write access to the given resource\n\n fn writes_resource_with_id(&mut self, resource: ResourceId);\n\n\n\n /// Caller uses this to declare the label of another system that `self` should run before\n\n fn runs_before_label(&mut self, system: Box<dyn Label>);\n\n\n\n /// Caller uses this to declare the label of another system that `self` should run after\n\n fn runs_after_label(&mut self, system: Box<dyn Label>);\n\n}\n\n\n", "file_path": "crates/aleph-ecs/src/scheduler/stage.rs", "rank": 67, "score": 145187.09203017873 }, { "content": "#[inline]\n\npub fn format_bytes_for_image(\n\n format: VkFormat,\n\n width: usize,\n\n height: usize,\n\n depth: usize,\n\n) -> Option<usize> {\n\n let bytes_per_block = format_bytes_per_block(format)? 
as usize;\n\n let block_width = format.block_width() as usize;\n\n let block_height = format.block_height() as usize;\n\n let block_depth = format.block_depth() as usize;\n\n\n\n let width = width / block_width;\n\n let height = height / block_height;\n\n let depth = depth / block_depth;\n\n\n\n Some(width * height * depth * bytes_per_block)\n\n}\n\n\n\n///\n\n/// Returns the number of bits in the depth component of the given format.\n\n///\n\n/// Returns `None` if there is no depth component\n\n///\n", "file_path": "crates/aleph-ktx/src/format/bit_count.rs", "rank": 68, "score": 143980.02891954058 }, { "content": "///\n\n/// The interface expected of a [`System`] object.\n\n///\n\npub trait System: Any + 'static {\n\n /// An arbitrary type that can be passed into [`System::execute`].\n\n type In;\n\n\n\n /// The return type of the [`System::execute`] function.\n\n type Out;\n\n\n\n /// Will be called by a scheduler once to retrieve the set of components and resources the\n\n /// system accesses. It **will** be called before [`System::execute`].\n\n ///\n\n /// When the function is called a [`AccessDescriptor`] is passed in. The implementation must\n\n /// use the [`AccessDescriptor`] interface to declare the components and resources the system\n\n /// want's to access.\n\n ///\n\n /// The [`AccessDescriptor`] is used to by a scheduler to order system execution. 
A scheduler\n\n /// *may* execute systems in parallel if their [`AccessDescriptor`] do not intersect with the\n\n /// exclusive accesses of other systems.\n\n fn declare_access(&mut self, access: &mut dyn AccessDescriptor);\n\n\n\n // /// This function will be called once by a scheduler before any call to [`System::execute`] to\n", "file_path": "crates/aleph-ecs/src/system/mod.rs", "rank": 69, "score": 142931.67002426335 }, { "content": "///\n\n/// The interface expected of an execution stage\n\n///\n\npub trait Stage: Any + 'static {\n\n /// This will be called by a scheduler exactly once during an execution cycle.\n\n fn run(&mut self, world: &mut World);\n\n}\n\n\n\nimpl dyn Stage {\n\n /// A vendored in version of [`Any::is`]\n\n #[inline]\n\n pub fn is<T: Stage>(&self) -> bool {\n\n // Get `TypeId` of the type this function is instantiated with.\n\n let t = TypeId::of::<T>();\n\n\n\n // Get `TypeId` of the type in the trait object (`self`).\n\n let concrete = self.type_id();\n\n\n\n // Compare both `TypeId`s on equality.\n\n t == concrete\n\n }\n\n\n\n /// A vendored in version of [`Any::downcast_ref`]\n", "file_path": "crates/aleph-ecs/src/scheduler/stage.rs", "rank": 70, "score": 142931.67002426335 }, { "content": "///\n\n/// An output directory that is specific to this crate inside the `target` directory.\n\n///\n\npub fn cargo_out_dir() -> PathBuf {\n\n let out_dir = std::env::var(\"OUT_DIR\").unwrap();\n\n Path::new(&out_dir).to_path_buf()\n\n}\n\n\n", "file_path": "crates/aleph-compile/src/lib.rs", "rank": 71, "score": 141171.8932371229 }, { "content": "#[inline]\n\npub fn target_architecture() -> Architecture {\n\n get_architecture_from(&env::var(\"TARGET\").unwrap())\n\n}\n\n\n\n///\n\n/// Returns the host build profile\n\n///\n\n/// # Warning\n\n///\n\n/// Only works in a build script\n\n///\n", "file_path": "crates/aleph-target-build/src/build.rs", "rank": 72, "score": 141171.8932371229 }, { "content": "///\n\n/// The location of the `Cargo.toml` for 
the current crate\n\n///\n\npub fn manifest_dir() -> PathBuf {\n\n let manifest_dir = std::env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n\n Path::new(&manifest_dir).to_path_buf()\n\n}\n\n\n", "file_path": "crates/aleph-compile/src/lib.rs", "rank": 73, "score": 141171.8932371229 }, { "content": "#[inline]\n\npub fn host_architecture() -> Architecture {\n\n get_architecture_from(&env::var(\"HOST\").unwrap())\n\n}\n\n\n\n///\n\n/// Returns the target architecture\n\n///\n\n/// # Warning\n\n///\n\n/// Only works in a build script\n\n///\n", "file_path": "crates/aleph-target-build/src/build.rs", "rank": 74, "score": 141171.8932371229 }, { "content": "///\n\n/// Where to place build artifacts like .dll or .so files for this build. This will always be inside\n\n/// cargo's `target` directory.\n\n///\n\npub fn artifacts_dir() -> PathBuf {\n\n let mut out_dir = cargo_target_dir();\n\n out_dir.push(\"artifacts\");\n\n out_dir\n\n}\n\n\n", "file_path": "crates/aleph-compile/src/lib.rs", "rank": 75, "score": 141171.8932371229 }, { "content": "///\n\n/// The cargo output directory, usually `target/debug` or `target/release`\n\n///\n\npub fn cargo_target_dir() -> PathBuf {\n\n let mut out_dir = cargo_out_dir();\n\n assert!(out_dir.pop());\n\n assert!(out_dir.pop());\n\n assert!(out_dir.pop());\n\n out_dir\n\n}\n\n\n", "file_path": "crates/aleph-compile/src/lib.rs", "rank": 76, "score": 139209.33348441572 }, { "content": "///\n\n/// Returns the number of physical cores (non SMT cores) on the current host\n\n///\n\npub fn physical_core_count() -> u64 {\n\n num_cpus::get_physical() as u64\n\n}\n\n\n", "file_path": "crates/aleph-sys-info/src/lib.rs", "rank": 77, "score": 139203.64757703798 }, { "content": "///\n\n/// Returns the number of logical cores (physical + SMT cores) on the current host\n\n///\n\npub fn logical_core_count() -> u64 {\n\n num_cpus::get() as u64\n\n}\n\n\n", "file_path": "crates/aleph-sys-info/src/lib.rs", "rank": 78, "score": 139203.64757703798 }, { "content": 
"pub trait BindingMapperFn: Fn(&Binding) -> BufferBindingType {}\n\nimpl<T: Fn(&Binding) -> BufferBindingType> BindingMapperFn for T {}\n\n\n\n///\n\n/// Represents the reflection of a single descriptor set\n\n///\n\n#[derive(Clone, Hash, PartialEq, Eq, Debug)]\n\npub struct DescriptorSetReflection {\n\n set: u32,\n\n bindings: Vec<Binding>,\n\n}\n\n\n\nimpl DescriptorSetReflection {\n\n ///\n\n /// Returns the list of bindings in this descriptor set\n\n ///\n\n pub fn bindings(&self) -> &[Binding] {\n\n &self.bindings\n\n }\n\n\n", "file_path": "crates/aleph-vulkan/src/reflect/set.rs", "rank": 79, "score": 137936.55027922973 }, { "content": "#[inline]\n\nfn max_val(is_float: bool, is_signed: bool, bits: u8) -> u32 {\n\n let bits_clamped = u32::min(bits as u32, 32);\n\n if is_float {\n\n 0x7F800000\n\n } else if is_signed {\n\n !0 ^ (1 << (bits_clamped - 1))\n\n } else {\n\n let max = 0xFFFFFFFFu64 << bits_clamped as u64;\n\n let max = (max & 0xFFFFFFFFu64) as u32;\n\n max ^ 0xFFFFFFFFu32\n\n }\n\n}\n", "file_path": "crates/aleph-ktx/src/data_format_descriptor/sample_info.rs", "rank": 80, "score": 137758.70756133244 }, { "content": "#[inline]\n\nfn min_val(is_float: bool, is_signed: bool, bits: u8) -> u32 {\n\n let bits_clamped = u32::min(bits as u32, 32);\n\n if is_float {\n\n 0xBF800000\n\n } else if is_signed {\n\n 1 << (bits_clamped - 1)\n\n } else {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "crates/aleph-ktx/src/data_format_descriptor/sample_info.rs", "rank": 81, "score": 137758.70756133244 }, { "content": "#[inline]\n\npub fn target_build_type() -> BuildType {\n\n get_build_type_from(&env::var(\"PROFILE\").unwrap())\n\n}\n\n\n\n///\n\n/// Returns the host platform (operating system)\n\n///\n\n/// # Warning\n\n///\n\n/// Only works in a build script\n\n///\n", "file_path": "crates/aleph-target-build/src/build.rs", "rank": 82, "score": 137319.41670374834 }, { "content": "#[inline]\n\npub fn host_build_type() -> BuildType {\n\n 
target_build_type()\n\n}\n\n\n\n///\n\n/// Returns the target build profile\n\n///\n\n/// # Warning\n\n///\n\n/// Only works in a build script\n\n///\n", "file_path": "crates/aleph-target-build/src/build.rs", "rank": 83, "score": 137319.41670374834 }, { "content": "///\n\n/// Returns the amount of memory installed in the system in bytes. A `None` value indicates that\n\n/// the information could not be retrieved successfully\n\n///\n\npub fn installed_memory() -> Option<u64> {\n\n *SYSTEM_MEMORY\n\n}\n", "file_path": "crates/aleph-sys-info/src/lib.rs", "rank": 84, "score": 135181.80055486225 }, { "content": "fn vector3() -> crate::ast::ListBuilder<'static> {\n\n crate::ast::ListBuilder::new()\n\n .add_word(\"def-struct\", None)\n\n .add_word(\"Vector3\", None)\n\n .add_list(float_field_default_0(\"x\"), None)\n\n .add_list(float_field_default_0(\"y\"), None)\n\n .add_list(float_field_default_0(\"z\"), None)\n\n}\n\n\n", "file_path": "crates/aleph-sexpr/src/tests.rs", "rank": 85, "score": 133779.7425568437 }, { "content": "fn monster() -> crate::ast::ListBuilder<'static> {\n\n let default_name = default(crate::ast::Atom::string(\"default-monster\"));\n\n crate::ast::ListBuilder::new()\n\n .add_word(\"def-table\", None)\n\n .add_word(\"Monster\", None)\n\n .add_list(field(\"position\", \"Vector3\"), None)\n\n .add_list(field(\"target\", \"Vector3\"), None)\n\n .add_list(field_default(\"name\", \"string\", default_name), None)\n\n}\n\n\n", "file_path": "crates/aleph-sexpr/src/tests.rs", "rank": 86, "score": 133779.7425568437 }, { "content": "fn vector2() -> crate::ast::ListBuilder<'static> {\n\n crate::ast::ListBuilder::new()\n\n .add_word(\"def-struct\", None)\n\n .add_word(\"Vector2\", None)\n\n .add_list(float_field_default_0(\"x\"), None)\n\n .add_list(float_field_default_0(\"y\"), None)\n\n}\n\n\n", "file_path": "crates/aleph-sexpr/src/tests.rs", "rank": 87, "score": 133779.7425568437 }, { "content": "fn soldier() -> crate::ast::ListBuilder<'static> {\n\n let 
default_health = default(crate::ast::Atom::word(\"1_000_000.0\"));\n\n crate::ast::ListBuilder::new()\n\n .add_word(\"def-table\", None)\n\n .add_word(\"Soldier\", None)\n\n .add_list(field(\"position\", \"aleph::Vector3\"), None)\n\n .add_list(field(\"target\", \"aleph::Vector3\"), None)\n\n .add_list(field_default(\"health\", \"f64\", default_health), None)\n\n}\n\n\n", "file_path": "crates/aleph-sexpr/src/tests.rs", "rank": 88, "score": 133779.7425568437 }, { "content": "fn __action10<'input>((_, s, _): (usize, &'input str, usize)) -> crate::ast::Atom<'input> {\n\n crate::ast::Atom::string(s)\n\n}\n\n\n", "file_path": "crates/aleph-sexpr/src/parser/mod.rs", "rank": 89, "score": 133346.889312962 }, { "content": "fn __action11<'input>((_, w, _): (usize, &'input str, usize)) -> crate::ast::Atom<'input> {\n\n crate::ast::Atom::word(w)\n\n}\n\n\n", "file_path": "crates/aleph-sexpr/src/parser/mod.rs", "rank": 90, "score": 133346.889312962 }, { "content": "/// A thread safe cell that is used to pass ownership of a [World] around to different users.\n\n///\n\n/// # Implementor Note\n\n///\n\n/// It is assumed that an implementation of this interface *does not* use a lock\n\n/// (Mutex, RwLock, etc). 
This interface was designed with an implementation backed by an AtomicCell\n\n/// which can hand a pointer sized object between threads in a thread safe way without locks.\n\n///\n\n/// It would be best to respect this expectation for performance reasons.\n\npub trait IWorldCell: Send + Sync + 'static {\n\n /// Take ownership of the world and remove it from the cell, leaving the cell empty.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Will panic if [IWorldCell::take] is called while the cell is empty.\n\n fn take(&self) -> Box<World>;\n\n\n\n /// Return ownership of the world to the cell, placing the given world back into the cell.\n\n ///\n\n /// # Warning\n\n ///\n\n /// While technically a different world can be placed back into an empty cell, it is likely a\n\n /// very bad idea to do so.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Will panic if the cell is not empty.\n\n fn store(&self, world: Box<World>);\n\n\n", "file_path": "crates/aleph-interfaces/src/world/mod.rs", "rank": 91, "score": 133317.94224693265 }, { "content": "fn produce_error_line_history(source: &str, position: usize) -> [Option<(usize, usize, &str)>; 3] {\n\n let mut line_history = [None; 3];\n\n for (line_number, line) in source.lines().enumerate() {\n\n // We need to know the span within the original string this line represents to compare with\n\n // our reported error pos\n\n let line_pos = line.as_ptr() as usize - source.as_ptr() as usize;\n\n let line_end = line_pos + line.len();\n\n\n\n // Append to the line history, pushing old entries out of the list\n\n line_history[2] = line_history[1].take();\n\n line_history[1] = line_history[0].take();\n\n line_history[0] = Some((line_number, line_pos, line));\n\n\n\n // Check if the error is on this line\n\n if position >= line_pos && position <= line_end {\n\n break;\n\n }\n\n }\n\n line_history\n\n}\n\n\n", "file_path": "crates/aleph-combine-utils/src/lib.rs", "rank": 92, "score": 133300.08070524922 }, { "content": "pub fn display_id(f: &mut 
Formatter<'_>, val: NonZeroU64) -> Result {\n\n let chunks = chunks(val.get());\n\n f.write_fmt(format_args!(\n\n \"{:X}-{:X}-{:X}-{:X}\",\n\n chunks[0].clone(),\n\n chunks[1].clone(),\n\n chunks[2].clone(),\n\n chunks[3].clone()\n\n ))\n\n}\n\n\n", "file_path": "crates/aleph-interfaces/src/archive/utils.rs", "rank": 93, "score": 132472.377543124 }, { "content": "///\n\n/// This trait specifies the requirements of a type that will be used as a [`Resource`] within the\n\n/// scheduler.\n\n///\n\n/// Resources will be scheduled for parallel access so the must implement [`Send`] and [`Sync`] as\n\n/// they **will** be shared between threads. Resources will also be type-erased and so [`Any`] is\n\n/// needed so the concrete type can be recovered safely at runtime.\n\n///\n\n/// This trait will be automatically implemented on any type that meets these requirements.\n\n///\n\npub trait Resource: Any + Send + Sync + 'static {}\n\n\n\nimpl<T: Any + Send + Sync + 'static> Resource for T {}\n\n\n\n///\n\n/// The type that is used for identifying a component type by ID.\n\n///\n\n/// The Rust friendly interface uses the [`TypeId`] as the [`ResourceId`] as this built-in feature\n\n/// meets the requirements of an ephemeral ID (doesn't need to be stable between compilations) that\n\n/// uniquely identifies a [`Resource`]. We unwrap the internal `u64` in [`TypeId`] for FFI purposes.\n\n/// External resources can be provided via FFI, where the FFI caller provides their own\n\n/// [`ResourceId`] too. 
Therefore we need to pin down the memory layout of this type.\n\n///\n\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]\n\n#[repr(transparent)]\n\npub struct ResourceId(u64);\n\n\n\nimpl ResourceId {\n\n /// Returns the [`ResourceId`] of the given resource.\n\n ///\n", "file_path": "crates/aleph-ecs/src/world/resource.rs", "rank": 94, "score": 132023.57550790726 }, { "content": "///\n\n/// A trait that can be implemented on types that support having debug names attached with their\n\n/// handles\n\n///\n\npub trait DebugName {\n\n ///\n\n /// Adds the given debug name to the handle.\n\n ///\n\n /// Unsafe as there's no way to guarantee a valid handle is being passed\n\n ///\n\n unsafe fn add_debug_name(&self, device: &Device, name: &CStr);\n\n}\n\n\n\nmacro_rules! implement_debug_name_for {\n\n ($handle:ty, $obj_type:expr) => {\n\n impl $crate::DebugName for $handle {\n\n unsafe fn add_debug_name(&self, device: &$crate::Device, name: &std::ffi::CStr) {\n\n use erupt::extensions::ext_debug_utils::DebugUtilsObjectNameInfoEXTBuilder;\n\n let ext_loaded = device.instance().enabled().ext_debug_utils;\n\n if ext_loaded && *self != Default::default() {\n\n let name_info = DebugUtilsObjectNameInfoEXTBuilder::new()\n\n .object_handle(self.0 as u64)\n\n .object_type($obj_type)\n\n .object_name(name);\n", "file_path": "crates/aleph-vulkan-core/src/debug/name.rs", "rank": 95, "score": 131930.46515842018 }, { "content": "fn default_0_float() -> crate::ast::ListBuilder<'static> {\n\n default(crate::ast::Atom::word(\"0.0\"))\n\n}\n\n\n", "file_path": "crates/aleph-sexpr/src/tests.rs", "rank": 96, "score": 131892.1848317905 }, { "content": "fn egui_font_definitions(jetbrains: bool) -> egui::FontDefinitions {\n\n let mut font_data = BTreeMap::new();\n\n let mut fonts_for_family = BTreeMap::new();\n\n\n\n let jetbrains_mono_name = \"JetbrainsMono\";\n\n let jetbrains_mono = crate::fonts::jetbrains_mono_regular();\n\n let cascadia_code_name = \"CascadiaCode\";\n\n 
let cascadia_code = crate::fonts::cascadia_code();\n\n let noto_sans_name = \"NotoSans-Regular\";\n\n let noto_sans = crate::fonts::noto_sans_regular();\n\n let noto_emoji_name = \"NotoEmoji-Regular\";\n\n let noto_emoji = crate::fonts::noto_emoji_regular();\n\n let emoji_icons_name = \"emoji-icon-font\";\n\n let emoji_icons = crate::fonts::emoji_icon_font();\n\n\n\n let monospace_name = if jetbrains {\n\n font_data.insert(\n\n jetbrains_mono_name.to_owned(),\n\n Cow::Borrowed(jetbrains_mono),\n\n );\n", "file_path": "plugins/aleph-egui/src/traits.rs", "rank": 97, "score": 130719.20192676628 }, { "content": "///\n\n/// This trait needs to be implemented by any type that wishes to be used as a component\n\n///\n\npub trait Component: 'static + Sized + Send + Sync {\n\n fn get_type_description() -> ComponentTypeDescription;\n\n}\n\n\n\nimpl<T: 'static + Sized + Send + Sync> Component for T {\n\n #[inline]\n\n fn get_type_description() -> ComponentTypeDescription {\n\n // Depending on whether or not `T` has a drop implementation we produce the virtual drop\n\n // functions\n\n let fn_drop = if std::mem::needs_drop::<T>() {\n\n unsafe extern \"C\" fn drop_fn<U: Component>(v: *mut u8) {\n\n (v as *mut U).drop_in_place()\n\n }\n\n\n\n let fn_drop: unsafe extern \"C\" fn(*mut u8) = drop_fn::<T>;\n\n\n\n Some(fn_drop)\n\n } else {\n\n None\n\n };\n", "file_path": "crates/aleph-ecs/src/world/component.rs", "rank": 98, "score": 130134.1018517485 }, { "content": "///\n\n/// `IArchive` provides the required interface for an archive format to integrate with the aleph\n\n/// framework.\n\n///\n\npub trait IArchive: IAny + Send + Sync + 'static {\n\n /// Returns the archive's identifier. 
This is used as part of `AssetID` and must be a stable\n\n /// value that is inseparable from the archive.\n\n fn identifier(&self) -> ArchiveID;\n\n\n\n /// Will lookup a value by its ID that is local to this specific archive.\n\n fn lookup(&self, id: AssetLocalID) -> Result<AssetDescriptor, AssetLookupError>;\n\n}\n\n\n", "file_path": "crates/aleph-interfaces/src/archive/archive/mod.rs", "rank": 99, "score": 130134.1018517485 } ]
Rust
desktop/src/gui/styles.rs
koompi/koompi-desktop
497a16b68befd3e99cd75169f4d7800312bde2e8
use iced::{button, container, slider, checkbox, pick_list, Color, Vector}; pub const BACKGROUND: Color = Color::from_rgb(238.0 / 255.0, 238.0 / 255.0, 238.0 / 255.0); pub const FOREGROUND: Color = Color::from_rgb(224.0 / 255.0, 224.0 / 255.0, 224.0 / 255.0); pub const HOVERED: Color = Color::from_rgb(66.0 / 255.0, 66.0 / 255.0, 66.0 / 255.0); pub const PRIMARY: Color = Color::from_rgb(12.0 / 255.0, 46.0 / 251.0, 179.0 / 255.0); pub const SECONDARY: Color = Color::from_rgb(112.0 / 255.0, 16.0 / 251.0, 191.0 / 255.0); pub enum CustomButton { Default, Text, Primary, Secondary, Transparent, Selected, Hovered, } impl button::StyleSheet for CustomButton { fn active(&self) -> button::Style { use CustomButton::*; button::Style { text_color: match self { Primary => PRIMARY, Secondary => SECONDARY, Transparent | Selected => Color::WHITE, _ => Color::BLACK, }, background: Some( match self { Default => Color::WHITE, Selected => Color { a: 0.5, ..PRIMARY }, Primary => Color { a: 0.3, ..PRIMARY }, Secondary => Color { a: 0.3, ..SECONDARY }, Hovered => Color { a: 0.3, ..HOVERED }, _ => Color::TRANSPARENT, } .into(), ), border_radius: 7.0, border_color: Color::TRANSPARENT, border_width: 1.0, shadow_offset: match self { Default => Vector::new(0.5, 0.5), _ => Vector::new(0.0, 0.0) }, } } fn hovered(&self) -> button::Style { use CustomButton::*; let active = self.active(); button::Style { background: match self { Transparent => Some(Color { a: 0.3, ..PRIMARY }.into()), Text => Some(Color { a: 0.3, ..HOVERED }.into()), Primary | Secondary | Hovered => Some(active.text_color.into()), _ => active.background, }, text_color: match self { Primary | Secondary | Hovered => Color::WHITE, _ => active.text_color, }, ..active } } } pub enum CustomContainer { Foreground, } impl container::StyleSheet for CustomContainer { fn style(&self) -> container::Style { use CustomContainer::*; container::Style { background: Some(match self { Foreground => FOREGROUND, }.into()), border_radius: 7.0, 
..container::Style::default() } } } pub struct CustomSelect; impl pick_list::StyleSheet for CustomSelect { fn menu(&self) -> iced_style::menu::Style { let default = Default::default(); iced_style::menu::Style { selected_background: PRIMARY.into(), ..default } } fn active(&self) -> pick_list::Style { pick_list::Style { text_color: Color::BLACK, background: Color { a: 0.3, ..PRIMARY }.into(), icon_size: 0.5, border_color: PRIMARY, border_radius: 5.0, border_width: 0.0, } } fn hovered(&self) -> pick_list::Style { self.active() } } pub struct CustomSlider; impl slider::StyleSheet for CustomSlider { fn active(&self) -> slider::Style { slider::Style { rail_colors: (Color{ a: 0.5, ..HOVERED }, Color::TRANSPARENT), handle: slider::Handle { shape: slider::HandleShape::Circle { radius: 9.0 }, color: PRIMARY, border_width: 0.0, border_color: Color::TRANSPARENT, }, } } fn hovered(&self) -> slider::Style { self.active() } fn dragging(&self) -> slider::Style { self.hovered() } } pub struct CustomCheckbox; impl checkbox::StyleSheet for CustomCheckbox { fn active(&self, is_checked: bool) -> checkbox::Style { checkbox::Style { background: if is_checked { PRIMARY } else { Color::WHITE }.into(), checkmark_color: Color::WHITE, border_radius: 5.0, border_width: 1.5, border_color: if is_checked { PRIMARY } else { HOVERED }.into(), } } fn hovered(&self, is_checked: bool) -> checkbox::Style { self.active(is_checked) } } pub struct CustomTooltip; impl container::StyleSheet for CustomTooltip { fn style(&self) -> container::Style { container::Style { background: Some(Color::WHITE.into()), ..container::Style::default() } } } pub struct ContainerFill(pub Color); impl container::StyleSheet for ContainerFill { fn style(&self) -> container::Style { container::Style { background: Some(self.0.into()), ..container::Style::default() } } }
use iced::{button, container, slider, checkbox, pick_list, Color, Vector}; pub const BACKGROUND: Color = Color::from_rgb(238.0 / 255.0, 238.0 / 255.0, 238.0 / 255.0); pub const FOREGROUND: Color = Color::from_rgb(224.0 / 255.0, 224.0 / 255.0, 224.0 / 255.0); pub const HOVERED: Color = Color::from_rgb(66.0 / 255.0, 66.0 / 255.0, 66.0 / 255.0); pub const PRIMARY: Color = Color::from_rgb(12.0 / 255.0, 46.0 / 251.0, 179.0 / 255.0); pub const SECONDARY: Color = Color::from_rgb(112.0 / 255.0, 16.0 / 251.0, 191.0 / 255.0); pub enum CustomButton { Default, Text, Primary, Secondary, Transparent, Selected, Hovered, } impl button::StyleSheet for CustomButton { fn active(&self) -> button::Style { use CustomButton::*; button::Style { text_color: match self { Primary => PRIMARY, Secondary => SECONDARY, Transparent | Selected => Color::WHITE, _ => Color::BLACK, }, background: Some( match self { Default => Color::WHITE, Selected => Color { a: 0.5, ..PRIMARY }, Primary => Color { a: 0.3, ..PRIMARY }, Secondary => Color { a: 0.3, ..SECONDARY }, Hovered => Color { a: 0.3, ..HOVERED }, _ => Color::TRANSPARENT, } .into(), ), border_radius: 7.0, border_color: Color::TRANSPARENT, border_width: 1.0, shadow_offset: match self { Default => Vector::new(0.5, 0.5), _ => Vector::new(0.0, 0.0) }, } } fn hovered(&self) -> button::Style { use CustomButton::*; let active = self.active(); button::Style { background: match self { Transparent => Some(Color { a: 0.3, ..PRIMARY }.into()), Text => Some(Color { a: 0.3, ..HOVERED }.into()), Primary | Secondary | Hovered => Some(active.text_color.into()), _ => active.background, }, text_color: match self { Primary | Secondary | Hovered => Color::WHITE, _ => active.text_color, }, ..active } } } pub enum CustomContainer { Foreground, } impl container::StyleSheet for CustomContainer { fn style(&self) -> con
struct ContainerFill(pub Color); impl container::StyleSheet for ContainerFill { fn style(&self) -> container::Style { container::Style { background: Some(self.0.into()), ..container::Style::default() } } }
tainer::Style { use CustomContainer::*; container::Style { background: Some(match self { Foreground => FOREGROUND, }.into()), border_radius: 7.0, ..container::Style::default() } } } pub struct CustomSelect; impl pick_list::StyleSheet for CustomSelect { fn menu(&self) -> iced_style::menu::Style { let default = Default::default(); iced_style::menu::Style { selected_background: PRIMARY.into(), ..default } } fn active(&self) -> pick_list::Style { pick_list::Style { text_color: Color::BLACK, background: Color { a: 0.3, ..PRIMARY }.into(), icon_size: 0.5, border_color: PRIMARY, border_radius: 5.0, border_width: 0.0, } } fn hovered(&self) -> pick_list::Style { self.active() } } pub struct CustomSlider; impl slider::StyleSheet for CustomSlider { fn active(&self) -> slider::Style { slider::Style { rail_colors: (Color{ a: 0.5, ..HOVERED }, Color::TRANSPARENT), handle: slider::Handle { shape: slider::HandleShape::Circle { radius: 9.0 }, color: PRIMARY, border_width: 0.0, border_color: Color::TRANSPARENT, }, } } fn hovered(&self) -> slider::Style { self.active() } fn dragging(&self) -> slider::Style { self.hovered() } } pub struct CustomCheckbox; impl checkbox::StyleSheet for CustomCheckbox { fn active(&self, is_checked: bool) -> checkbox::Style { checkbox::Style { background: if is_checked { PRIMARY } else { Color::WHITE }.into(), checkmark_color: Color::WHITE, border_radius: 5.0, border_width: 1.5, border_color: if is_checked { PRIMARY } else { HOVERED }.into(), } } fn hovered(&self, is_checked: bool) -> checkbox::Style { self.active(is_checked) } } pub struct CustomTooltip; impl container::StyleSheet for CustomTooltip { fn style(&self) -> container::Style { container::Style { background: Some(Color::WHITE.into()), ..container::Style::default() } } } pub
random
[ { "content": "pub fn search() -> Text {\n\n icon('\\u{f002}')\n\n}\n\n\n", "file_path": "panel/src/views/common.rs", "rank": 0, "score": 132919.60925981292 }, { "content": "pub fn wifi() -> Text {\n\n icon('\\u{f1eb}')\n\n}\n", "file_path": "panel/src/views/common.rs", "rank": 1, "score": 132919.60925981292 }, { "content": "pub fn refresh() -> Text {\n\n icon('\\u{f2f1}')\n\n}\n", "file_path": "panel/src/views/common.rs", "rank": 2, "score": 132919.60925981292 }, { "content": "pub fn key() -> Text {\n\n icon('\\u{f084}')\n\n}\n", "file_path": "panel/src/views/common.rs", "rank": 3, "score": 132919.60925981292 }, { "content": "pub fn unlock() -> Text {\n\n icon('\\u{f09c}')\n\n}\n", "file_path": "panel/src/views/common.rs", "rank": 4, "score": 132919.60925981292 }, { "content": "pub fn condition(level: f32) -> Text {\n\n let to_i32 = level as i32;\n\n match to_i32 {\n\n 0..=10 => battery_full(),\n\n 11..=30 => battery_quarter(),\n\n 31..=50 => battery_half(),\n\n 51..=80 => battery_three_quarter(),\n\n 81..=100 => battery_full(),\n\n _ => battery_empty(),\n\n }\n\n}\n", "file_path": "panel/src/views/common.rs", "rank": 5, "score": 120988.56661624924 }, { "content": "pub fn icon(unicode: char) -> Text {\n\n Text::new(&unicode.to_string())\n\n .font(ICONS)\n\n .width(Length::Units(16))\n\n .vertical_alignment(VerticalAlignment::Center)\n\n .horizontal_alignment(HorizontalAlignment::Center)\n\n .size(18)\n\n}\n\nconst ICONS: Font = Font::External {\n\n name: \"Line Awesome\",\n\n bytes: include_bytes!(\"../assets/font/la-solid-900.ttf\"),\n\n};\n", "file_path": "panel/src/views/common.rs", "rank": 6, "score": 120988.56661624924 }, { "content": "pub fn setttings(text_size: u16) -> Settings {\n\n Settings {\n\n default_text_size: text_size,\n\n ..Settings::default()\n\n }\n\n}\n", "file_path": "panel/src/main.rs", "rank": 7, "score": 107962.17877425479 }, { "content": "fn battery_quarter() -> Text {\n\n icon('\\u{f243}')\n\n}\n", "file_path": 
"panel/src/views/common.rs", "rank": 8, "score": 88750.95583895054 }, { "content": "fn battery_half() -> Text {\n\n icon('\\u{f242}')\n\n}\n", "file_path": "panel/src/views/common.rs", "rank": 9, "score": 88750.95583895054 }, { "content": "fn wifi_icon() -> Text {\n\n icon('\\u{f1eb}')\n\n}\n", "file_path": "panel/src/views/panel.rs", "rank": 10, "score": 88750.95583895054 }, { "content": "fn monitor_icon() -> Text {\n\n icon('\\u{f108}')\n\n}\n\n\n", "file_path": "panel/src/views/panel.rs", "rank": 11, "score": 88750.95583895054 }, { "content": "fn sound_icon() -> Text {\n\n icon('\\u{f028}')\n\n}\n", "file_path": "panel/src/views/panel.rs", "rank": 12, "score": 88750.95583895054 }, { "content": "fn battery_empty() -> Text {\n\n icon('\\u{f244}')\n\n}\n", "file_path": "panel/src/views/common.rs", "rank": 13, "score": 88750.95583895054 }, { "content": "fn battery_full() -> Text {\n\n icon('\\u{f240}')\n\n}\n\n\n", "file_path": "panel/src/views/common.rs", "rank": 14, "score": 88750.95583895054 }, { "content": "fn battery_three_quarter() -> Text {\n\n icon('\\u{f241}')\n\n}\n", "file_path": "panel/src/views/common.rs", "rank": 15, "score": 86765.02316700449 }, { "content": "pub trait PersistentData: DeserializeOwned + Serialize + Default {\n\n fn relative_path() -> PathBuf;\n\n\n\n fn path() -> Result<PathBuf, DesktopError> {\n\n let path = dirs_next::config_dir().unwrap().join(Self::relative_path());\n\n if let Some(dir) = path.parent() {\n\n fs::create_dir_all(dir)?;\n\n }\n\n\n\n Ok(path)\n\n }\n\n\n\n fn load() -> Result<Self, DesktopError> {\n\n let file = Self::path()?;\n\n\n\n if file.exists() { \n\n if file.is_file() {\n\n Ok(toml::from_str(&fs::read_to_string(file)?)?)\n\n } else {\n\n Err(DesktopError::ConfigNotFound(file.display().to_string()))\n", "file_path": "desktop/src/configs/persistent_data.rs", "rank": 16, "score": 81530.94710515818 }, { "content": "pub fn requests_exit(event: &WindowEvent<'_>, _modifiers: ModifiersState) -> bool {\n\n match event 
{\n\n WindowEvent::CloseRequested => true,\n\n #[cfg(target_os = \"macos\")]\n\n WindowEvent::KeyboardInput {\n\n input:\n\n KeyboardInput {\n\n virtual_keycode: Some(winit::event::VirtualKeyCode::Q),\n\n state: ElementState::Pressed,\n\n ..\n\n },\n\n ..\n\n } if _modifiers.logo() => true,\n\n _ => false,\n\n }\n\n}\n", "file_path": "desktop/src/window_state.rs", "rank": 17, "score": 79562.00444857856 }, { "content": "pub fn handle_window(win: &Window, pos: &mut u32) {\n\n if let Some(display) = win.primary_monitor() {\n\n let width = display.size().width;\n\n win.set_inner_size(PhysicalSize::new(width, WINDOW_HEIGHT));\n\n *pos = width - 400;\n\n }\n\n win.set_outer_position(PhysicalPosition::new(0, 0));\n\n}\n\n\n", "file_path": "panel/src/main.rs", "rank": 18, "score": 72886.32209966434 }, { "content": "fn find_active_window(\n\n conn: &impl Connection,\n\n root: Window,\n\n net_active_window: Atom,\n\n hash_map: &mut HashMap<&str, Option<Window>>,\n\n) -> Result<(Window, bool), Box<dyn Error>> {\n\n let window: Window = AtomEnum::ANY.into();\n\n let active_window = conn\n\n .get_property(false, root, net_active_window, window, 0, 1)?\n\n .reply()?;\n\n if active_window.format == 32 && active_window.length == 1 {\n\n // Things will be so much easier with the next release:\n\n let widnow_id = u32::try_parse(&active_window.value)?.0;\n\n let focus_changed = widnow_id != hash_map[\"xid\"].unwrap();\n\n hash_map.insert(\"xid\", Some(widnow_id));\n\n Ok((u32::try_parse(&active_window.value)?.0, focus_changed))\n\n } else {\n\n // Query the input focus\n\n Ok((conn.get_input_focus()?.reply()?.focus, false))\n\n }\n\n}\n", "file_path": "panel/src/task_manager/taskmanager.rs", "rank": 19, "score": 71328.59375579013 }, { "content": "pub fn handle_visible_pos(win: &mut State<Applets>, kind: ControlType, is_visible: bool, pos: u32) {\n\n win.win_state.queue_message(AppletsMsg::SwitchView(kind));\n\n if is_visible {\n\n win.window.set_visible(true);\n\n 
win.window.set_always_on_top(true);\n\n } else {\n\n win.window.set_visible(false);\n\n win.win_state\n\n .queue_message(AppletsMsg::SwitchView(ControlType::Default));\n\n }\n\n win.window\n\n .set_outer_position(PhysicalPosition::new(pos, 32));\n\n}\n\n\n\n// async fn run_instance<E>(\n\n// mut desktop_state: State<DesktopPanel>,\n\n// mut context_menu_state: State<Applets>,\n\n// mut runtime: Runtime<E, Proxy<ProxyMessage>, ProxyMessage>,\n\n// mut receiver: mpsc::UnboundedReceiver<winit::event::Event<'_, ProxyMessage>>,\n\n// context_menu_size: PhysicalSize<f64>,\n", "file_path": "panel/src/main.rs", "rank": 20, "score": 58986.478607162 }, { "content": "fn print_type_of<T>(_: &T) {\n\n println!(\"{}\", type_name::<T>())\n\n}\n", "file_path": "panel/src/views/battery.rs", "rank": 21, "score": 54640.053842622 }, { "content": "enum DynWinState {\n\n DesktopConfig(WindowState<DesktopConfigUI>),\n\n BgConfig(WindowState<BackgroundConfigUI>),\n\n}\n", "file_path": "desktop/src/main.rs", "rank": 22, "score": 53786.60002797359 }, { "content": "pub trait HasChanged {\n\n fn has_changed(&self) -> bool;\n\n}", "file_path": "desktop/src/gui/has_changed.rs", "rank": 23, "score": 49209.83884581829 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "src/main.rs", "rank": 24, "score": 47000.52876719274 }, { "content": "fn main() {\n\n std::env::set_var(\"WINIT_X11_SCALE_FACTOR\", \"1.25\");\n\n match DesktopManager::new() {\n\n Ok(mut desktop_manager) => {\n\n let mut old_desktop_conf = desktop_manager.config().to_owned();\n\n let desktop_conf = Rc::new(RefCell::new(desktop_manager.config().to_owned()));\n\n let desktop_items = Rc::new(RefCell::new(desktop_manager.desktop_items().to_owned()));\n\n let wallpaper_items =\n\n Rc::new(RefCell::new(desktop_manager.wallpaper_items().to_owned()));\n\n // .into_iter().map(|item| (button::State::new(), item.to_owned())).collect::<Vec<(button::State, DesktopItem)>>()\n\n\n\n // Instance\n\n let mut 
windows = HashMap::new();\n\n let event_loop = EventLoop::with_user_event();\n\n let event_proxy = event_loop.create_proxy();\n\n let instance = wgpu::Instance::new(wgpu::BackendBit::PRIMARY);\n\n let mut runtime = {\n\n let proxy = Proxy::new(event_loop.create_proxy());\n\n let executor = executor::Default::new().expect(\"Failed to create executor\");\n\n Runtime::new(executor, proxy)\n", "file_path": "desktop/src/main.rs", "rank": 25, "score": 45688.78687792549 }, { "content": "fn main() {\n\n std::env::set_var(\"WINIT_X11_SCALE_FACTOR\", \"1.25\");\n\n\n\n let event_loop = EventLoop::with_user_event();\n\n // uncomment to be able to test task manager.\n\n // let task_manager = task_manager::taskmanager::TaskManager::new();\n\n // match task_manager {\n\n // Ok(()) => {}\n\n // Err(e) => println!(\"Error: {:?}\", e),\n\n // }\n\n let instance = wgpu::Instance::new(wgpu::BackendBit::PRIMARY);\n\n let window = WindowBuilder::new()\n\n .with_x11_window_strut(vec![XWindowStrut::Strut(RESERVE_SIZE)])\n\n .with_x11_window_type(vec![XWindowType::Dock])\n\n .build(&event_loop)\n\n .unwrap();\n\n\n\n let popup_menu = WindowBuilder::new()\n\n .with_x11_window_type(vec![XWindowType::PopupMenu, XWindowType::Menu])\n\n .with_decorations(false)\n", "file_path": "panel/src/main.rs", "rank": 26, "score": 45688.78687792549 }, { "content": "fn get_prefered_position(\n\n cursor_position: PhysicalPosition<f64>,\n\n window_size: PhysicalSize<f64>,\n\n monitor_size: PhysicalSize<u32>,\n\n) -> PhysicalPosition<f64> {\n\n let prefered_x = if window_size.width + cursor_position.x > monitor_size.width as f64 {\n\n cursor_position.x - window_size.width\n\n } else {\n\n cursor_position.x\n\n };\n\n let prefered_y = if window_size.height + cursor_position.y > monitor_size.height as f64 {\n\n cursor_position.y - window_size.height\n\n } else {\n\n cursor_position.y\n\n };\n\n\n\n PhysicalPosition::new(prefered_x, prefered_y)\n\n}\n\n\n", "file_path": "desktop/src/main.rs", "rank": 27, 
"score": 43337.89483113933 }, { "content": "fn build_user_interface<'a, P: Program>(\n\n program: &'a mut P,\n\n cache: Cache,\n\n renderer: &mut P::Renderer,\n\n size: Size,\n\n debug: &mut Debug,\n\n) -> UserInterface<'a, P::Message, P::Renderer> {\n\n debug.view_started();\n\n let view = program.view();\n\n debug.view_finished();\n\n\n\n debug.layout_started();\n\n let user_interface = UserInterface::build(view, size, cache, renderer);\n\n debug.layout_finished();\n\n\n\n user_interface\n\n}\n\n\n", "file_path": "desktop/src/window_state.rs", "rank": 28, "score": 37489.966927444235 }, { "content": "fn get_list_ssid() -> Vec<WifiProperty> {\n\n let ssid_info = get_accesspoints();\n\n let mut initial_list: Vec<WifiProperty> = Vec::new();\n\n match ssid_info {\n\n Ok(data) => {\n\n for accesspoint in data {\n\n let mut wifi_props: WifiProperty = WifiProperty::new();\n\n wifi_props.ssid = accesspoint.ssid;\n\n wifi_props.detail = button::State::new();\n\n wifi_props.settings_icon = '\\u{f084}';\n\n wifi_props.settings = button::State::new();\n\n wifi_props.connect = button::State::new();\n\n wifi_props.input_passwd = text_input::State::new();\n\n wifi_props.status = true;\n\n wifi_props.show_passwd_btn = button::State::new();\n\n initial_list.push(wifi_props);\n\n // initial_button::State::new(), true, '\\u{f1eb}', accesspoint.ssid, button::State::new()list.push(button::State::new(), true, '\\u{f1eb}', accesspoint.ssid, button::State::new());\n\n }\n\n }\n\n Err(e) => println!(\"Error: {:?}\", e),\n", "file_path": "panel/src/views/wireless.rs", "rank": 29, "score": 37407.60027841623 }, { "content": "mod wallpaper_error;\n\nmod wallpaper_item;\n\n\n\npub use wallpaper_error::WallpaperError;\n\npub use wallpaper_item::WallpaperItem;\n", "file_path": "desktop/src/background.rs", "rank": 30, "score": 35404.726557851485 }, { "content": "fn get_battery() -> battery::Result<(BatteryInfo, Manager)> {\n\n let manager = battery::Manager::new()?;\n\n let battery = match 
manager.batteries()?.next() {\n\n Some(Ok(battery)) => battery,\n\n Some(Err(e)) => {\n\n eprintln!(\"Unable to access battery information\");\n\n return Err(e);\n\n }\n\n None => {\n\n eprintln!(\"Unable to find any batteries\");\n\n return Err(Error::from(ErrorKind::NotFound).into());\n\n }\n\n };\n\n\n\n Ok((battery, manager))\n\n}\n\n\n", "file_path": "panel/src/views/battery.rs", "rank": 31, "score": 35131.20631328605 }, { "content": "use iced::{\n\n slider::{Handle, HandleShape, Style, StyleSheet},\n\n Background, Color,\n\n};\n\n\n\npub enum SliderType {\n\n Default,\n\n}\n\nimpl StyleSheet for SliderType {\n\n fn active(&self) -> Style {\n\n Style {\n\n rail_colors: ([0.6, 0.6, 0.6, 0.5].into(), Color::WHITE),\n\n handle: Handle {\n\n shape: HandleShape::Circle { radius: 8.0 },\n\n color: Color::from_rgb(0.95, 0.95, 0.95),\n\n border_color: Color::from_rgb(0.6, 0.6, 0.6),\n\n border_width: 10.0,\n\n },\n\n }\n\n }\n", "file_path": "panel/src/styles/slider.rs", "rank": 32, "score": 34511.52305267595 }, { "content": " fn hovered(&self) -> Style {\n\n let active = self.active();\n\n Style {\n\n handle: Handle {\n\n color: Color::from_rgb(0.90, 0.90, 0.90),\n\n ..active.handle\n\n },\n\n ..active\n\n }\n\n }\n\n fn dragging(&self) -> Style {\n\n let active = self.active();\n\n Style {\n\n handle: Handle {\n\n color: Color::from_rgb(0.85, 0.85, 0.85),\n\n ..active.handle\n\n },\n\n ..active\n\n }\n\n }\n\n}\n", "file_path": "panel/src/styles/slider.rs", "rank": 33, "score": 34499.83582292308 }, { "content": "pub mod buttons {\n\n use iced::{button, Background, Color, Vector};\n\n pub enum ButtonStyle {\n\n Default,\n\n Circular(u8, u8, u8, f32),\n\n BigCircular(u8, u8, u8, f32),\n\n CircleRadius(u8, u8, u8, f32, f32, Color),\n\n Transparent,\n\n }\n\n\n\n impl button::StyleSheet for ButtonStyle {\n\n fn active(&self) -> button::Style {\n\n button::Style {\n\n shadow_offset: Vector::new(0.0, 0.0),\n\n background: match self {\n\n ButtonStyle::Default => 
Some(Background::Color([0.87, 0.87, 0.87].into())),\n\n ButtonStyle::Circular(c1, c2, c3, p)\n\n | ButtonStyle::CircleRadius(c1, c2, c3, p, _, _)\n\n | ButtonStyle::BigCircular(c1, c2, c3, p) => {\n\n Some(Background::Color(Color::from_rgba8(*c1, *c2, *c3, *p)))\n", "file_path": "panel/src/styles/buttonstyle.rs", "rank": 34, "score": 34493.55014222747 }, { "content": " }\n\n ButtonStyle::Transparent => Some(Background::Color(Color::TRANSPARENT)),\n\n },\n\n border_radius: match self {\n\n ButtonStyle::Default | ButtonStyle::Circular(_, _, _, _) => 4.0,\n\n ButtonStyle::BigCircular(_, _, _, _) => 25.0,\n\n ButtonStyle::Transparent => 0.0,\n\n ButtonStyle::CircleRadius(_, _, _, _, r, _) => *r,\n\n },\n\n border_width: 0.0,\n\n border_color: [0.7, 0.7, 0.7].into(),\n\n text_color: match self {\n\n ButtonStyle::Default\n\n | ButtonStyle::BigCircular(_, _, _, _)\n\n | ButtonStyle::Circular(_, _, _, _) => Color::WHITE,\n\n ButtonStyle::Transparent => Color::BLACK,\n\n ButtonStyle::CircleRadius(_, _, _, _, _, color) => *color,\n\n },\n\n }\n\n }\n", "file_path": "panel/src/styles/buttonstyle.rs", "rank": 35, "score": 34487.366207828236 }, { "content": " fn hovered(&self) -> button::Style {\n\n let active = self.active();\n\n button::Style {\n\n shadow_offset: active.shadow_offset + Vector::new(0.0, 1.0),\n\n background: Some(Background::Color(Color::WHITE)),\n\n ..active\n\n }\n\n }\n\n }\n\n}\n", "file_path": "panel/src/styles/buttonstyle.rs", "rank": 36, "score": 34487.04910258713 }, { "content": " Success,\n\n Warning,\n\n Transparent(Color),\n\n}\n\n\n\nimpl container::StyleSheet for CustomContainer {\n\n fn style(&self) -> container::Style {\n\n use CustomContainer::*;\n\n container::Style {\n\n background: Some(\n\n match self {\n\n Background | Header => BACKGROUND,\n\n ForegroundWhite => Color::WHITE,\n\n ForegroundGray | Segment => FOREGROUND,\n\n Hovered => Color {\n\n a: 0.2,\n\n ..Color::BLACK\n\n },\n\n FadedBrightForeground => Color {\n\n a: 0.8,\n", 
"file_path": "panel/src/styles/containers.rs", "rank": 37, "score": 34479.37726639614 }, { "content": "#![allow(dead_code)]\n\nuse iced::{container, Color};\n\n\n\npub const BACKGROUND: Color = Color::from_rgb(238.0 / 255.0, 238.0 / 255.0, 238.0 / 255.0);\n\npub const FOREGROUND: Color = Color::from_rgb(224.0 / 255.0, 224.0 / 255.0, 224.0 / 255.0);\n\npub const HOVERED: Color = Color::from_rgb(129.0 / 255.0, 129.0 / 255.0, 129.0 / 255.0);\n\npub const ACCENT: Color = Color::from_rgb(15.0 / 255.0, 85.0 / 255.0, 179.0 / 255.0);\n\npub const SUCCESS: Color = Color::from_rgb(31.0 / 255.0, 139.0 / 255.0, 36.0 / 255.0);\n\npub const WARNING: Color = Color::from_rgb(212.0 / 255.0, 176.0 / 255.0, 17.0 / 255.0);\n\npub const ERROR: Color = Color::from_rgb(218.0 / 255.0, 16.0 / 255.0, 11.0 / 255.0);\n\n\n\npub enum CustomContainer {\n\n Background,\n\n ForegroundWhite,\n\n ForegroundGray,\n\n Header,\n\n Segment,\n\n FadedBrightForeground,\n\n Hovered,\n\n Primary,\n", "file_path": "panel/src/styles/containers.rs", "rank": 38, "score": 34476.41549335265 }, { "content": " ..FOREGROUND\n\n },\n\n Primary => Color { a: 0.7, ..ACCENT },\n\n Success => SUCCESS,\n\n Warning => WARNING,\n\n Transparent(color) => Color {\n\n a: 0.35,\n\n ..(*color)\n\n },\n\n }\n\n .into(),\n\n ),\n\n border_radius: match self {\n\n Segment => 10.0,\n\n ForegroundGray | Hovered => 0.0,\n\n FadedBrightForeground => 4.0,\n\n Success | Warning | Primary => 5.0,\n\n _ => 0.0,\n\n },\n\n border_width: match self {\n", "file_path": "panel/src/styles/containers.rs", "rank": 39, "score": 34471.019652871444 }, { "content": " Header | Segment => 1.0,\n\n Primary => 0.5,\n\n _ => 0.0,\n\n },\n\n border_color: match self {\n\n Header => Color::TRANSPARENT,\n\n Primary => Color::BLACK,\n\n _ => BACKGROUND,\n\n },\n\n ..container::Style::default()\n\n }\n\n }\n\n}\n", "file_path": "panel/src/styles/containers.rs", "rank": 40, "score": 34470.709599951 }, { "content": "fn parse_string_property(property: 
&GetPropertyReply) -> &str {\n\n std::str::from_utf8(&property.value).unwrap_or(\"Invalid utf8\")\n\n}\n", "file_path": "panel/src/task_manager/taskmanager.rs", "rank": 41, "score": 34299.32326279913 }, { "content": "use iced_wgpu::Renderer;\n\nuse iced_winit::{text_input, Align, Color, Element, Row, Text, TextInput};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ColorConfigUI {\n\n color_state: text_input::State,\n\n text: String,\n\n color: Color,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum ColorConfigMsg {\n\n ColorChanged(String),\n\n}\n\n\n\nimpl ColorConfigUI {\n\n pub fn new(color: Color) -> Self {\n\n Self {\n\n color,\n\n text: String::from(\"sample test\"),\n", "file_path": "desktop/src/gui/color_config.rs", "rank": 42, "score": 33271.47305695486 }, { "content": " color_state: text_input::State::new(),\n\n }\n\n }\n\n\n\n pub fn update(&mut self, msg: ColorConfigMsg) {\n\n match msg {\n\n ColorConfigMsg::ColorChanged(val) => self.text = val,\n\n }\n\n }\n\n\n\n pub fn view(&mut self) -> Element<ColorConfigMsg, Renderer> {\n\n let lb_color = Text::new(\"Color: \");\n\n let txt_color = TextInput::new(\n\n &mut self.color_state,\n\n \"\",\n\n &self.text,\n\n ColorConfigMsg::ColorChanged,\n\n )\n\n .padding(7);\n\n Row::new()\n\n .spacing(15)\n\n .align_items(Align::Center)\n\n .push(lb_color)\n\n .push(txt_color)\n\n .into()\n\n }\n\n}\n", "file_path": "desktop/src/gui/color_config.rs", "rank": 43, "score": 33264.041468884025 }, { "content": "fn parse_wm_class(property: &GetPropertyReply) -> (&str, &str) {\n\n if property.format != 8 {\n\n return (\n\n \"Malformed property: wrong format\",\n\n \"Malformed property: wrong format\",\n\n );\n\n }\n\n let value = &property.value;\n\n // The property should contain two null-terminated strings. 
Find them.\n\n if let Some(middle) = value.iter().position(|&b| b == 0) {\n\n let (instance, class) = value.split_at(middle);\n\n // Skip the null byte at the beginning\n\n let mut class = &class[1..];\n\n // Remove the last null byte from the class, if it is there.\n\n if class.last() == Some(&0) {\n\n class = &class[..class.len() - 1];\n\n }\n\n let instance = std::str::from_utf8(instance);\n\n let class = std::str::from_utf8(class);\n\n (\n", "file_path": "panel/src/task_manager/taskmanager.rs", "rank": 44, "score": 32940.583028893394 }, { "content": "use super::wallpaper_conf::WallpaperConf;\n\nuse de::deserialize_color_hex_string;\n\nuse ser::serialize_color_hex;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::fmt::{self, Display, Formatter};\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct BackgroundConf {\n\n pub kind: BackgroundType,\n\n #[serde(\n\n deserialize_with = \"deserialize_color_hex_string\",\n\n serialize_with = \"serialize_color_hex\"\n\n )]\n\n pub color_background: iced_winit::Color,\n\n #[serde(rename = \"Wallpaper_Config\")]\n\n pub wallpaper_conf: WallpaperConf,\n\n}\n\n\n\nimpl Default for BackgroundConf {\n\n fn default() -> Self {\n", "file_path": "desktop/src/configs/background_conf.rs", "rank": 45, "score": 32879.3473427357 }, { "content": "use std::{cell::RefCell, rc::Rc};\n\nuse crate::constants::THUMBNAIL_SIZE;\n\nuse crate::proxy_message::ProxyMessage;\n\nuse crate::configs::{\n\n background_conf::BackgroundType, wallpaper_conf::Placement, DesktopConf, PersistentData,\n\n};\n\nuse crate::background::WallpaperItem;\n\nuse super::styles::{CustomButton, CustomTooltip, CustomContainer, BACKGROUND, CustomSelect};\n\nuse super::has_changed::HasChanged;\n\nuse iced::Image;\n\nuse iced_wgpu::Renderer;\n\nuse iced_winit::{\n\n winit, pick_list, button, scrollable, text_input, tooltip, Program, Command, Element, Row, Container, Clipboard,\n\n Text, Scrollable, Button, Space, Length, Align, Column, Application, 
TextInput, Tooltip, PickList, Grid, Color,\n\n};\n\nuse winit::event_loop::EventLoopProxy;\n\nconst MIN_THUMNAIL_SIZE: (u32, u32) = (640, 480);\n\n\n\n#[derive(Debug)]\n\npub struct BackgroundConfigUI {\n", "file_path": "desktop/src/gui/background_config.rs", "rank": 46, "score": 32875.540919039784 }, { "content": "impl Program for BackgroundConfigUI {\n\n type Message = BackgroundConfMsg;\n\n type Renderer = Renderer;\n\n type Clipboard = Clipboard;\n\n\n\n fn update(&mut self, msg: Self::Message, _clipboard: &mut Clipboard) -> Command<Self::Message> {\n\n use BackgroundConfMsg::*;\n\n\n\n let mut had_changed = false;\n\n let wallpaper_items = self.wallpaper_items.borrow();\n\n let mut desktop_conf = self.desktop_conf.borrow_mut();\n\n let bg_conf = &mut desktop_conf.background_conf;\n\n let wallpaper_conf = &mut bg_conf.wallpaper_conf;\n\n\n\n match msg {\n\n BackgroundTypeChanged(val) => bg_conf.kind = val,\n\n ColorChanged(val) => self.text = val,\n\n PlacementChanged(val) => wallpaper_conf.placement = val,\n\n WallpaperChanged(idx) => {\n\n self.selected_wallpaper = Some(idx);\n", "file_path": "desktop/src/gui/background_config.rs", "rank": 47, "score": 32875.06854415413 }, { "content": " Self {\n\n kind: BackgroundType::Color,\n\n color_background: iced_winit::Color::from_rgb8(27, 27, 27),\n\n wallpaper_conf: WallpaperConf::default(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]\n\npub enum BackgroundType {\n\n Color,\n\n Wallpaper,\n\n}\n\n\n\nimpl BackgroundType {\n\n pub const ALL: [BackgroundType; 2] = [BackgroundType::Color, BackgroundType::Wallpaper];\n\n}\n\n\n\nimpl Display for BackgroundType {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n", "file_path": "desktop/src/configs/background_conf.rs", "rank": 48, "score": 32874.952562829356 }, { "content": " use BackgroundType::*;\n\n write!(\n\n f,\n\n \"{}\",\n\n match self {\n\n Color => \"Color\",\n\n Wallpaper => \"Wallpaper\",\n\n }\n\n 
)\n\n }\n\n}\n\n\n\nmod ser {\n\n use iced_winit::Color;\n\n use serde::ser::Serializer;\n\n\n\n pub(super) fn serialize_color_hex<S>(color: &Color, s: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n", "file_path": "desktop/src/configs/background_conf.rs", "rank": 49, "score": 32873.927890244704 }, { "content": " btn_add_state: Default::default(),\n\n btn_apply_state: Default::default(),\n\n color_state: text_input::State::focused(),\n\n is_changed: false,\n\n placement_state: Default::default(),\n\n scroll: Default::default(),\n\n },\n\n Command::none(),\n\n )\n\n }\n\n\n\n fn title(&self) -> String {\n\n String::from(\"Desktop Background Configuration\")\n\n }\n\n\n\n fn background_color(&self) -> Color {\n\n BACKGROUND\n\n }\n\n}\n\n\n", "file_path": "desktop/src/gui/background_config.rs", "rank": 50, "score": 32872.19729854574 }, { "content": "use std::path::{PathBuf, Path};\n\nuse super::wallpaper_error::WallpaperError;\n\nuse crate::constants::{DESKTOP_ENTRY, NAME};\n\nuse std::cmp::Ordering;\n\nconst METADATA_FILE: &str = \"metadata.desktop\";\n\n\n\n#[derive(Debug, Clone, Default, PartialEq, PartialOrd, Eq)]\n\npub struct WallpaperItem {\n\n pub name: Option<String>,\n\n pub is_local: bool,\n\n pub path: PathBuf,\n\n}\n\n\n\nimpl WallpaperItem {\n\n pub fn from_file<P: AsRef<Path>>(path: P, is_local: bool) -> Result<Self, WallpaperError> {\n\n let path = path.as_ref();\n\n if path.exists() {\n\n if path.is_file() {\n\n Ok(Self {\n\n is_local,\n", "file_path": "desktop/src/background/wallpaper_item.rs", "rank": 51, "score": 32871.5023910977 }, { "content": " PlacementChanged(Placement),\n\n WallpaperChanged(usize),\n\n AddWallpaperClicked,\n\n ApplyClicked,\n\n}\n\n\n\nimpl Application for BackgroundConfigUI {\n\n type Flags = (EventLoopProxy<ProxyMessage>, Rc<RefCell<DesktopConf>>, (u32, u32), usize, Rc<RefCell<Vec<WallpaperItem>>>, Option<usize>);\n\n\n\n fn new(flags: Self::Flags) -> (Self, Command<BackgroundConfMsg>) {\n\n (\n\n Self 
{\n\n proxy: flags.0,\n\n desktop_conf: flags.1,\n\n size: flags.2,\n\n wallpaper_items_state: vec![button::State::new(); flags.3],\n\n wallpaper_items: flags.4,\n\n selected_wallpaper: flags.5,\n\n text: String::from(\"sample test\"),\n\n bg_type_state: Default::default(),\n", "file_path": "desktop/src/gui/background_config.rs", "rank": 52, "score": 32871.21475584154 }, { "content": " BackgroundType::Color => {\n\n let lb_color = Text::new(\"Color: \");\n\n let txt_color = TextInput::new(&mut self.color_state, \"\", &self.text, ColorChanged).padding(7);\n\n\n\n Row::new().spacing(15).align_items(Align::Center).push(lb_color).push(txt_color).into()\n\n },\n\n BackgroundType::Wallpaper => {\n\n let lb_placement = Text::new(\"Mode: \");\n\n let pl_placement = PickList::new(placement_state, &Placement::ALL[..], Some(bg_conf.wallpaper_conf.placement), PlacementChanged).style(CustomSelect);\n\n let sec_selected_wallpaper: Element<_, _> = if let Some(selected) = *selected_wallpaper {\n\n if let Some(item) = wallpaper_items.get(selected) {\n\n let image = Image::new(item.load_image(MIN_THUMNAIL_SIZE, false)).width(Length::Units(THUMBNAIL_SIZE));\n\n let mut row = Row::new().padding(10).spacing(20).align_items(Align::Center).push(image);\n\n if let Some(name) = &item.name {\n\n row = row.push(Text::new(name).size(15))\n\n }\n\n \n\n row.into()\n\n } else {\n\n Row::new().into()\n", "file_path": "desktop/src/gui/background_config.rs", "rank": 53, "score": 32871.05934435951 }, { "content": " let Self {\n\n desktop_conf,\n\n bg_type_state,\n\n placement_state,\n\n wallpaper_items_state,\n\n wallpaper_items, \n\n selected_wallpaper,\n\n btn_add_state,\n\n btn_apply_state,\n\n scroll,\n\n ..\n\n } = self;\n\n\n\n let wallpaper_items = wallpaper_items.borrow();\n\n let desktop_conf = desktop_conf.borrow();\n\n let bg_conf = &desktop_conf.background_conf;\n\n\n\n let lb_bg = Text::new(\"Background:\");\n\n let pl_bg = PickList::new(bg_type_state, &BackgroundType::ALL[..], 
Some(bg_conf.kind), BackgroundTypeChanged).style(CustomSelect);\n\n let content: Element<_, _> = match bg_conf.kind {\n", "file_path": "desktop/src/gui/background_config.rs", "rank": 54, "score": 32870.26269825075 }, { "content": " proxy: EventLoopProxy<ProxyMessage>,\n\n size: (u32, u32),\n\n bg_type_state: pick_list::State<BackgroundType>,\n\n desktop_conf: Rc<RefCell<DesktopConf>>,\n\n color_state: text_input::State,\n\n text: String,\n\n placement_state: pick_list::State<Placement>,\n\n wallpaper_items_state: Vec<button::State>,\n\n wallpaper_items: Rc<RefCell<Vec<WallpaperItem>>>,\n\n selected_wallpaper: Option<usize>,\n\n btn_apply_state: button::State,\n\n btn_add_state: button::State,\n\n scroll: scrollable::State,\n\n is_changed: bool,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum BackgroundConfMsg {\n\n BackgroundTypeChanged(BackgroundType),\n\n ColorChanged(String),\n", "file_path": "desktop/src/gui/background_config.rs", "rank": 55, "score": 32869.97696058123 }, { "content": " grid.push(Tooltip::new(content, name, tooltip::Position::Top).size(13).gap(5).padding(5).style(CustomTooltip))\n\n } else {\n\n grid.push(content)\n\n }\n\n });\n\n \n\n Column::new().spacing(15)\n\n .push(\n\n Row::new().spacing(15).align_items(Align::Center).push(lb_placement).push(pl_placement)\n\n )\n\n .push(sec_selected_wallpaper)\n\n .push(Container::new(wallpaper_grid).style(CustomContainer::Foreground))\n\n .into()\n\n }\n\n };\n\n\n\n let btn_add = Button::new(btn_add_state, Text::new(\" Choose New \")).on_press(AddWallpaperClicked).style(CustomButton::Default);\n\n let mut btn_apply = Button::new(btn_apply_state, Text::new(\" Apply \")).style(CustomButton::Primary);\n\n if self.is_changed {\n\n btn_apply = btn_apply.on_press(ApplyClicked)\n", "file_path": "desktop/src/gui/background_config.rs", "rank": 56, "score": 32869.91007843236 }, { "content": " let red = format!(\"{:02x}\", (color.r * 255.0) as u8);\n\n let green = format!(\"{:02x}\", (color.g * 255.0) as 
u8);\n\n let blue = format!(\"{:02x}\", (color.b * 255.0) as u8);\n\n s.serialize_str(&format!(\"#{}{}{}\", red, green, blue))\n\n }\n\n}\n\n\n\nmod de {\n\n use iced_winit::Color;\n\n use serde::de::{self, Error, Unexpected, Visitor};\n\n use std::fmt;\n\n\n\n fn hex_to_color(hex: &str) -> Option<Color> {\n\n if hex.len() == 7 {\n\n let hash = &hex[0..1];\n\n let r = u8::from_str_radix(&hex[1..3], 16);\n\n let g = u8::from_str_radix(&hex[3..5], 16);\n\n let b = u8::from_str_radix(&hex[5..7], 16);\n\n\n\n return match (hash, r, g, b) {\n", "file_path": "desktop/src/configs/background_conf.rs", "rank": 57, "score": 32867.274361849195 }, { "content": " (\"#\", Ok(r), Ok(g), Ok(b)) => Some(Color {\n\n r: r as f32 / 255.0,\n\n g: g as f32 / 255.0,\n\n b: b as f32 / 255.0,\n\n a: 1.0,\n\n }),\n\n _ => None,\n\n };\n\n }\n\n\n\n None\n\n }\n\n\n\n pub(super) fn deserialize_color_hex_string<'de, D>(deserializer: D) -> Result<Color, D::Error>\n\n where\n\n D: de::Deserializer<'de>,\n\n {\n\n struct ColorVisitor;\n\n\n\n impl<'de> Visitor<'de> for ColorVisitor {\n", "file_path": "desktop/src/configs/background_conf.rs", "rank": 58, "score": 32866.88311916093 }, { "content": "use thiserror::Error;\n\nuse freedesktop_entry_parser::errors::ParseError;\n\n\n\n#[derive(Debug, Error)]\n\npub enum WallpaperError {\n\n #[error(transparent)]\n\n ParseError(#[from] ParseError),\n\n #[error(transparent)]\n\n IOError(#[from] std::io::Error),\n\n #[error(\"this has no image: {0}\")]\n\n NotFound(String),\n\n #[error(\"invalid type of wallpaper\", )]\n\n InvalidType,\n\n}", "file_path": "desktop/src/background/wallpaper_error.rs", "rank": 59, "score": 32866.72110171553 }, { "content": " }\n\n } else {\n\n Row::new().into()\n\n };\n\n\n\n let mut wallpaper_grid = Grid::new().width(Length::Fill).column_width(175).padding(7).spacing(10);\n\n wallpaper_grid = wallpaper_items_state.iter_mut().zip(wallpaper_items.iter()).enumerate().fold(wallpaper_grid, |grid, (idx, (state, item))| {\n\n let 
mut btn = Button::new(state, Image::new(item.load_image(MIN_THUMNAIL_SIZE, false)).width(Length::Fill)).padding(7).width(Length::Units(165)).on_press(WallpaperChanged(idx));\n\n btn = if let Some(selected) = *selected_wallpaper {\n\n if idx == selected {\n\n btn.style(CustomButton::Selected)\n\n } else {\n\n btn.style(CustomButton::Text)\n\n }\n\n } else {\n\n btn.style(CustomButton::Text)\n\n };\n\n let content = Container::new(btn).height(Length::Fill).center_x().center_y();\n\n\n\n if let Some(name) = &item.name {\n", "file_path": "desktop/src/gui/background_config.rs", "rank": 60, "score": 32865.10444796496 }, { "content": " if let Some(item) = wallpaper_items.get(idx) {\n\n wallpaper_conf.wallpaper_path = item.load_image(self.size, true);\n\n }\n\n },\n\n AddWallpaperClicked => {\n\n self.proxy.send_event(ProxyMessage::Bg(AddWallpaperClicked)).unwrap();\n\n had_changed = !self.is_changed;\n\n },\n\n ApplyClicked => {\n\n let _ = desktop_conf.save();\n\n had_changed = true;\n\n }\n\n }\n\n self.is_changed = !had_changed;\n\n\n\n Command::none()\n\n }\n\n\n\n fn view(&mut self) -> Element<Self::Message, Renderer> {\n\n use BackgroundConfMsg::*;\n", "file_path": "desktop/src/gui/background_config.rs", "rank": 61, "score": 32864.11890322658 }, { "content": " type Value = Color;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a hex string in the format of '#09ACDF'\")\n\n }\n\n\n\n #[allow(clippy::unnecessary_unwrap)]\n\n fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>\n\n where\n\n E: Error,\n\n {\n\n if let Some(color) = hex_to_color(s) {\n\n return Ok(color);\n\n }\n\n\n\n Err(de::Error::invalid_value(Unexpected::Str(s), &self))\n\n }\n\n }\n\n\n\n deserializer.deserialize_any(ColorVisitor)\n\n }\n\n}\n", "file_path": "desktop/src/configs/background_conf.rs", "rank": 62, "score": 32863.76245601671 }, { "content": " }\n\n \n\n if high_dpi {\n\n 
walkdir::WalkDir::new(images_path).follow_links(true).into_iter().filter_map(|e| e.ok())\n\n .filter_map(|entry| if entry.path().is_file() {\n\n Some(entry.path().to_path_buf())\n\n } else {\n\n None\n\n })\n\n .nth(0)\n\n .unwrap_or(screenshot)\n\n } else {\n\n screenshot\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Ord for WallpaperItem {\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n self.name.cmp(&other.name)\n\n }\n\n}", "file_path": "desktop/src/background/wallpaper_item.rs", "rank": 63, "score": 32862.09995700431 }, { "content": " }\n\n\n\n Column::new().spacing(15).padding(20)\n\n .push(Row::new().spacing(10).align_items(Align::Center).push(lb_bg).push(pl_bg))\n\n .push(\n\n Scrollable::new(scroll).width(Length::Fill).height(Length::Fill).scroller_width(4).scrollbar_width(4).spacing(15)\n\n .push(content)\n\n )\n\n .push(Row::new().spacing(15).push(btn_add).push(Space::with_width(Length::Fill)).push(btn_apply))\n\n .into()\n\n }\n\n}\n\n\n\nimpl HasChanged for BackgroundConfigUI {\n\n fn has_changed(&self) -> bool {\n\n self.is_changed\n\n }\n\n} \n", "file_path": "desktop/src/gui/background_config.rs", "rank": 64, "score": 32861.364076896934 }, { "content": " path: path.to_path_buf(),\n\n name: path.file_stem().map(|name| name.to_str().unwrap().to_string()),\n\n })\n\n } else if path.is_dir() {\n\n let metadata = path.join(METADATA_FILE);\n\n if metadata.exists() {\n\n let entry = freedesktop_entry_parser::parse_entry(metadata)?;\n\n let desktop_entry = entry.section(DESKTOP_ENTRY);\n\n let name = desktop_entry.attr(NAME).map(ToString::to_string);\n\n\n\n Ok(Self {\n\n is_local, name, path: path.to_path_buf()\n\n })\n\n\n\n } else {\n\n Err(WallpaperError::NotFound(path.display().to_string()))\n\n }\n\n } else {\n\n Err(WallpaperError::InvalidType)\n\n }\n", "file_path": "desktop/src/background/wallpaper_item.rs", "rank": 65, "score": 32858.94768304919 }, { "content": " } else {\n\n Err(WallpaperError::NotFound(path.display().to_string()))\n\n }\n\n 
}\n\n\n\n pub fn load_image(&self, size: (u32, u32), high_dpi: bool) -> PathBuf {\n\n if self.path.is_file() {\n\n self.path.to_path_buf()\n\n } else {\n\n let contents_path = self.path.join(\"contents\");\n\n let images_path = contents_path.join(\"images\");\n\n let image_path = images_path.join(format!(\"{}x{}\", size.0, size.1)).with_extension(\"jpg\");\n\n if image_path.exists() {\n\n image_path.to_path_buf()\n\n } else if image_path.with_extension(\"png\").exists() {\n\n image_path.with_extension(\"png\").to_path_buf()\n\n } else {\n\n let mut screenshot = contents_path.join(\"screenshot\").with_extension(\"png\");\n\n if !screenshot.exists() {\n\n screenshot = screenshot.with_extension(\"jpg\");\n", "file_path": "desktop/src/background/wallpaper_item.rs", "rank": 66, "score": 32858.69897165569 }, { "content": "use iced::{\n\n progress_bar::{Style, StyleSheet},\n\n Background, Color,\n\n};\n\n\n\npub enum ProgressType {\n\n Default,\n\n}\n\n\n\nimpl StyleSheet for ProgressType {\n\n fn style(&self) -> Style {\n\n match self {\n\n ProgressType::Default => Style {\n\n background: Background::Color(Color::BLACK),\n\n border_radius: 10.0,\n\n bar: Background::Color(Color::from_rgba8(9, 132, 227, 1.0)),\n\n },\n\n }\n\n }\n\n}\n", "file_path": "panel/src/styles/progress_bar.rs", "rank": 76, "score": 23.582554492114983 }, { "content": "\n\n match bg_conf.kind {\n\n BackgroundType::Color => bg_conf.color_background,\n\n BackgroundType::Wallpaper => Color::TRANSPARENT,\n\n }\n\n }\n\n}\n\n\n\nimpl Program for Desktop {\n\n type Renderer = Renderer;\n\n type Message = DesktopMsg;\n\n type Clipboard = Clipboard;\n\n\n\n fn update(\n\n &mut self,\n\n message: Self::Message,\n\n _clipboard: &mut Clipboard,\n\n ) -> Command<Self::Message> {\n\n use DesktopMsg::*;\n\n let desktop_items = self.ls_desktop_items.borrow();\n", "file_path": "desktop/src/gui/desktop.rs", "rank": 77, "score": 21.62751640145712 }, { "content": "use super::applets::ControlType;\n\nuse 
super::common::*;\n\nuse crate::styles::buttonstyle::buttons::ButtonStyle;\n\nuse chrono::Timelike;\n\nuse iced::time;\n\nuse iced::{svg::Svg, Text};\n\nuse iced_wgpu::Renderer;\n\nuse iced_winit::{\n\n application::Application, button, winit, Align, Button, Color, Command, Container, Element,\n\n Length, Program, Row, Space, Subscription,\n\n};\n\n// use std::{cell::RefCell, rc::Rc};\n\nuse winit::event_loop::EventLoopProxy;\n\n#[derive(Debug)]\n\npub struct DesktopPanel {\n\n pub background_color: Color,\n\n pub widgets: [button::State; 5],\n\n pub is_exit: bool,\n\n pub is_shown: bool,\n\n pub pre_kind: ControlType,\n", "file_path": "panel/src/views/panel.rs", "rank": 78, "score": 19.663926898197392 }, { "content": "use super::styles::{CustomButton, HOVERED};\n\nuse crate::proxy_message::ProxyMessage;\n\nuse iced_wgpu::Renderer;\n\nuse iced_winit::{\n\n Command, Container, Element, Length, Program, Button, Text, Column, button, \n\n Row, Icon, icon::Icons, Space, Rule, Application, Color, winit, Clipboard,\n\n};\n\nuse winit::event_loop::EventLoopProxy;\n\n\n\n#[derive(Debug)]\n\npub struct ContextMenu {\n\n menu_items: Vec<MenuItemNode>,\n\n proxy: EventLoopProxy<ProxyMessage>,\n\n}\n\n\n\n#[derive(Debug, Clone, Default)]\n\npub struct MenuItemNode {\n\n state: button::State,\n\n title: String,\n\n selected: bool,\n", "file_path": "desktop/src/gui/context_menu.rs", "rank": 79, "score": 19.149558102681105 }, { "content": "use super::has_changed::HasChanged;\n\nuse super::styles::{CustomButton, CustomCheckbox, CustomSelect, CustomSlider, BACKGROUND};\n\nuse crate::configs::{\n\n desktop_item_conf::{Arrangement, DesktopItemConf, Sorting},\n\n DesktopConf, PersistentData,\n\n};\n\nuse crate::proxy_message::ProxyMessage;\n\nuse iced_wgpu::Renderer;\n\nuse iced_winit::{\n\n button, pick_list, scrollable, slider, winit, Align, Application, Button, Checkbox, Clipboard,\n\n Color, Column, Command, Element, Length, PickList, Program, Row, Scrollable, Slider, Space,\n\n 
Text,\n\n};\n\nuse std::{cell::RefCell, rc::Rc};\n\nuse winit::event_loop::EventLoopProxy;\n\n\n\n#[derive(Debug)]\n\npub struct DesktopConfigUI {\n\n desktop_conf: Rc<RefCell<DesktopConf>>,\n\n arrangement_state: pick_list::State<Arrangement>,\n", "file_path": "desktop/src/gui/desktop_config.rs", "rank": 80, "score": 18.953388551325958 }, { "content": "use super::common::icon;\n\nuse crate::styles::containers::CustomContainer;\n\nuse crate::styles::slider::SliderType;\n\nuse iced::{Column, Element};\n\nuse iced_wgpu::Renderer;\n\nuse iced_winit::{\n\n button, slider, Align, Button, Command, Container, Length, Program, Row, Slider, Text,\n\n};\n\nuse libkoompi::system_settings::sounds::controllers::{\n\n DeviceControl, SinkController, SourceController,\n\n};\n\n#[derive(Default)]\n\npub struct Audio {\n\n controllers: [button::State; 2],\n\n settings: [button::State; 4],\n\n\n\n slide_dev: slider::State,\n\n slide_head: slider::State,\n\n current_index: usize,\n\n sink_input: SinkController,\n", "file_path": "panel/src/views/sound.rs", "rank": 82, "score": 18.383845185388548 }, { "content": "use super::common::icon;\n\nuse crate::styles::containers::CustomContainer;\n\nuse iced::svg::Svg;\n\nuse iced_wgpu::Renderer;\n\nuse iced_winit::{\n\n button, Align, Button, Checkbox, Column, Command, Container, Element, HorizontalAlignment,\n\n Length, Program, Row, Text,\n\n};\n\n#[derive(Debug, Default)]\n\npub struct Monitor {\n\n is_present_mode: bool,\n\n monitor: [button::State; 6],\n\n test: button::State,\n\n}\n\n#[derive(Debug, Clone)]\n\npub enum MonitorMsg {\n\n OnPresent(bool),\n\n OnScreenMode,\n\n External,\n\n Laptop,\n", "file_path": "panel/src/views/monitor.rs", "rank": 83, "score": 17.93505254240078 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq)]\n\npub enum ConnectionState {\n\n Started,\n\n Activated,\n\n Deactivated,\n\n Activating,\n\n Deactivating,\n\n Finished,\n\n}\n\n#[allow(dead_code)]\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub enum 
NetworkState {\n\n Known,\n\n Unknown,\n\n}\n\nimpl Default for ConnectionState {\n\n fn default() -> Self {\n\n ConnectionState::Started\n\n }\n\n}\n\nimpl WifiProperty {\n\n pub fn new() -> Self {\n\n Self {\n\n button_string: \"Connect\".to_string(),\n\n ..Default::default()\n\n }\n\n }\n\n}\n", "file_path": "panel/src/views/wireless.rs", "rank": 84, "score": 17.828807950544647 }, { "content": "use super::common::*;\n\nuse super::panel::Message as PanelMessage;\n\nuse crate::styles::{containers::CustomContainer, progress_bar::ProgressType, slider::SliderType};\n\nuse battery::{units::ratio::percent, Batteries, Battery as BatteryInfo, Manager};\n\nuse iced_wgpu::Renderer;\n\nuse iced_winit::{\n\n slider, Align, Application, Column, Command, Container, Element, HorizontalAlignment, Length,\n\n Program, ProgressBar, Row, Slider, Space, Text,\n\n};\n\nuse libkoompi::system_settings::devices::Brightness;\n\nuse std::any::type_name;\n\n\n\n#[derive(Debug)]\n\npub struct Battery {\n\n // data state\n\n pub current_battery: f32,\n\n battery_health: f32,\n\n is_full: bool,\n\n is_charging: bool,\n\n is_discharged: bool,\n\n manager: Manager,\n\n battery_info: BatteryInfo,\n\n}\n\n#[derive(Debug, Default)]\n", "file_path": "panel/src/views/battery.rs", "rank": 85, "score": 17.677514209158065 }, { "content": "use super::battery::{BatteryView, BatteryViewMsg};\n\nuse super::common::icon;\n\nuse super::monitor::{Monitor, MonitorMsg};\n\nuse super::panel::Message;\n\nuse super::sound::{Audio, AudioMsg};\n\nuse super::wireless::{Wireless, WirelessMsg};\n\nuse crate::styles::containers::CustomContainer;\n\nuse iced_wgpu::Renderer;\n\nuse iced_winit::{\n\n button, slider,\n\n winit::event_loop::{EventLoop, EventLoopProxy},\n\n Application, Command, Container, Element, Length, Program, Text,\n\n};\n\n\n\npub struct Applets {\n\n pub slider: slider::State,\n\n pub value: f32,\n\n pub mute: button::State,\n\n pub kind: ControlType,\n\n monitor: Monitor,\n", "file_path": 
"panel/src/views/applets.rs", "rank": 86, "score": 17.233453021838248 }, { "content": "use super::common::*;\n\nuse super::panel::Message;\n\nuse crate::styles::{buttonstyle::buttons::ButtonStyle, containers::CustomContainer};\n\nuse async_std::task;\n\nuse iced_wgpu::Renderer;\n\nuse iced_winit::{\n\n button, scrollable, text_input, winit::event_loop::EventLoopProxy, Align, Button, Column,\n\n Command, Container, Element, HorizontalAlignment, Length, Program, Row, Rule, Scrollable,\n\n Space, Text, TextInput,\n\n};\n\nuse libkoompi::system_settings::network::{\n\n get_accesspoints, wifi::Connectivity, wifi::WifiInterface, Wifi,\n\n};\n\nuse std::sync::mpsc;\n\n#[derive(Debug, Clone)]\n\npub struct Wireless {\n\n is_active: bool,\n\n is_shown: bool,\n\n status: String,\n\n security: Option<String>,\n", "file_path": "panel/src/views/wireless.rs", "rank": 87, "score": 16.9039532848001 }, { "content": "impl Default for Placement {\n\n fn default() -> Self {\n\n Self::FillScreen\n\n }\n\n}\n\n\n\nimpl Placement {\n\n pub const ALL: [Placement; 6] = [\n\n Placement::Tiled, Placement::Zoomed, Placement::Centered, Placement::Scaled, Placement::FillScreen, Placement::Spanned\n\n ];\n\n}\n\n\n\nimpl Display for Placement {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { \n\n use Placement::*;\n\n write!(f, \"{}\", match self {\n\n Tiled => \"Tiled\",\n\n Zoomed => \"Zoomed\",\n\n Centered => \"Centered\",\n\n Scaled => \"Scaled\",\n\n FillScreen => \"FillScreen\",\n\n Spanned => \"Spanned\"\n\n })\n\n }\n\n}\n", "file_path": "desktop/src/configs/wallpaper_conf.rs", "rank": 88, "score": 16.901857487804197 }, { "content": "use super::styles::CustomButton;\n\nuse crate::background::WallpaperItem;\n\nuse crate::configs::wallpaper_conf::{Placement, WallpaperConf};\n\nuse iced_wgpu::Renderer;\n\nuse iced_winit::{\n\n button, pick_list, Align, Button, Column, Container, Element, Grid, Image, Length, PickList,\n\n Row, Text,\n\n};\n\n\n\n#[derive(Debug, Clone, 
Default)]\n\npub struct WallpaperConfigUI {\n\n placement_state: pick_list::State<Placement>,\n\n wallpaper_conf: WallpaperConf,\n\n wallpaper_items: Vec<(button::State, WallpaperItem)>,\n\n selected_wallpaper: Option<usize>,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum WallpaperConfigMsg {\n\n PlacementChanged(Placement),\n", "file_path": "desktop/src/gui/wallpaper_config.rs", "rank": 89, "score": 16.87141231799365 }, { "content": " pub kind: ControlType,\n\n pub now: chrono::DateTime<chrono::Local>,\n\n proxy: EventLoopProxy<Message>,\n\n monitor_visible: bool,\n\n sound_visible: bool,\n\n battery_visible: bool,\n\n wifi_visible: bool,\n\n battery_level: f32,\n\n}\n\n\n\nimpl Application for DesktopPanel {\n\n type Flags = EventLoopProxy<Message>;\n\n fn new(flags: Self::Flags) -> (Self, Command<Message>) {\n\n (\n\n Self {\n\n background_color: Color::from_rgb8(255, 255, 255),\n\n widgets: Default::default(),\n\n is_exit: false,\n\n is_shown: false,\n\n pre_kind: ControlType::Monitor,\n", "file_path": "panel/src/views/panel.rs", "rank": 90, "score": 16.522141164151375 }, { "content": "pub mod buttonstyle;\n\npub mod containers;\n\npub mod progress_bar;\n\npub mod slider;\n", "file_path": "panel/src/styles.rs", "rank": 91, "score": 15.561436561426241 }, { "content": "use crate::gui::{BackgroundConfMsg, ContextMsg, DesktopConfigMsg, DesktopMsg};\n\n\n\n#[derive(Debug, Clone)]\n\npub enum ProxyMessage {\n\n Desktop(DesktopMsg),\n\n ContextMenu(ContextMsg),\n\n Bg(BackgroundConfMsg),\n\n DesktopConf(DesktopConfigMsg),\n\n}\n\n\n\nimpl From<DesktopMsg> for ProxyMessage {\n\n fn from(msg: DesktopMsg) -> Self {\n\n Self::Desktop(msg)\n\n }\n\n}\n\n\n\nimpl From<ContextMsg> for ProxyMessage {\n\n fn from(msg: ContextMsg) -> Self {\n\n Self::ContextMenu(msg)\n\n }\n", "file_path": "desktop/src/proxy_message.rs", "rank": 92, "score": 14.91978702668954 }, { "content": "mod background_config;\n\nmod color_config;\n\nmod context_menu;\n\nmod desktop;\n\nmod 
desktop_config;\n\nmod has_changed;\n\nmod styles;\n\n\n\npub use background_config::{BackgroundConfMsg, BackgroundConfigUI};\n\npub use context_menu::{ContextMenu, ContextMsg};\n\npub use desktop::{Desktop, DesktopMsg};\n\npub use desktop_config::{DesktopConfigMsg, DesktopConfigUI};\n\npub use has_changed::HasChanged;\n", "file_path": "desktop/src/gui.rs", "rank": 93, "score": 14.91092717147459 }, { "content": " match msg {\n\n PlacementChanged(val) => self.wallpaper_conf.placement = val,\n\n WallpaperChanged(idx) => {\n\n self.selected_wallpaper = Some(idx);\n\n self.wallpaper_conf.wallpaper_path = self.wallpaper_items[idx].1.path.to_path_buf();\n\n }\n\n }\n\n }\n\n\n\n pub fn view(&mut self) -> Element<WallpaperConfigMsg, Renderer> {\n\n use WallpaperConfigMsg::*;\n\n let Self {\n\n wallpaper_conf,\n\n placement_state,\n\n wallpaper_items,\n\n selected_wallpaper,\n\n } = self;\n\n let lb_placement = Text::new(\"Placement: \");\n\n let pl_placement = PickList::new(\n\n placement_state,\n", "file_path": "desktop/src/gui/wallpaper_config.rs", "rank": 94, "score": 14.76868643843633 }, { "content": " fn title(&self) -> String {\n\n String::from(\"Desktop\")\n\n }\n\n\n\n fn background_color(&self) -> Color {\n\n BACKGROUND\n\n }\n\n}\n\n\n\nimpl Program for DesktopConfigUI {\n\n type Message = DesktopConfigMsg;\n\n type Renderer = Renderer;\n\n type Clipboard = Clipboard;\n\n\n\n fn update(&mut self, msg: Self::Message, _clipboard: &mut Clipboard) -> Command<Self::Message> {\n\n use DesktopConfigMsg::*;\n\n let mut had_changed = false;\n\n let mut desktop_conf = self.desktop_conf.borrow_mut();\n\n let desktop_item_conf = &mut desktop_conf.desktop_item_conf;\n\n\n", "file_path": "desktop/src/gui/desktop_config.rs", "rank": 95, "score": 14.647222894200878 }, { "content": "pub mod background_conf;\n\npub mod desktop_item_conf;\n\nmod persistent_data;\n\npub mod wallpaper_conf;\n\n\n\nuse background_conf::BackgroundConf;\n\nuse desktop_item_conf::DesktopItemConf;\n\npub 
use persistent_data::PersistentData;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::path::PathBuf;\n\n\n\nconst DESKTOP_CONF: &str = \"desktop.toml\";\n\n\n\n#[derive(Debug, Clone, Default, Serialize, Deserialize)]\n\npub struct DesktopConf {\n\n #[serde(rename = \"Background\")]\n\n pub background_conf: BackgroundConf,\n\n #[serde(rename = \"Desktop_Entry\")]\n\n pub desktop_item_conf: DesktopItemConf,\n\n}\n", "file_path": "desktop/src/configs.rs", "rank": 96, "score": 14.595478270631201 }, { "content": "use std::str::FromStr;\n\nuse std::fmt::{self, Display, Formatter};\n\nuse crate::constants::{APP, DIR, LINK, FILE};\n\nuse super::desktop_item_error::DesktopItemError;\n\nuse super::desktop_entry::DesktopEntry;\n\n\n\n#[derive(Debug, Clone, Eq, Ord, PartialEq, PartialOrd)]\n\npub enum DesktopItemType {\n\n APP(DesktopEntry),\n\n DIR,\n\n FILE,\n\n LINK,\n\n NULL,\n\n}\n\n\n\nimpl Default for DesktopItemType {\n\n fn default() -> Self {\n\n Self::NULL\n\n }\n\n}\n", "file_path": "desktop/src/desktop_item/desktop_item_type.rs", "rank": 97, "score": 14.282410851324979 }, { "content": " pub fn new(proxy: EventLoopProxy<Message>) -> Self {\n\n // let result = task::spawn(async {\n\n // let handle = task::spawn(async move {\n\n // return get_list_ssid();\n\n // });\n\n // handle.await;\n\n // });\n\n // println!(\"Result: {:?}\", result);\n\n Self {\n\n ssid_vector: Vec::new(),\n\n is_shown: false,\n\n is_active: match Wifi::is_wifi_enabled() {\n\n Ok(status) => status,\n\n Err(e) => {\n\n println!(\"Error : {:?}\", e);\n\n false\n\n }\n\n },\n\n search_vector: Vec::new(),\n\n is_found: true,\n", "file_path": "panel/src/views/wireless.rs", "rank": 98, "score": 14.254202139792014 }, { "content": " fn title(&self) -> String {\n\n String::from(\"Context Menu\")\n\n }\n\n\n\n fn background_color(&self) -> Color {\n\n HOVERED\n\n }\n\n}\n\n\n\nimpl Program for ContextMenu {\n\n type Renderer = Renderer;\n\n type Message = ContextMsg;\n\n type Clipboard = 
Clipboard;\n\n\n\n fn update(&mut self, message: ContextMsg, _clipboard: &mut Clipboard) -> Command<ContextMsg> {\n\n use ContextMsg::*;\n\n match message {\n\n NewFolder => self.proxy.send_event(ProxyMessage::ContextMenu(NewFolder)).unwrap(),\n\n ChangeBG => self.proxy.send_event(ProxyMessage::ContextMenu(ChangeBG)).unwrap(),\n\n SortBy => println!(\"change sort by field\"),\n", "file_path": "desktop/src/gui/context_menu.rs", "rank": 99, "score": 14.161316716070917 } ]
Rust
src/models.rs
Roba1993/SPE3D
b453ef33a6fc0adfc042784034634aa75e459573
use crate::error::*; use std::sync::atomic::{AtomicUsize, Ordering}; use dlc_decrypter::DlcPackage; use std::sync::{Arc, RwLock, Mutex}; use std::sync::mpsc::{Sender, Receiver}; use std::fs::File; use std::io::prelude::*; use crate::bus::{MessageBus, Message}; use std::thread; static IDCOUNTER: AtomicUsize = AtomicUsize::new(1); pub fn set_idcounter(id: usize) { if id > IDCOUNTER.load(Ordering::Relaxed) { IDCOUNTER.store(id, Ordering::Relaxed); IDCOUNTER.fetch_add(1, Ordering::SeqCst); } } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] pub struct DownloadPackage { id: usize, pub name: String, pub files: Vec<DownloadFile>, } impl DownloadPackage { pub fn new<S: Into<String>>(name: S, files: Vec<DownloadFile>) -> DownloadPackage { DownloadPackage { id: IDCOUNTER.fetch_add(1, Ordering::SeqCst), name: name.into(), files } } pub fn id(&self) -> usize { self.id } } impl From<DlcPackage> for DownloadPackage { fn from(dlc: DlcPackage) -> Self { let files = dlc.files.into_iter().map(|i| { let mut f = DownloadFile::new(); f.url = i.url; f.name = i.name; f.size = i.size.parse().unwrap_or(0); f }).collect(); DownloadPackage::new(dlc.name, files) } } #[derive(Default, Serialize, Deserialize, Debug, Clone, PartialEq)] pub struct DownloadFile { id: usize, pub status: FileStatus, pub hoster: FileHoster, pub name: String, pub url: String, pub size: usize, pub downloaded: usize, pub speed: usize, pub hash: FileHash, pub file_id: String, } impl DownloadFile { pub fn new() -> DownloadFile { DownloadFile { id: IDCOUNTER.fetch_add(1, Ordering::SeqCst), status: FileStatus::Unknown, hoster: FileHoster::Unknown, name: "".to_string(), url: "".to_string(), size: 0, downloaded: 0, speed: 0, hash: FileHash::None, file_id: "".to_string(), } } pub fn default() -> Self { Self::new() } pub fn id(&self) -> usize { self.id } } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] pub enum FileHoster { Unknown, ShareOnline, Filer } impl Default for FileHoster { fn default() -> Self { 
FileHoster::Unknown } } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Copy)] pub enum FileStatus { Unknown, Offline, Online, DownloadQueue, Downloading, Downloaded, WrongHash, } impl Default for FileStatus { fn default() -> Self { FileStatus::Unknown } } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] pub enum FileHash { None, Md5(String), } impl FileHash { pub fn md5(&self) -> Option<String> { match self { FileHash::Md5(h) => Some(h.clone()), _ => None } } } impl Default for FileHash { fn default() -> Self { FileHash::None } } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] pub struct CaptchaResult { pub id: usize, pub file_id: String, pub hoster: String, pub url: String } pub type DownloadList = Vec<DownloadPackage>; #[derive(Clone)] pub struct SmartDownloadList { downloads: Arc<RwLock<DownloadList>>, sender: Arc<Mutex<Sender<Message>>>, receiver: Arc<Mutex<Receiver<Message>>>, } impl SmartDownloadList { pub fn new(bus: &MessageBus) -> Result<SmartDownloadList> { let (sender, receiver) = bus.channel()?; let d_list = SmartDownloadList { downloads: Arc::new(RwLock::new(Vec::new())), sender: Arc::new(Mutex::new(sender)), receiver: Arc::new(Mutex::new(receiver)) }; match d_list.load() { Ok(_) => {}, Err(_) => {println!("Can't read previus status of the download list")} }; let d_list_internal = d_list.clone(); thread::spawn(move || loop { match d_list_internal.handle_msg() { Ok(_) => {} Err(e) => println!("{}", e), } }); Ok(d_list) } pub fn set_status(&self, id: usize, status: &FileStatus) -> Result<()> { { let mut dloads = self.downloads.write()?; match dloads.iter().find(|i| i.id() == id) { Some(_) => { dloads.iter_mut().find(|i| i.id() == id).ok_or("The id didn't exist")?.files.iter_mut().for_each(|i| { i.status = *status; }); }, None => { dloads.iter_mut().for_each(|pck| { if let Some(i) = pck.files.iter_mut().find(|i| i.id() == id) { i.status = *status; } }); } }; } self.publish_update()?; self.save() } pub fn 
add_downloaded(&self, id: usize, size: usize) -> Result<()> { self.downloads.write()?.iter_mut().for_each(|pck| if let Some(i) = pck.files.iter_mut().find(|f| f.id() == id) { i.downloaded += size; i.speed = size; } ); Ok(()) } pub fn set_downloaded(&self, id: usize, size: usize) -> Result<()> { self.downloads.write()?.iter_mut().for_each(|pck| if let Some(i) = pck.files.iter_mut().find(|f| f.id() == id) { i.downloaded = size; } ); self.publish_update()?; Ok(()) } pub fn add_package(&self, package: DownloadPackage) -> Result<()> { self.downloads.write()?.push(package); self.publish_update()?; self.save() } pub fn remove(&self, id: usize) -> Result<()> { self.downloads.write()?.iter_mut().for_each(|p| p.files.retain(|i| i.id() != id)); self.downloads.write()?.retain(|i| i.id() != id && !i.files.is_empty() ); self.publish_update()?; self.save() } pub fn get_downloads(&self) -> Result<DownloadList> { Ok(self.downloads.read()?.clone()) } pub fn files_status(&self, status: FileStatus) -> Result<Vec<usize>> { let ids = self.downloads.read()?.iter().map(|pck| pck.files.iter() .filter(|i| i.status == status) .map(|i| i.id()).collect::<Vec<usize>>() ).flat_map(|i| i.into_iter()) .collect::<Vec<usize>>(); Ok(ids) } pub fn files_status_hoster(&self, status: FileStatus, hoster: FileHoster) -> Result<Vec<usize>> { let ids = self.downloads.read()?.iter().map(|pck| pck.files.iter() .filter(|i| i.status == status && i.hoster == hoster) .map(|i| i.id()).collect::<Vec<usize>>() ).flat_map(|i| i.into_iter()) .collect::<Vec<usize>>(); Ok(ids) } pub fn get_file(&self, id: &usize) -> Result<DownloadFile> { let file = self.downloads.read()?.iter() .flat_map(|i| i.files.iter()) .find(|i| id == &i.id()).ok_or("The file can't be found")?.clone(); Ok(file.clone()) } pub fn get_package(&self, id: &usize) -> Result<DownloadPackage> { match self.downloads.read()?.iter().find(|i| &i.id() == id) { Some(i) => Ok(i.clone()), None => Ok(self.downloads.read()?.iter().find(|i| i.files.iter().any(|j| 
&j.id() == id)).ok_or("No download package available")?.clone()) } } pub fn get_high_id(&self) -> Result<usize> { let biggest_child = self.downloads.read()?.iter().map(|pck| pck.files.iter() .map(|i| i.id()).collect::<Vec<usize>>() ).flat_map(|i| i.into_iter()) .collect::<Vec<usize>>(); let biggest_child = biggest_child.iter().max().unwrap_or(&1); let biggest_parent = self.downloads.read()?.iter() .map(|x| x.id()) .collect::<Vec<usize>>(); let biggest_parent = biggest_parent.iter().max().unwrap_or(&1); Ok( if biggest_child > biggest_parent { *biggest_child } else { *biggest_parent } ) } pub fn publish_update(&self) -> Result<()> { self.sender.lock()?.send(Message::DownloadList(self.get_downloads()?))?; Ok(()) } fn handle_msg(&self) -> Result<()> { if let Message::DownloadSpeed((id, size)) = self.receiver.lock()?.recv()? { self.add_downloaded(id, size)?; } Ok(()) } fn save(&self) -> Result<()> { let d_list = ::serde_json::to_string_pretty(&(self.get_high_id()?, self.get_downloads()?))?; let mut file = File::create("./config/status.json")?; file.write_all(&d_list.into_bytes())?; Ok(()) } fn load(&self) -> Result<()> { let file = File::open("./config/status.json")?; let (id, d_list) : (usize, DownloadList) = ::serde_json::from_reader(file)?; crate::models::set_idcounter(id); for mut p in d_list { p.files.iter_mut().for_each(|f| { f.speed = 0; if f.status == FileStatus::Downloading { f.status = FileStatus::DownloadQueue; }; }); self.add_package(p)?; } Ok(()) } }
use crate::error::*; use std::sync::atomic::{AtomicUsize, Ordering}; use dlc_decrypter::DlcPackage; use std::sync::{Arc, RwLock, Mutex}; use std::sync::mpsc::{Sender, Receiver}; use std::fs::File; use std::io::prelude::*; use crate::bus::{MessageBus, Message}; use std::thread; static IDCOUNTER: AtomicUsize = AtomicUsize::new(1); pub fn set_idcounter(id: usize) { if id > IDCOUNTER.load(Ordering::Relaxed) { IDCOUNTER.store(id, Ordering::Relaxed); IDCOUNTER.fetch_add(1, Ordering::SeqCst); } } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] pub struct DownloadPackage { id: usize, pub name: String, pub files: Vec<DownloadFile>, } impl DownloadPackage { pub fn new<S: Into<String>>(name: S, files: Vec<DownloadFile>) -> DownloadPackage { DownloadPackage { id: IDCOUNTER.fetch_add(1, Ordering::SeqCst), name: name.into(), files } } pub fn id(&self) -> usize { self.id } } impl From<DlcPackage> for DownloadPackage { fn from(dlc: DlcPackage) -> Self { let files = dlc.files.into_iter().map(|i| { let mut f = DownloadFile::new(); f.url = i.url; f.name = i.name; f.size = i.size.parse().unwrap_or(0); f }).collect(); DownloadPackage::new(dlc.name, files) } } #[derive(Default, Serialize, Deserialize, Debug, Clone, PartialEq)] pub struct DownloadFile { id: usize, pub status: FileStatus, pub hoster: FileHoster, pub name: String, pub url: String, pub size: usize, pub downloaded: usize, pub speed: usize, pub hash: FileHash, pub file_id: String, } impl DownloadFile { pub fn new() -> DownloadFile { DownloadFile { id: IDCOUNTER.fetch_add(1, Ordering::SeqCst), status: FileStatus::Unknown, hoster: FileHoster::Unknown, name: "".to_string(), url: "".to_string(), size: 0, downloaded: 0, speed: 0, hash: FileHash::None, file_id: "".to_string(), } } pub fn default() -> Self { Self::new() } pub fn id(&self) -> usize { self.id } } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] pub enum FileHoster { Unknown, ShareOnline, Filer } impl Default for FileHoster { fn default() -> Self { 
FileHoster::Unknown } } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Copy)] pub enum FileStatus { Unknown, Offline, Online, DownloadQueue, Downloading, Downloaded, WrongHash, } impl Default for FileStatus { fn default() -> Self { FileStatus::Unknown } } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] pub enum FileHash { None, Md5(String), } impl FileHash { pub fn md5(&self) -> Option<String> { match self { FileHash::Md5(h) => Some(h.clone()), _ => None } } } impl Default for FileHash { fn default() -> Self { FileHash::None } } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq)] pub struct CaptchaResult { pub id: usize, pub file_id: String, pub hoster: String, pub url: String } pub type DownloadList = Vec<DownloadPackage>; #[derive(Clone)] pub struct SmartDownloadList { downloads: Arc<RwLock<DownloadList>>, sender: Arc<Mutex<Sender<Message>>>, receiver: Arc<Mutex<Receiver<Message>>>, } impl SmartDownloadList { pub fn new(bus: &MessageBus) -> Result<SmartDownloadList> { let (sender, receiver) = bus.channel()?; let d_list = SmartDownloadList { downloads: Arc::new(RwLock::new(Vec::new())), sender: Arc::new(Mutex::new(sender)), receiver: Arc::new(Mutex::new(receiver))
} }); Ok(d_list) } pub fn set_status(&self, id: usize, status: &FileStatus) -> Result<()> { { let mut dloads = self.downloads.write()?; match dloads.iter().find(|i| i.id() == id) { Some(_) => { dloads.iter_mut().find(|i| i.id() == id).ok_or("The id didn't exist")?.files.iter_mut().for_each(|i| { i.status = *status; }); }, None => { dloads.iter_mut().for_each(|pck| { if let Some(i) = pck.files.iter_mut().find(|i| i.id() == id) { i.status = *status; } }); } }; } self.publish_update()?; self.save() } pub fn add_downloaded(&self, id: usize, size: usize) -> Result<()> { self.downloads.write()?.iter_mut().for_each(|pck| if let Some(i) = pck.files.iter_mut().find(|f| f.id() == id) { i.downloaded += size; i.speed = size; } ); Ok(()) } pub fn set_downloaded(&self, id: usize, size: usize) -> Result<()> { self.downloads.write()?.iter_mut().for_each(|pck| if let Some(i) = pck.files.iter_mut().find(|f| f.id() == id) { i.downloaded = size; } ); self.publish_update()?; Ok(()) } pub fn add_package(&self, package: DownloadPackage) -> Result<()> { self.downloads.write()?.push(package); self.publish_update()?; self.save() } pub fn remove(&self, id: usize) -> Result<()> { self.downloads.write()?.iter_mut().for_each(|p| p.files.retain(|i| i.id() != id)); self.downloads.write()?.retain(|i| i.id() != id && !i.files.is_empty() ); self.publish_update()?; self.save() } pub fn get_downloads(&self) -> Result<DownloadList> { Ok(self.downloads.read()?.clone()) } pub fn files_status(&self, status: FileStatus) -> Result<Vec<usize>> { let ids = self.downloads.read()?.iter().map(|pck| pck.files.iter() .filter(|i| i.status == status) .map(|i| i.id()).collect::<Vec<usize>>() ).flat_map(|i| i.into_iter()) .collect::<Vec<usize>>(); Ok(ids) } pub fn files_status_hoster(&self, status: FileStatus, hoster: FileHoster) -> Result<Vec<usize>> { let ids = self.downloads.read()?.iter().map(|pck| pck.files.iter() .filter(|i| i.status == status && i.hoster == hoster) .map(|i| i.id()).collect::<Vec<usize>>() 
).flat_map(|i| i.into_iter()) .collect::<Vec<usize>>(); Ok(ids) } pub fn get_file(&self, id: &usize) -> Result<DownloadFile> { let file = self.downloads.read()?.iter() .flat_map(|i| i.files.iter()) .find(|i| id == &i.id()).ok_or("The file can't be found")?.clone(); Ok(file.clone()) } pub fn get_package(&self, id: &usize) -> Result<DownloadPackage> { match self.downloads.read()?.iter().find(|i| &i.id() == id) { Some(i) => Ok(i.clone()), None => Ok(self.downloads.read()?.iter().find(|i| i.files.iter().any(|j| &j.id() == id)).ok_or("No download package available")?.clone()) } } pub fn get_high_id(&self) -> Result<usize> { let biggest_child = self.downloads.read()?.iter().map(|pck| pck.files.iter() .map(|i| i.id()).collect::<Vec<usize>>() ).flat_map(|i| i.into_iter()) .collect::<Vec<usize>>(); let biggest_child = biggest_child.iter().max().unwrap_or(&1); let biggest_parent = self.downloads.read()?.iter() .map(|x| x.id()) .collect::<Vec<usize>>(); let biggest_parent = biggest_parent.iter().max().unwrap_or(&1); Ok( if biggest_child > biggest_parent { *biggest_child } else { *biggest_parent } ) } pub fn publish_update(&self) -> Result<()> { self.sender.lock()?.send(Message::DownloadList(self.get_downloads()?))?; Ok(()) } fn handle_msg(&self) -> Result<()> { if let Message::DownloadSpeed((id, size)) = self.receiver.lock()?.recv()? { self.add_downloaded(id, size)?; } Ok(()) } fn save(&self) -> Result<()> { let d_list = ::serde_json::to_string_pretty(&(self.get_high_id()?, self.get_downloads()?))?; let mut file = File::create("./config/status.json")?; file.write_all(&d_list.into_bytes())?; Ok(()) } fn load(&self) -> Result<()> { let file = File::open("./config/status.json")?; let (id, d_list) : (usize, DownloadList) = ::serde_json::from_reader(file)?; crate::models::set_idcounter(id); for mut p in d_list { p.files.iter_mut().for_each(|f| { f.speed = 0; if f.status == FileStatus::Downloading { f.status = FileStatus::DownloadQueue; }; }); self.add_package(p)?; } Ok(()) } }
}; match d_list.load() { Ok(_) => {}, Err(_) => {println!("Can't read previus status of the download list")} }; let d_list_internal = d_list.clone(); thread::spawn(move || loop { match d_list_internal.handle_msg() { Ok(_) => {} Err(e) => println!("{}", e),
function_block-random_span
[ { "content": "/// Trait to write a stream of data to a file.\n\npub trait FileWriter : Read {\n\n /// Function to write a stream of data, to a file\n\n /// based on the std::io::Read trait. This functions\n\n /// returns as result the hash of the written file.\n\n fn write_to_file<S: Into<String>>(&mut self, file: S, id: usize, bus: &MessageBus) -> Result<String> {\n\n // get the sender\n\n let sender = bus.get_sender()?;\n\n\n\n // define the buffer\n\n let mut buffer = [0u8; 4096];\n\n let mut start = Instant::now();\n\n\n\n // define the hasher\n\n let mut hasher = Md5::new();\n\n\n\n // Create the output file\n\n let mut file = File::create(file.into())?;\n\n let mut speed = 0;\n\n\n\n // print out the values\n", "file_path": "src/loader/mod.rs", "rank": 1, "score": 76910.59929600215 }, { "content": " @computed get isDownloaded() { return this.status == \"Downloaded\"; }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 2, "score": 59754.15294873803 }, { "content": " @computed get isDownloading() { return this.status == \"Downloading\"; }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 3, "score": 59754.15294873803 }, { "content": " @computed get speedFmt() { return formatBytes(this.speed, 2); }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 4, "score": 58655.899602913836 }, { "content": " @computed get sizeFmt() { return formatBytes(this.size, 2); }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 5, "score": 58655.899602913836 }, { "content": " @computed get downloadedPercent() { return (this.size != 0) ? (this.downloaded / this.size * 100).toFixed(0) : 0; }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 6, "score": 58408.3181030175 }, { "content": " @computed get downloadTime() { return (this.speed != 0) ? formatTime((this.size / this.speed)) : (this.isDownloaded) ? 
'Done' : 'Not Started'; }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 7, "score": 58408.3181030175 }, { "content": " getFileById(id) {\n\n // loop over all container\n\n for (var i of this.dloads) {\n\n // loop over all files\n\n for (var j of i.files) {\n\n // check if the id is matching\n\n if (j.id == id) {\n\n return j;\n\n }\n\n }\n\n }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 8, "score": 56103.892207708464 }, { "content": "class File {\n\n @observable id;\n\n @observable name;\n\n @observable downloaded;\n\n @observable hash;\n\n @observable host;\n\n @observable infos;\n\n @observable size;\n\n @observable speed;\n\n @observable status;\n\n @observable url;\n\n @computed get sizeFmt() { return formatBytes(this.size, 2); }\n\n @computed get speedFmt() { return formatBytes(this.speed, 2); }\n\n @computed get isDownloading() { return this.status == \"Downloading\"; }\n\n @computed get isDownloaded() { return this.status == \"Downloaded\"; }\n\n @computed get isWarning() { return this.status == \"WrongHash\"; }\n\n @computed get downloadedPercent() { return (this.size != 0) ? (this.downloaded / this.size * 100).toFixed(0) : 0; }\n\n @computed get downloadTime() { return (this.speed != 0) ? formatTime((this.size / this.speed)) : (this.isDownloaded) ? 'Done' : 'Not Started'; }\n\n @computed get icon() { return (this.isDownloaded) ? 'check' : (this.isWarning) ? 'warning sign' : (this.isDownloading) ? 
'spinner' : 'arrow down'; }\n\n\n\n constructor(rawObj) {\n\n this.id = rawObj.id\n\n this.name = rawObj.name\n\n this.downloaded = rawObj.downloaded\n\n this.hash = rawObj.hash\n\n this.host = rawObj.host\n\n this.infos = rawObj.infos\n\n this.size = rawObj.size\n\n this.speed = rawObj.speed\n\n this.status = rawObj.status\n\n this.url = rawObj.url\n\n }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 9, "score": 55794.69698033585 }, { "content": " @computed get size() { return this.files.reduce((pre, curr) => pre + curr.size, 0); }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 10, "score": 55474.83174116825 }, { "content": " @computed get speed() { return this.files.reduce((pre, curr) => pre + curr.speed, 0); }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 11, "score": 55474.83174116825 }, { "content": "/// This `Loader` defines which funtionalities are used to download a file from a source\n\npub trait Loader {\n\n /// This function updates an Share-Online account with the actual status\n\n fn update_account(&self, account: &mut crate::config::ConfigAccount) -> Result<()> ;\n\n\n\n /// Check the download url and return the file info\n\n fn check_url(&self, url: &str) -> Result<Option<DownloadFile>>;\n\n\n\n /// Download a file, with this laoder\n\n fn download(&self, file: &DownloadFile) -> Result<::reqwest::Response>;\n\n\n\n /// Prove that the downloaded file is correct\n\n fn prove_download(&self, file: &DownloadFile, path: &str) -> Result<bool>;\n\n\n\n /// Get the next file download id to continue the download with\n\n fn get_next_download(&self) -> Result<usize>;\n\n}\n\n\n\n\n\n\n\n\n", "file_path": "src/loader/mod.rs", "rank": 12, "score": 49106.19575026444 }, { "content": " static get(hoster) {\n\n switch(hoster) {\n\n case \"Filer\":\n\n return HosterImages.filer();\n\n case \"Share-Online\":\n\n return HosterImages.shareonline();\n\n default:\n\n return HosterImages.other();\n\n }\n", "file_path": 
"react/src/asset/hoster/hoster.js", "rank": 13, "score": 39414.78845699189 }, { "content": "@observer\n\nexport default class AddFile extends Component {\n\n constructor() {\n\n super()\n\n this.state = { files: [] }\n\n }\n\n\n\n onDrop(acceptedFiles) {\n\n acceptedFiles.forEach(file => {\n\n const reader = new FileReader();\n\n reader.onload = () => {\n\n const fileAsBinaryString = reader.result;\n\n\n\n fetch(\"http://\" + window.location.hostname + \":8000/api/add-dlc\",\n\n {\n\n method: \"POST\",\n\n headers: {\n\n 'Accept': 'application/json, text/plain, */*',\n\n 'content-type': 'text/plain'\n\n },\n\n body: fileAsBinaryString\n\n })\n\n .then(res => { \n\n if (res.status != 200) {\n\n this.props.global.notify.createErrorMsg(\"The .dlc file is not valid\", \"The server was not able to interpret the .dlc file\");\n\n }\n\n else {\n\n this.props.global.notify.createOkMsg(\"The .dlc file is valid\", \"The server successfully added the .dlc file\");\n\n }\n\n })\n\n };\n\n reader.onabort = () => this.props.global.notify.createErrorMsg(\"The .dlc file reading interrupted\", \"The file reading was interrupted\");\n\n reader.onerror = () => this.props.global.notify.createErrorMsg(\"The .dlc file reading failed\", \"The file reading failed\");\n\n\n\n reader.readAsBinaryString(file);\n\n });\n\n }\n\n\n\n render() {\n\n return <div>\n\n <Dropzone onDrop={this.onDrop.bind(this)}>\n\n <p>Try dropping some files here, or click to select files to upload.</p>\n\n </Dropzone>\n\n </div>\n\n }\n", "file_path": "react/src/comp/add-file.js", "rank": 14, "score": 38305.66282168422 }, { "content": " constructor() {\n\n super()\n\n this.state = { files: [] }\n", "file_path": "react/src/comp/add-file.js", "rank": 15, "score": 37746.885502642195 }, { "content": " onDrop(acceptedFiles) {\n\n acceptedFiles.forEach(file => {\n\n const reader = new FileReader();\n\n reader.onload = () => {\n\n const fileAsBinaryString = reader.result;\n\n\n\n fetch(\"http://\" + 
window.location.hostname + \":8000/api/add-dlc\",\n\n {\n\n method: \"POST\",\n\n headers: {\n\n 'Accept': 'application/json, text/plain, */*',\n\n 'content-type': 'text/plain'\n\n },\n\n body: fileAsBinaryString\n\n })\n\n .then(res => { \n\n if (res.status != 200) {\n\n this.props.global.notify.createErrorMsg(\"The .dlc file is not valid\", \"The server was not able to interpret the .dlc file\");\n\n }\n\n else {\n\n this.props.global.notify.createOkMsg(\"The .dlc file is valid\", \"The server successfully added the .dlc file\");\n\n }\n\n })\n\n };\n\n reader.onabort = () => this.props.global.notify.createErrorMsg(\"The .dlc file reading interrupted\", \"The file reading was interrupted\");\n\n reader.onerror = () => this.props.global.notify.createErrorMsg(\"The .dlc file reading failed\", \"The file reading failed\");\n\n\n\n reader.readAsBinaryString(file);\n\n });\n", "file_path": "react/src/comp/add-file.js", "rank": 16, "score": 37746.885502642195 }, { "content": " render() {\n\n return <div>\n\n <Dropzone onDrop={this.onDrop.bind(this)}>\n\n <p>Try dropping some files here, or click to select files to upload.</p>\n\n </Dropzone>\n\n </div>\n", "file_path": "react/src/comp/add-file.js", "rank": 17, "score": 37746.885502642195 }, { "content": " }\n\n\n\n // get the body and split the data\n\n let body = resp.text()?;\n\n let json: serde_json::Value = serde_json::from_str(&body)?;\n\n\n\n // create a new file\n\n let mut file = DownloadFile::default();\n\n file.hoster = FileHoster::Filer;\n\n file.url = format!(\n\n \"http://api.filer.net/dl/{}.json\",\n\n json[\"data\"][\"hash\"].as_str().ok_or(\"No Hash provided\")?\n\n );\n\n file.status = FileStatus::Online;\n\n file.name = json[\"data\"][\"name\"]\n\n .as_str()\n\n .ok_or(\"No name provided\")?\n\n .to_string();\n\n file.size = (json[\"data\"][\"size\"].as_i64().ok_or(\"No size provided\")?) 
as usize;\n\n file.hash = FileHash::Md5(\n", "file_path": "src/loader/filer.rs", "rank": 18, "score": 35757.29480413499 }, { "content": " bail!(\"No download id availablr for this hoster\");\n\n }\n\n}\n\n\n\nimpl Filer {\n\n /// Create a new Share-Online downlaoder\n\n pub fn new(config: Config, d_list: SmartDownloadList, bus: MessageBus) -> Filer {\n\n Filer {\n\n config,\n\n d_list,\n\n bus,\n\n }\n\n }\n\n\n\n /// Try to get a free download\n\n fn free_download(&self, file: &DownloadFile) -> Result<::reqwest::Response> {\n\n let (sender, receiver) = self.bus.channel()?;\n\n\n\n // try to get the chaptchar max 30 times\n\n for _i in 0..30 {\n", "file_path": "src/loader/filer.rs", "rank": 19, "score": 35756.476844443765 }, { "content": "//! Filer.net downloader. Responsible to download files from Filer and\n\n//! checking the status of a download link.\n\n//!\n\n//! Right now only Premium Accounts are supported.\n\n\n\nuse crate::bus::{Message, MessageBus};\n\nuse crate::config::Config;\n\nuse crate::error::*;\n\nuse crate::loader::Loader;\n\nuse crate::models::{DownloadFile, FileHash, FileHoster, FileStatus, SmartDownloadList};\n\nuse md5::{Digest, Md5};\n\nuse reqwest;\n\nuse std::fs::File;\n\n\n\n/// Filer downloader struct which allows to download files from filer with an premium account.\n\n#[derive(Clone)]\n\npub struct Filer {\n\n config: Config,\n\n d_list: SmartDownloadList,\n\n bus: MessageBus,\n", "file_path": "src/loader/filer.rs", "rank": 20, "score": 35751.616762896214 }, { "content": " /// Prove the downloaded file via the hash\n\n fn prove_download(&self, file: &DownloadFile, path: &str) -> Result<bool> {\n\n // Only inspect filer downloads\n\n if file.hoster != FileHoster::Filer {\n\n return Ok(false);\n\n }\n\n\n\n // we dont know the hash alogrithem and can't check\n\n Ok(true)\n\n }\n\n\n\n /// Get the next Filer file download id to continue with\n\n fn get_next_download(&self) -> Result<usize> {\n\n let qeue = self\n\n .d_list\n\n 
.files_status_hoster(FileStatus::DownloadQueue, FileHoster::Filer)?;\n\n\n\n // check for share-online premium account\n\n match self.config.get().get_account(\n\n crate::config::ConfigHoster::Filer,\n", "file_path": "src/loader/filer.rs", "rank": 21, "score": 35751.527164955754 }, { "content": "\n\n if r_url.host_str() != Some(\"filer.net\") {\n\n bail!(\"The given link wasn't a share-online download link\");\n\n }\n\n\n\n let req = format!(\n\n \"http://api.filer.net/status/{}.json\",\n\n url.split(\"get/\")\n\n .collect::<Vec<&str>>()\n\n .get(1)\n\n .unwrap_or(&\"\")\n\n );\n\n let mut resp = reqwest::get(&req)?;\n\n\n\n // only continue if the answer was successfull\n\n if resp.status() != reqwest::StatusCode::OK {\n\n let mut file = DownloadFile::default();\n\n file.hoster = FileHoster::Filer;\n\n file.status = FileStatus::Offline;\n\n return Ok(Some(file));\n", "file_path": "src/loader/filer.rs", "rank": 22, "score": 35749.8705315066 }, { "content": " json[\"data\"][\"hash\"]\n\n .as_str()\n\n .ok_or(\"No hash provided\")?\n\n .to_string(),\n\n );\n\n\n\n Ok(Some(file))\n\n }\n\n\n\n /// Download a file, with this laoder\n\n fn download(&self, file: &DownloadFile) -> Result<::reqwest::Response> {\n\n if file.hoster != FileHoster::Filer {\n\n bail!(\"Wrong hoster\");\n\n }\n\n\n\n let acc = match self.config.get().get_account(\n\n crate::config::ConfigHoster::Filer,\n\n crate::config::ConfigAccountStatus::Premium,\n\n ) {\n\n Ok(a) => a,\n", "file_path": "src/loader/filer.rs", "rank": 23, "score": 35748.6216534919 }, { "content": " crate::config::ConfigAccountStatus::Premium,\n\n ) {\n\n Ok(_) => {\n\n // return a new id when a download id exists\n\n if !qeue.is_empty() {\n\n return Ok(qeue.get(0).ok_or(\"Id is not available anymore\")?.clone());\n\n }\n\n }\n\n Err(_) => {\n\n let dloads = self\n\n .d_list\n\n .files_status_hoster(FileStatus::Downloading, FileHoster::Filer)?;\n\n\n\n // start a new free download when nothing is downloaded from so right 
now\n\n if dloads.len() == 0 && !qeue.is_empty() {\n\n return Ok(qeue.get(0).ok_or(\"Id is not available anymore\")?.clone());\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/loader/filer.rs", "rank": 24, "score": 35747.357263935526 }, { "content": " // send a request\n\n sender.send(Message::CaptchaRequest(file.clone()))?;\n\n\n\n // try to get the info for 60 seconds\n\n let now = ::std::time::Instant::now();\n\n while now.elapsed() < ::std::time::Duration::from_secs(60) {\n\n match receiver.recv_timeout(::std::time::Duration::from_secs(5)) {\n\n // when value is received and matched reurn download channel\n\n Ok(v) => {\n\n // we need a captcha response or we continue\n\n if let Some(v) = v.get_captcha_response() {\n\n if v.id == file.id() && v.file_id == file.file_id {\n\n // wait the 30 seconf delay from ShareOnline\n\n ::std::thread::sleep(::std::time::Duration::from_secs(30));\n\n\n\n // create the download stream\n\n let resp = reqwest::Client::new().get(&v.url).send()?;\n\n\n\n // only continue if the answer was successfull\n\n if resp.status() != reqwest::StatusCode::OK {\n", "file_path": "src/loader/filer.rs", "rank": 25, "score": 35746.991206079474 }, { "content": "}\n\n\n\nimpl Loader for Filer {\n\n /// This function updates an filer account with the actual status\n\n fn update_account(&self, account: &mut crate::config::ConfigAccount) -> Result<()> {\n\n // This implementation can only check filer accounts\n\n if account.hoster != crate::config::ConfigHoster::Filer {\n\n bail!(\"Not a Filer Account\");\n\n }\n\n\n\n // Get the user status from Filer\n\n let client = reqwest::Client::new();\n\n let mut resp = client\n\n .get(\"http://api.filer.net/profile.json\")\n\n .basic_auth(&account.username, Some(&account.password))\n\n .send()?;\n\n\n\n // only continue if the answer was successfull\n\n if resp.status() != reqwest::StatusCode::OK {\n\n account.status = crate::config::ConfigAccountStatus::NotValid;\n", "file_path": "src/loader/filer.rs", "rank": 26, 
"score": 35741.66661635677 }, { "content": " return Ok(());\n\n }\n\n\n\n // get the body\n\n let body = resp.text()?;\n\n let json: serde_json::Value = serde_json::from_str(&body)?;\n\n\n\n // set the account status\n\n match json[\"data\"][\"state\"].as_str() {\n\n Some(\"premium\") => account.status = crate::config::ConfigAccountStatus::Premium,\n\n Some(\"free\") => account.status = crate::config::ConfigAccountStatus::Free,\n\n _ => account.status = crate::config::ConfigAccountStatus::Unknown,\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n /// Check the download url and return the file info\n\n fn check_url(&self, url: &str) -> Result<Option<DownloadFile>> {\n\n let r_url = reqwest::Url::parse(url)?;\n", "file_path": "src/loader/filer.rs", "rank": 27, "score": 35740.7698444712 }, { "content": " Err(_) => {\n\n return self.free_download(file);\n\n }\n\n };\n\n\n\n let client = reqwest::Client::new();\n\n let resp = client\n\n .get(&file.url)\n\n .basic_auth(&acc.username, Some(&acc.password))\n\n .send()?;\n\n\n\n // only continue if the answer was successfull\n\n if resp.status() != reqwest::StatusCode::OK {\n\n bail!(\"Filer file info failed, please check your credentials or download link\");\n\n }\n\n\n\n // return the result\n\n Ok(resp)\n\n }\n\n\n", "file_path": "src/loader/filer.rs", "rank": 28, "score": 35738.84883226259 }, { "content": " bail!(\"Share-online free download failed\");\n\n }\n\n\n\n // return the result\n\n return Ok(resp);\n\n }\n\n }\n\n\n\n continue;\n\n }\n\n // On error either continue or return error\n\n Err(e) => {\n\n if e == ::std::sync::mpsc::RecvTimeoutError::Timeout {\n\n continue;\n\n } else {\n\n bail!(\"Can't receive captcha solving\");\n\n }\n\n }\n\n };\n\n }\n\n }\n\n\n\n bail!(\"Can't do free download\");\n\n }\n\n}\n", "file_path": "src/loader/filer.rs", "rank": 29, "score": 35735.390262249304 }, { "content": "class Message {\n\n @observable id;\n\n @observable name;\n\n @observable description;\n\n @observable type;\n\n\n\n 
@computed get icon() {\n\n if(this.type == \"error\") {\n\n return \"warning sign\"\n\n }\n\n else if(this.type == \"ok\") {\n\n return \"check circle outline\"\n\n }\n\n }\n\n\n\n constructor(name, description, type) {\n\n this.id = Date.now();\n\n this.name = name;\n\n this.description = description;\n\n this.type = type;\n\n }\n", "file_path": "react/src/stores/NotifyStore.js", "rank": 30, "score": 30907.516152176373 }, { "content": " startDownload(e) {\n\n e.preventDefault();\n\n global.dload.con.startDloadById(global.ui.selected);\n", "file_path": "react/src/index.js", "rank": 31, "score": 30648.256092543546 }, { "content": " @computed get icon() {\n\n if(this.type == \"error\") {\n\n return \"warning sign\"\n\n }\n\n else if(this.type == \"ok\") {\n\n return \"check circle outline\"\n\n }\n", "file_path": "react/src/stores/NotifyStore.js", "rank": 32, "score": 30195.350255550075 }, { "content": " constructor(name, description, type) {\n\n this.id = Date.now();\n\n this.name = name;\n\n this.description = description;\n\n this.type = type;\n", "file_path": "react/src/stores/NotifyStore.js", "rank": 33, "score": 30195.350255550075 }, { "content": " @computed get downloaded() { return this.files.reduce((pre, curr) => pre + curr.downloaded, 0); }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 34, "score": 29942.064023510473 }, { "content": " @computed get isDownloading() { return this.files.some(f => f.status == \"Downloading\"); }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 35, "score": 29942.064023510473 }, { "content": " @computed get isDownloaded() { return this.files.every(f => f.status == \"Downloaded\"); }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 36, "score": 29942.064023510473 }, { "content": " @computed get icon() { return (this.isDownloaded) ? 'check' : (this.isWarning) ? 'warning sign' : (this.isDownloading) ? 
'spinner' : 'arrow down'; }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 37, "score": 29812.088925227556 }, { "content": " @computed get isWarning() { return this.status == \"WrongHash\"; }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 38, "score": 29812.088925227556 }, { "content": " constructor(rawObj) {\n\n this.id = rawObj.id\n\n this.name = rawObj.name\n\n this.downloaded = rawObj.downloaded\n\n this.hash = rawObj.hash\n\n this.host = rawObj.host\n\n this.infos = rawObj.infos\n\n this.size = rawObj.size\n\n this.speed = rawObj.speed\n\n this.status = rawObj.status\n\n this.url = rawObj.url\n", "file_path": "react/src/stores/DloadStore.js", "rank": 39, "score": 29812.088925227556 }, { "content": " @computed get speedFmt() { return formatBytes(this.speed, 2); }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 40, "score": 29515.264394613878 }, { "content": " @computed get sizeFmt() { return formatBytes(this.size, 2); }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 41, "score": 29515.264394613878 }, { "content": " @computed get downloadedFmt() { return formatBytes(this.downloaded, 2); }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 42, "score": 29267.682894717535 }, { "content": " @computed get downloadedPercent() { return (this.size != 0) ? (this.downloaded / this.size * 100).toFixed(0) : 0; }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 43, "score": 29267.682894717535 }, { "content": " @computed get downloadTime() { return (this.speed != 0) ? formatTime((this.size / this.speed)) : (this.isDownloaded) ? 'Done' : 'Not Started'; }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 44, "score": 29267.682894717535 }, { "content": " @computed get finishedDownloads() { return this.files.reduce((pre, curr) => (curr.status == \"Downloaded\") ? 
pre += 1 : pre, 0); }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 45, "score": 29267.682894717535 }, { "content": " removeDloadById(id) {\n\n if (!id) {\n\n console.error(\"No 'id' was given for function call 'removeDloadById'\");\n\n throw \"No 'id' was given for function call 'removeDloadById'\";\n\n }\n\n\n\n fetch(\"http://\" + this.store.server + \"/api/delete-link/\" + id,\n\n {\n\n method: \"POST\",\n\n headers: {\n\n 'Accept': 'application/json, text/plain, */*',\n\n 'Content-Type': 'application/json'\n\n },\n\n })\n\n .then(res => {\n\n // only the 200 status indicates that every went correct, every other message leads to an error\n\n if (res.status != 200) {\n\n throw { error: \"No 200 header returned\", details: res };\n\n }\n\n })\n\n .catch(error => {\n\n console.error(\"The function 'removeDloadById' returned the following error:\");\n\n console.error(error);\n\n this.store.notify.createErrorMsg(\"Deletion failed\", \"The server was not able to remove the link\");\n\n });\n", "file_path": "react/src/con/Dload.js", "rank": 46, "score": 28840.917855731113 }, { "content": " startDloadById(id) {\n\n if (!id) {\n\n console.error(\"No 'id' was given for function call 'startDloadById'\");\n\n throw \"No 'id' was given for function call 'startDloadById'\";\n\n }\n\n\n\n fetch(\"http://\" + this.store.server + \"/api/start-download/\" + id,\n\n {\n\n method: \"POST\"\n\n })\n\n .then(res => {\n\n // only the 200 status indicates that every went correct, every other message leads to an error\n\n if (res.status != 200) {\n\n throw { error: \"No 200 header returned\", details: res };\n\n }\n\n })\n\n .catch(error => {\n\n console.error(\"The function 'startDloadById' returned the following error:\");\n\n console.error(error);\n\n this.store.notify.createErrorMsg(\"Download not started\", \"The server was not able to start the download\");\n\n });\n", "file_path": "react/src/con/Dload.js", "rank": 47, "score": 28840.917855731113 }, { "content": " 
start_download(e, id) {\n\n e.preventDefault();\n\n this.props.global.dload.con.startDloadById(id);\n", "file_path": "react/src/comp/dload-container.js", "rank": 48, "score": 28623.01063020389 }, { "content": " getFile(id) {\n\n for (var i of this.dloads) {\n\n for (var j of i.files) {\n\n if (j.id == id) {\n\n return j;\n\n }\n\n }\n\n }\n", "file_path": "react/src/stores/DloadStore.js", "rank": 49, "score": 28498.761392846955 }, { "content": " getHosterImage(hoster) {\n\n for (var h of this.hoster) {\n\n if (h.value == hoster) {\n\n return h.img;\n\n }\n\n }\n\n\n\n return HosterImage.other();\n", "file_path": "react/src/stores/ConfigStore.js", "rank": 50, "score": 28267.206907440694 }, { "content": "document.body.innerHTML = `\n\n <div>\n\n <h1>reCAPTCHA demo: Explicit render after an onload callback</h1>\n\n </div>\n\n <div id=\"html_element\"></div>\n\n `;\n\n\n\nresetCss();\n\n\n\ntab_id = null;\n\n\n\n// Get the ShareOnline value from the store\n\nchrome.storage.local.get(['ShareOnline'],\n\n function (result) {\n\n if (result.ShareOnline != null) {\n\n console.log(\"FILE RECEIVED:\");\n\n console.log(result.ShareOnline);\n\n\n\n var so = result.ShareOnline;\n\n tab_id = so.tab_id;\n\n\n\n // execute the injection scripts\n\n addJs(`\n\n String.prototype.insertAt=function(index, string) { \n\n return this.substr(0, index) + string + this.substr(index);\n\n }\n\n\n\n function setDone() {\n\n g = document.createElement('div');\n\n g.setAttribute(\"id\", \"CaptchaSolved\");\n\n document.body.appendChild(g);\n\n }\n\n \n\n function captchaSolved(t) {\n\n var body = \"dl_free=1&captcha=\"+captcha+\"&recaptcha_challenge_field=\" + t + \"&recaptcha_response_field=\" + t;\n\n \n\n console.log(\"body:\");\n\n console.log(body);\n\n \n\n var u = url.split(\"///\").join(\"/free/captcha/\");\n\n u = u.insertAt(4, \"s\");\n\n console.log(\"url\");\n\n console.log(u);\n\n \n\n fetch(u, {\n\n method: 'POST',\n\n body: body,\n\n cache: 'no-store',\n\n timeout: 2e4,\n\n 
headers: { 'Content-Type': 'application/x-www-form-urlencoded', },\n\n })\n\n .then(res => res.text())\n\n .then(function(data) {\n\n // Here you get the data to modify as you please\n\n console.log(\"data\")\n\n console.log(data)\n\n if (data == \"0\") {\n\n grecaptcha.reset();\n\n //location.reload();\n\n }\n\n \n\n console.log(\"file6\")\n\n my_file[6] = $.base64Decode($.trim(data))\n\n console.log(my_file[6])\n\n \n\n sendResult(my_file[6]);\n\n })\n\n .catch(function(error) {\n\n // If there is any error you will catch them here\n\n console.log(error);\n\n setDone();\n\n }); \n\n }\n\n\n\n function sendResult(url) {\n\n var body = {\n\n id: `+ so.id + `,\n\n file_id: '`+ so.file_id + `',\n\n hoster: '`+ so.hoster + `',\n\n url: url\n\n }\n\n\n\n console.log(body);\n\n\n\n fetch('http://localhost:8000/api/captcha-result', {\n\n method: 'POST',\n\n body: JSON.stringify(body),\n\n cache: 'no-store',\n\n headers: {\n\n 'Accept': 'application/json, text/plain, */*',\n\n 'Content-Type': 'application/json'\n\n }\n\n })\n\n .then(res => res.text())\n\n .then(function(data) {\n\n console.log(\"Response successfully commited to server\");\n\n setDone();\n\n })\n\n .catch(function(error) {\n\n // If there is any error you will catch them here\n\n console.log(error);\n\n setDone();;\n\n }); \n\n }\n\n `);\n\n\n\n addJs(`\n\n console.log(\"nfo:\");\n\n console.log(nfo);\n\n console.log(\"dl:\");\n\n console.log(dl);\n\n my_file = info(nfo).split(div);\n\n my_file[5] = $.base64Decode(dl);\n\n console.log('my_file:');\n\n console.log(my_file);\n\n \n\n my_captcha = my_file[5].split(\"hk||\")[1];\n\n console.log(\"captchar:\");\n\n console.log(my_captcha);\n\n \n\n if (my_captcha == undefined) {\n\n var url = \"` + so.url + `/free/\";\n\n var obj, obj2; obj = document.createElement('form');\n\n obj2 = document.createElement('input');\n\n $(obj).attr(\"action\", url).attr(\"method\", \"post\");\n\n $(obj2).attr(\"type\", \"hidden\").attr(\"value\", \"1\").attr(\"name\", 
\"dl_free\");\n\n $(obj).append(obj2); obj2 = document.createElement('input');\n\n $(obj2).attr(\"type\", \"hidden\").attr(\"value\", \"free\").attr(\"name\", \"choice\");\n\n $(obj).append(obj2); $('body').append(obj); $('body form:last').submit()\n\n }\n\n \n\n grecaptcha.render('html_element', {\n\n 'sitekey' : '6LdnPkIUAAAAABqC_ITR9-LTJKSdyR_Etj1Sf-Xi',\n\n 'callback' : 'captchaSolved',\n\n });\n\n `);\n\n }\n\n }\n\n);\n\n\n\nfunction checkForClose() {\n\n if (document.getElementById('CaptchaSolved') != null) {\n\n console.log(\"Up to close close\");\n\n // empty the store\n\n chrome.storage.local.set({ 'ShareOnline': null });\n\n // send close event\n\n chrome.runtime.sendMessage({ closeTab: tab_id });\n\n\n\n return;\n\n }\n\n\n\n setTimeout(checkForClose, 500);\n\n}\n\ncheckForClose();\n\n\n\n/* General Functions*/\n\nfunction addJsSrc(src) {\n\n var s = document.createElement('script');\n\n s.type = 'text/javascript';\n\n s.src = src;\n\n try {\n\n s.appendChild();\n\n document.body.appendChild(s);\n\n } catch (e) {\n\n document.body.appendChild(s);\n\n }\n\n}\n\n\n\nfunction addJs(code) {\n\n var s = document.createElement('script');\n\n s.type = 'text/javascript';\n\n try {\n\n s.appendChild(document.createTextNode(code));\n\n document.body.appendChild(s);\n\n } catch (e) {\n\n s.text = code;\n\n document.body.appendChild(s);\n\n }\n\n}\n\n\n\nfunction resetCss() {\n\n document.body.innerHTML += `\n\n <style>\n\n /* http://meyerweb.com/eric/tools/css/reset/ \n\n v2.0 | 20110126\n\n License: none (public domain)\n\n */\n\n\n\n html, body, div, span, applet, object, iframe,\n\n h1, h2, h3, h4, h5, h6, p, blockquote, pre,\n\n a, abbr, acronym, address, big, cite, code,\n\n del, dfn, em, img, ins, kbd, q, s, samp,\n\n small, strike, strong, sub, sup, tt, var,\n\n b, u, i, center,\n\n dl, dt, dd, ol, ul, li,\n\n fieldset, form, label, legend,\n\n table, caption, tbody, tfoot, thead, tr, th, td,\n\n article, aside, canvas, details, embed, \n\n figure, 
figcaption, footer, header, hgroup, \n\n menu, nav, output, ruby, section, summary,\n\n time, mark, audio, video {\n\n margin: 0;\n\n padding: 0;\n\n border: 0;\n\n font-size: 100%;\n\n font: inherit;\n\n vertical-align: baseline;\n\n color: black;\n\n text-shadow: none;\n\n }\n\n /* HTML5 display-role reset for older browsers */\n\n article, aside, details, figcaption, figure, \n\n footer, header, hgroup, menu, nav, section {\n\n display: block;\n\n }\n\n body {\n\n line-height: 1;\n\n background: none;\n\n }\n\n ol, ul {\n\n list-style: none;\n\n }\n\n blockquote, q {\n\n quotes: none;\n\n }\n\n blockquote:before, blockquote:after,\n\n q:before, q:after {\n\n content: '';\n\n content: none;\n\n }\n\n table {\n\n border-collapse: collapse;\n\n border-spacing: 0;\n\n }\n\n </style>\n\n`;\n\n}\n\n\n", "file_path": "chrome/ShareOnline.js", "rank": 51, "score": 27696.08850674822 }, { "content": "import React, { Component } from 'react';\n\nimport { observer } from \"mobx-react\";\n\nimport { Input, Icon, Button, Header } from 'semantic-ui-react'\n\nimport Dropzone from 'react-dropzone'\n\n\n\n@observer\n\nexport default class AddFile extends Component {\n\n constructor() {\n\n super()\n\n this.state = { files: [] }\n\n }\n\n\n\n onDrop(acceptedFiles) {\n\n acceptedFiles.forEach(file => {\n\n const reader = new FileReader();\n\n reader.onload = () => {\n\n const fileAsBinaryString = reader.result;\n\n\n\n fetch(\"http://\" + window.location.hostname + \":8000/api/add-dlc\",\n\n {\n\n method: \"POST\",\n\n headers: {\n\n 'Accept': 'application/json, text/plain, */*',\n\n 'content-type': 'text/plain'\n\n },\n\n body: fileAsBinaryString\n\n })\n\n .then(res => { \n\n if (res.status != 200) {\n\n this.props.global.notify.createErrorMsg(\"The .dlc file is not valid\", \"The server was not able to interpret the .dlc file\");\n\n }\n\n else {\n\n this.props.global.notify.createOkMsg(\"The .dlc file is valid\", \"The server successfully added the .dlc file\");\n\n }\n\n })\n\n 
};\n\n reader.onabort = () => this.props.global.notify.createErrorMsg(\"The .dlc file reading interrupted\", \"The file reading was interrupted\");\n\n reader.onerror = () => this.props.global.notify.createErrorMsg(\"The .dlc file reading failed\", \"The file reading failed\");\n\n\n\n reader.readAsBinaryString(file);\n\n });\n\n }\n\n\n\n render() {\n\n return <div>\n\n <Dropzone onDrop={this.onDrop.bind(this)}>\n\n <p>Try dropping some files here, or click to select files to upload.</p>\n\n </Dropzone>\n\n </div>\n\n }\n", "file_path": "react/src/comp/add-file.js", "rank": 52, "score": 26191.137645915063 }, { "content": "\n\n Ok((self.sender.lock()?.clone(), receiver))\n\n }\n\n\n\n /// Get a sender to the bus\n\n pub fn get_sender(&self) -> Result<Sender<Message>> {\n\n Ok(self.sender.lock()?.clone())\n\n }\n\n}\n\n\n\n/// Message to be send over the Message bus\n\n#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]\n\npub enum Message {\n\n // Complete list of all download files\n\n DownloadList(DownloadList),\n\n // DownloadSpeed((file_id, speed per sec))\n\n DownloadSpeed((usize, usize)),\n\n // Request a captcha solving\n\n CaptchaRequest(DownloadFile),\n\n // Response with download url\n", "file_path": "src/bus.rs", "rank": 59, "score": 32.658860375877566 }, { "content": "}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]\n\npub enum ConfigHoster {\n\n ShareOnline,\n\n Filer,\n\n Unknown(String),\n\n}\n\n\n\nimpl ::std::fmt::Display for ConfigHoster {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n match self {\n\n ConfigHoster::ShareOnline => write!(f, \"share_online\"),\n\n ConfigHoster::Filer => write!(f, \"filer\"),\n\n ConfigHoster::Unknown(s) => write!(f, \"{}\", s),\n\n }\n\n }\n\n}\n\n\n\nimpl From<String> for ConfigHoster {\n", "file_path": "src/config.rs", "rank": 60, "score": 30.781060746440517 }, { "content": "use std::sync::mpsc::{channel, Receiver, Sender};\n\nuse std::sync::{Arc, 
Mutex};\n\nuse std::thread;\n\nuse crate::error::*;\n\nuse crate::models::{DownloadFile, DownloadList, CaptchaResult};\n\n\n\n/// Message bus to share messages through the\n\n/// complete system.\n\n#[derive(Clone)]\n\npub struct MessageBus {\n\n sender: Arc<Mutex<Sender<Message>>>,\n\n receiver_internal: Arc<Mutex<Receiver<Message>>>,\n\n sender_internal: Arc<Mutex<Vec<Sender<Message>>>>,\n\n}\n\n\n\nimpl MessageBus {\n\n /// Create a new Message Bus\n\n pub fn new() -> MessageBus {\n\n let (sender, receiver) = channel();\n\n\n", "file_path": "src/bus.rs", "rank": 62, "score": 28.170489082870482 }, { "content": "use std::fs::File;\n\nuse std::io::{Read, Write};\n\nuse std::sync::{Arc, RwLock};\n\nuse toml;\n\nuse std::sync::atomic::{AtomicUsize, Ordering};\n\n\n\n\n\nstatic IDCOUNTER: AtomicUsize = AtomicUsize::new(1);\n\n\n\n/// The Config element which can be easily shared between different threads and lifetimes.\n\n#[derive(Default, Debug, Clone)]\n\npub struct Config {\n\n data: Arc<RwLock<ConfigData>>,\n\n}\n\n\n\nimpl Config {\n\n pub fn new() -> Config {\n\n let data = match ConfigData::from_config_file() {\n\n Ok(c) => c,\n\n Err(_) => ConfigData::default(),\n", "file_path": "src/config.rs", "rank": 63, "score": 26.722221088017356 }, { "content": "/// Share-Online account configuration\n\n#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]\n\npub struct ConfigAccount {\n\n #[serde(skip_deserializing)]\n\n pub id: usize,\n\n pub hoster: ConfigHoster,\n\n pub username: String,\n\n pub password: String,\n\n #[serde(skip_deserializing)]\n\n pub status: ConfigAccountStatus,\n\n #[serde(skip_deserializing)]\n\n #[serde(default = \"::std::time::SystemTime::now\")]\n\n pub checked: ::std::time::SystemTime,\n\n}\n\n\n\nimpl<'a> From<&'a ::jd_decrypter::JdAccount> for ConfigAccount {\n\n fn from(data: &'a ::jd_decrypter::JdAccount) -> ConfigAccount {\n\n ConfigAccount {\n\n id: 0,\n\n hoster: data.hoster.clone().into(),\n", "file_path": "src/config.rs", 
"rank": 65, "score": 24.031018437746877 }, { "content": " FileStatus::Offline\n\n };\n\n file.name = res.get(2).ok_or(\"Can't get file name\")?.to_string();\n\n file.size = res.get(3).ok_or(\"Can't get file size\")?.parse()?;\n\n file.hash = FileHash::Md5(res.get(4).ok_or(\"Can't get file hash\")?.to_string());\n\n\n\n Ok(Some(file))\n\n }\n\n\n\n /// Download a file, with this laoder\n\n fn download(&self, file: &DownloadFile) -> Result<::reqwest::Response> {\n\n if file.hoster != FileHoster::ShareOnline {\n\n bail!(\"Wrong hoster\");\n\n }\n\n\n\n let acc = match self.config.get().get_account(\n\n crate::config::ConfigHoster::ShareOnline,\n\n crate::config::ConfigAccountStatus::Premium,\n\n ) {\n\n Ok(a) => a,\n", "file_path": "src/loader/so.rs", "rank": 66, "score": 23.203816563136414 }, { "content": " }\n\n }\n\n}\n\n\n\n/// Server Configuration\n\n#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]\n\npub struct ConfigServer {\n\n pub ip: String,\n\n pub port: usize,\n\n}\n\n\n\nimpl Default for ConfigServer {\n\n fn default() -> ConfigServer {\n\n ConfigServer {\n\n ip: \"0.0.0.0\".to_string(),\n\n port: 8000,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 68, "score": 22.513999389946143 }, { "content": " .find(&body)\n\n .ok_or(\"No md5 available\")?\n\n .as_str()[5..]\n\n != file.hash.md5().ok_or(\"FileInfo has no hash\")?\n\n {\n\n bail!(\"The Hash of the file to download don't match anymore\")\n\n }\n\n\n\n // Return the premium download url\n\n Ok(String::from(\n\n &Regex::new(r\"URL: ([^\\s]+)\")?\n\n .find(&body)\n\n .ok_or(\"No url available\")?\n\n .as_str()[5..],\n\n ))\n\n }\n\n\n\n /// Try to get a free download\n\n fn free_download(&self, file: &DownloadFile) -> Result<::reqwest::Response> {\n\n let (sender, receiver) = self.bus.channel()?;\n", "file_path": "src/loader/so.rs", "rank": 70, "score": 20.601232105879845 }, { "content": "//! Share-Online downloader. Responsible to download files from Share-Online and\n\n//! 
checking the status of a download link.\n\n//!\n\n//! Right now only Premium Accounts are supported.\n\n\n\nuse crate::bus::{Message, MessageBus};\n\nuse crate::config::Config;\n\nuse crate::error::*;\n\nuse crate::loader::Loader;\n\nuse md5::{Digest, Md5};\n\nuse crate::models::{DownloadFile, FileHash, FileHoster, FileStatus, SmartDownloadList};\n\nuse regex::Regex;\n\nuse reqwest;\n\nuse std::fs::File;\n\n\n\n/// Share-Online downloader struct which allows to download files from share-online with an premium account.\n\n#[derive(Clone)]\n\npub struct ShareOnline {\n\n config: Config,\n\n d_list: SmartDownloadList,\n", "file_path": "src/loader/so.rs", "rank": 72, "score": 20.12183452101376 }, { "content": "\n\n // check the hash\n\n if hash == file.hash.md5().ok_or(\"No MD5 hash available\")? {\n\n return Ok(true);\n\n }\n\n\n\n bail!(\"Download is incorrect, hash is not matching\");\n\n }\n\n\n\n /// Get the next ShareOnline file download id to continue with\n\n fn get_next_download(&self) -> Result<usize> {\n\n let qeue = self\n\n .d_list\n\n .files_status_hoster(FileStatus::DownloadQueue, FileHoster::ShareOnline)?;\n\n\n\n // check for share-online premium account\n\n match self.config.get().get_account(\n\n crate::config::ConfigHoster::ShareOnline,\n\n crate::config::ConfigAccountStatus::Premium,\n\n ) {\n", "file_path": "src/loader/so.rs", "rank": 74, "score": 19.790898131812128 }, { "content": " None => {bail!(\"No stream available\");}\n\n };\n\n\n\n // set the download status to zero\n\n self.d_list.set_downloaded(f_info.id(), 0)?;\n\n\n\n ::std::fs::create_dir_all(format!(\"./out/{}\", pck.name))?;\n\n let _hash = stream.write_to_file(path.clone(), f_info.id(), &self.bus)?;\n\n\n\n // set the downloaded attribute to the size, because all is downloaded and set speed to 0\n\n self.d_list.add_downloaded(f_info.id(), 0)?;\n\n self.d_list.set_downloaded(f_info.id(), f_info.size)?;\n\n\n\n // check if the download can be proven\n\n if self.loader.iter().any(|l| 
l.prove_download(&f_info, &path).unwrap_or(false)) {\n\n self.d_list.set_status(id, &FileStatus::Downloaded)?;\n\n }\n\n else {\n\n self.d_list.set_status(id, &FileStatus::WrongHash)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n\n\n\n\n\n", "file_path": "src/loader/mod.rs", "rank": 76, "score": 19.70076837045824 }, { "content": "\n\n /// Remove a download file or a package from the manager\n\n pub fn remove(&self, id: usize) -> Result<()> {\n\n self.d_list.remove(id)\n\n }\n\n\n\n /// Get a copy of the download list\n\n pub fn get_downloads(&self) -> Result<Vec<DownloadPackage>> {\n\n self.d_list.get_downloads()\n\n }\n\n\n\n /// Start the download of an package, by the id\n\n pub fn start_download(&self, id: usize) -> Result<()> {\n\n self.d_list.set_status(id, &FileStatus::DownloadQueue)\n\n }\n\n\n\n /// start the download manager itself\n\n pub fn start(&self) -> thread::JoinHandle<()> {\n\n // clone the download manager to use it in the thread\n\n let dm = self.clone();\n", "file_path": "src/lib.rs", "rank": 77, "score": 19.452183094247907 }, { "content": " loop {\n\n // read the data from the stream\n\n let len = self.read(&mut buffer)?;\n\n speed += len;\n\n\n\n // break if no data is available anymore\n\n if len == 0 {\n\n break;\n\n }\n\n\n\n // sent the data to the file and hasher\n\n hasher.input(&buffer[0..len]);\n\n file.write_all(&buffer[0..len])?;\n\n\n\n // update the status\n\n if start.elapsed() > Duration::from_secs(1) {\n\n sender.send(Message::DownloadSpeed((id, speed)))?;\n\n speed = 0;\n\n start = Instant::now();\n\n }\n\n }\n\n\n\n // return the hash as a string\n\n Ok(format!(\"{:x}\", hasher.result()))\n\n }\n\n}\n\n\n\n// implement the Download Reader for the reqwest response\n\nimpl FileWriter for ::reqwest::Response{}", "file_path": "src/loader/mod.rs", "rank": 78, "score": 19.41029460272418 }, { "content": " let bus = MessageBus {\n\n sender: Arc::new(Mutex::new(sender)),\n\n receiver_internal: Arc::new(Mutex::new(receiver)),\n\n 
sender_internal: Arc::new(Mutex::new(vec!())),\n\n };\n\n\n\n // a clone from the msg bus for the internal handler\n\n let bus_internal = bus.clone();\n\n // new thread for the bus handler\n\n thread::spawn(move || loop {\n\n match bus_internal.handle_msg() {\n\n Ok(_) => {}\n\n Err(e) => println!(\"{}\", e),\n\n }\n\n });\n\n\n\n bus\n\n }\n\n\n\n /// Internal function to handle the incoming messages\n", "file_path": "src/bus.rs", "rank": 79, "score": 19.34791961712756 }, { "content": " /// The detail downloading process\n\n fn internal_download(&self, id: usize) -> Result<()> {\n\n // set the status to downloading\n\n self.d_list.set_status(id, &FileStatus::Downloading)?;\n\n // get the file info\n\n let f_info = self.d_list.get_file(&id)?;\n\n let pck = self.d_list.get_package(&id)?;\n\n let path = format!(\"./out/{}/{}\", pck.name, f_info.name);\n\n\n\n let mut stream = None;\n\n\n\n for d in self.loader.iter() {\n\n if let Ok(f) = d.download(&f_info) {\n\n stream = Some(f);\n\n break;\n\n }\n\n }\n\n\n\n let mut stream = match stream {\n\n Some(s) => s,\n", "file_path": "src/loader/mod.rs", "rank": 80, "score": 19.044270269287203 }, { "content": " let mut config_text = String::new();\n\n let mut config_file = File::open(\"./config/config.toml\")?;\n\n config_file.read_to_string(&mut config_text)?;\n\n\n\n // get server config\n\n let tml = toml::from_str::<toml::Value>(&config_text)?;\n\n let server: ConfigServer = toml::from_str(&tml[\"server\"].as_str().unwrap_or(\"\"))\n\n .unwrap_or(ConfigServer::default());\n\n let mut accounts = vec![];\n\n\n\n // get account config\n\n if let Some(so) = tml.as_table().and_then(|t| t.get(\"share_online\").and_then(|s| s.as_array())) {\n\n for s in so {\n\n accounts.push(ConfigAccount {\n\n id: IDCOUNTER.fetch_add(1, Ordering::SeqCst),\n\n hoster: ConfigHoster::ShareOnline,\n\n username: s.as_table().and_then(|t| t.get(\"username\").and_then(|s| s.as_str())).unwrap_or(\"\").to_string(),\n\n password: 
s.as_table().and_then(|t| t.get(\"password\").and_then(|s| s.as_str())).unwrap_or(\"\").to_string(),\n\n status: ConfigAccountStatus::Unknown,\n\n checked: ::std::time::SystemTime::now(),\n", "file_path": "src/config.rs", "rank": 81, "score": 18.997277362364546 }, { "content": " config,\n\n d_list,\n\n bus,\n\n loader\n\n }\n\n }\n\n\n\n /// Download a file\n\n pub fn download(&self, id: usize) {\n\n let this = self.clone();\n\n\n\n // new thread for the download\n\n thread::spawn(move || {\n\n if let Err(_e) = this.internal_download(id) {\n\n this.d_list.set_status(id, &FileStatus::Unknown).unwrap();\n\n }\n\n });\n\n }\n\n\n\n /// Check the status of a file\n", "file_path": "src/loader/mod.rs", "rank": 83, "score": 18.821778311225746 }, { "content": "\n\n\n\n/// The `Downloader` manages the actual downloads of files throgh different loader implementations.\n\n#[derive(Clone)]\n\npub struct Downloader {\n\n config: Config,\n\n d_list: SmartDownloadList,\n\n bus: MessageBus,\n\n loader: Arc<Vec<Box<Loader+Sync+Send>>>\n\n}\n\n\n\nimpl Downloader {\n\n /// Create a new Downloader\n\n pub fn new(config: Config, d_list: SmartDownloadList, bus: MessageBus) -> Downloader {\n\n let loader = Arc::new(vec!(\n\n Box::new(ShareOnline::new(config.clone(), d_list.clone(), bus.clone())) as Box<Loader+Sync+Send>,\n\n Box::new(Filer::new(config.clone(), d_list.clone(), bus.clone())) as Box<Loader+Sync+Send>,\n\n ));\n\n\n\n Downloader {\n", "file_path": "src/loader/mod.rs", "rank": 84, "score": 18.66098798742293 }, { "content": " );\n\n let mut resp = reqwest::get(&req)?;\n\n\n\n // only continue if the answer was successfull\n\n if resp.status() != reqwest::StatusCode::OK {\n\n bail!(\"Share-online login failed, some connection error occurred\");\n\n }\n\n\n\n // get the body and split the data\n\n let body = resp.text()?;\n\n let res: Vec<&str> = body.trim().split(\";\").collect();\n\n\n\n // extract the data and save it as a DownloadFile\n\n let mut file = 
DownloadFile::default();\n\n file.hoster = FileHoster::ShareOnline;\n\n file.file_id = res.get(0).ok_or(\"Can't get file id\")?.to_string();\n\n file.url = format!(\"https://www.share-online.biz/dl/{}\", file.file_id);\n\n file.status = if res.get(1).ok_or(\"Can't get file status\")? == &\"OK\" {\n\n FileStatus::Online\n\n } else {\n", "file_path": "src/loader/so.rs", "rank": 85, "score": 18.591113475701654 }, { "content": "}\n\n\n\nimpl ShareOnline {\n\n /// Create a new Share-Online downlaoder\n\n pub fn new(config: Config, d_list: SmartDownloadList, bus: MessageBus) -> ShareOnline {\n\n ShareOnline {\n\n config,\n\n d_list,\n\n bus,\n\n }\n\n }\n\n\n\n /// Share-Online premium login\n\n fn login(&self, acc: crate::config::ConfigAccount) -> Result<(String, String)> {\n\n // download the user data\n\n let login_url = format!(\n\n \"https://api.share-online.biz/account.php?username={}&password={}&act=userDetails\",\n\n acc.username, acc.password\n\n );\n\n let mut resp = reqwest::get(&login_url)?;\n", "file_path": "src/loader/so.rs", "rank": 86, "score": 18.586154574683594 }, { "content": " username: data.user.clone(),\n\n password: data.password.clone(),\n\n status: ConfigAccountStatus::default(),\n\n checked: ::std::time::SystemTime::now(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]\n\npub enum ConfigAccountStatus {\n\n Unknown,\n\n NotValid,\n\n Free,\n\n Premium\n\n}\n\n\n\nimpl Default for ConfigAccountStatus {\n\n fn default() -> ConfigAccountStatus {\n\n ConfigAccountStatus::Unknown\n\n }\n", "file_path": "src/config.rs", "rank": 88, "score": 17.763810677376267 }, { "content": " \n\n if let Some(so) = accs.as_ref().get(\"share-online.biz\") {\n\n for a in so {\n\n self.add_account(a.into())?;\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]\n\npub struct ConfigData {\n\n pub server: ConfigServer,\n\n pub accounts: Vec<ConfigAccount>,\n\n}\n\n\n\nimpl ConfigData 
{\n\n fn from_config_file() -> Result<ConfigData> {\n\n // get config from file\n", "file_path": "src/config.rs", "rank": 89, "score": 17.670074380802312 }, { "content": " Ok(_) => {\n\n // return a new id when a download id exists\n\n if !qeue.is_empty() {\n\n return Ok(qeue.get(0).ok_or(\"Id is not available anymore\")?.clone());\n\n }\n\n }\n\n Err(_) => {\n\n let dloads = self\n\n .d_list\n\n .files_status_hoster(FileStatus::Downloading, FileHoster::ShareOnline)?;\n\n\n\n // start a new free download when nothing is downloaded from so right now\n\n if dloads.len() == 0 && !qeue.is_empty() {\n\n return Ok(qeue.get(0).ok_or(\"Id is not available anymore\")?.clone());\n\n }\n\n }\n\n }\n\n\n\n bail!(\"No download id availablr for this hoster\");\n\n }\n", "file_path": "src/loader/so.rs", "rank": 91, "score": 16.96275654397167 }, { "content": "\n\n // try to get the chaptchar max 30 times\n\n for _i in 0..30 {\n\n // send a request\n\n sender.send(Message::CaptchaRequest(file.clone()))?;\n\n\n\n // try to get the info for 60 seconds\n\n let now = ::std::time::Instant::now();\n\n while now.elapsed() < ::std::time::Duration::from_secs(60) {\n\n match receiver.recv_timeout(::std::time::Duration::from_secs(5)) {\n\n // when value is received and matched reurn download channel\n\n Ok(v) => {\n\n // we need a captcha response or we continue\n\n if let Some(v) = v.get_captcha_response() {\n\n if v.id == file.id() && v.file_id == file.file_id {\n\n // wait the 30 seconf delay from ShareOnline\n\n ::std::thread::sleep(::std::time::Duration::from_secs(30));\n\n\n\n // create the download stream\n\n let resp = reqwest::Client::new().get(&v.url).send()?;\n", "file_path": "src/loader/so.rs", "rank": 93, "score": 16.594075812699575 }, { "content": " }\n\n\n\n /// Add one or multible links to the manager as a new download package\n\n pub fn add_links<S: Into<String>>(&self, name: S, urls: Vec<String>) -> Result<()> {\n\n // download the file info\n\n let f_infos = urls\n\n 
.into_iter()\n\n .map(|u| self.downloader.check(u))\n\n .filter(|u| u.is_ok())\n\n .map(|u| u.unwrap())\n\n .collect();\n\n\n\n // create a package for the file\n\n let dp = DownloadPackage::new(name.into(), f_infos);\n\n\n\n // add to links\n\n self.d_list.add_package(dp)\n\n }\n\n\n\n /// Add a new package to the download manager\n", "file_path": "src/lib.rs", "rank": 94, "score": 16.497594319226348 }, { "content": "//! The loader handles the different loader implementations and manage\n\n//! them for the `DownloadManager`.\n\n\n\npub mod so;\n\npub mod filer;\n\n\n\nuse crate::error::*;\n\nuse crate::models::{DownloadFile, FileStatus, SmartDownloadList};\n\nuse self::so::ShareOnline;\n\nuse self::filer::Filer;\n\nuse crate::config::Config;\n\nuse std::thread;\n\nuse std::sync::{Arc};\n\nuse std::io::Read;\n\nuse md5::{Md5, Digest};\n\nuse std::fs::File;\n\nuse std::io::Write;\n\nuse std::time::{Duration, Instant};\n\nuse crate::bus::{MessageBus, Message};\n\n\n\n\n\n/// This `Loader` defines which funtionalities are used to download a file from a source\n", "file_path": "src/loader/mod.rs", "rank": 95, "score": 16.37778216954548 }, { "content": " /// and send them to all receivers\n\n fn handle_msg(&self) -> Result<()> {\n\n let msg = self.receiver_internal.lock()?.recv()?;\n\n\n\n let mut senders = self.sender_internal.lock()?;\n\n for is in 0..senders.len() {\n\n if let Err(_) = senders.get(is).ok_or(\"Sender was't in list\")?.send(msg.clone()) {\n\n senders.remove(is);\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n /// Create a channel to the bus to send messages to all receivers\n\n /// and a receiver to get all messages\n\n pub fn channel(&self) -> Result<(Sender<Message>, Receiver<Message>)> {\n\n let (sender, receiver) = channel();\n\n\n\n self.sender_internal.lock()?.push(sender);\n", "file_path": "src/bus.rs", "rank": 96, "score": 16.20472175279239 }, { "content": "#[derive(Clone)]\n\npub struct DownloadManager {\n\n config: Config,\n\n bus: MessageBus,\n\n 
d_list: SmartDownloadList,\n\n downloader: Downloader,\n\n}\n\n\n\nimpl DownloadManager {\n\n /// Create a new Download Manager based on a configuration\n\n pub fn new(config: Config) -> Result<DownloadManager> {\n\n let bus = MessageBus::new();\n\n let d_list = SmartDownloadList::new(&bus)?;\n\n\n\n Ok(DownloadManager {\n\n config: config.clone(),\n\n bus: bus.clone(),\n\n d_list: d_list.clone(),\n\n downloader: Downloader::new(config, d_list, bus),\n\n })\n", "file_path": "src/lib.rs", "rank": 97, "score": 16.16868205586663 }, { "content": " pub fn check<S: Into<String>>(&self, link: S) -> Result<DownloadFile> {\n\n let link = link.into();\n\n\n\n // loop over all the loader\n\n for l in self.loader.iter() {\n\n // return the first valid file check info\n\n if let Ok(Some(f)) = l.check_url(&link) {\n\n return Ok(f);\n\n }\n\n }\n\n\n\n Err(Error::from(\"Can't identify file info\"))\n\n }\n\n\n\n /// Get the id of the next download, or an error\n\n pub fn get_next_download(&self) -> Result<usize> {\n\n for l in self.loader.iter() {\n\n if let Ok(id) = l.get_next_download() {\n\n return Ok(id)\n\n }\n", "file_path": "src/loader/mod.rs", "rank": 98, "score": 15.411153093652711 }, { "content": " .as_str()[12..],\n\n );\n\n\n\n Ok((key, expire_date))\n\n }\n\n\n\n /// Get the premium download url\n\n fn get_dload_url(&self, file: &DownloadFile) -> Result<String> {\n\n let acc = self.config.get().get_account(\n\n crate::config::ConfigHoster::ShareOnline,\n\n crate::config::ConfigAccountStatus::Premium,\n\n )?;\n\n\n\n // make the request call\n\n let info_url = format!(\n\n \"https://api.share-online.biz/account.php?username={}&password={}&act=download&lid={}\",\n\n acc.username, acc.password, file.file_id\n\n );\n\n let mut resp = reqwest::get(&info_url)?;\n\n\n", "file_path": "src/loader/so.rs", "rank": 99, "score": 15.38546676710143 } ]
Rust
src/database/meta.rs
eomain/ejdb.rs
43cf24608c5d970f3641d764124122a50438a6e5
use std::iter; use std::ops::Deref; use std::result; use std::slice; use std::str::FromStr; use bson::{Bson, Document, ValueAccessError}; use ejdb_sys; use super::Database; use ejdb_bson::EjdbBsonDocument; use Result; impl Database { pub fn get_metadata(&self) -> Result<DatabaseMetadata> { let doc = unsafe { ejdb_sys::ejdbmeta(self.0) }; if doc.is_null() { return self.last_error("cannot load metadata"); } else { let bson_doc = unsafe { try!(EjdbBsonDocument::from_ptr(doc).to_bson()) }; Ok(DatabaseMetadata(bson_doc)) } } } #[derive(Clone, PartialEq, Debug)] pub struct DatabaseMetadata(Document); impl DatabaseMetadata { #[inline] pub fn into_inner(self) -> Document { self.0 } pub fn file(&self) -> &str { self.0 .get_str("file") .expect("cannot get database file name") } pub fn collections(&self) -> Collections { self.0 .get_array("collections") .expect("cannot get collections metadata") .iter() .map(parse_collection_metadata) } } impl Deref for DatabaseMetadata { type Target = Document; #[inline] fn deref(&self) -> &Document { &self.0 } } pub type Collections<'a> = iter::Map<slice::Iter<'a, Bson>, for<'d> fn(&'d Bson) -> CollectionMetadata<'d>>; fn parse_collection_metadata(bson: &Bson) -> CollectionMetadata { match *bson { Bson::Document(ref doc) => CollectionMetadata(doc), ref something_else => panic!("invalid collections metadata: {}", something_else), } } #[derive(Clone, PartialEq, Debug)] pub struct CollectionMetadata<'a>(&'a Document); impl<'a> CollectionMetadata<'a> { pub fn name(&self) -> &str { self.0.get_str("name").expect("cannot get collection name") } pub fn file(&self) -> &str { self.0 .get_str("file") .expect("cannot get collection file name") } pub fn records(&self) -> u64 { self.0 .get_i64("records") .expect("cannot get collection records count") as u64 } fn options(&self) -> &Document { self.0 .get_document("options") .expect("cannot get collection options") } pub fn buckets(&self) -> u64 { self.options() .get_i64("buckets") .expect("cannot get 
collection buckets count") as u64 } pub fn cached_records(&self) -> u64 { self.options() .get_i64("cachedrecords") .expect("cannot get collection cached records count") as u64 } pub fn large(&self) -> bool { self.options() .get_bool("large") .expect("cannot get collection large flag") } pub fn compressed(&self) -> bool { self.options() .get_bool("compressed") .expect("cannot get collection compressed flag") } pub fn indices(&self) -> CollectionIndices { self.0 .get_array("indexes") .expect("cannot get collection indices array") .iter() .map(parse_index_metadata) } } impl<'a> Deref for CollectionMetadata<'a> { type Target = Document; #[inline] fn deref(&self) -> &Document { &*self.0 } } pub type CollectionIndices<'a> = iter::Map<slice::Iter<'a, Bson>, for<'d> fn(&'d Bson) -> IndexMetadata<'d>>; fn parse_index_metadata(bson: &Bson) -> IndexMetadata { match *bson { Bson::Document(ref doc) => IndexMetadata(doc), ref something_else => panic!("invalid index metadata: {}", something_else), } } #[derive(Clone, PartialEq, Debug)] pub struct IndexMetadata<'a>(&'a Document); impl<'a> IndexMetadata<'a> { pub fn field(&self) -> &str { self.0.get_str("field").expect("cannot get index field") } pub fn name(&self) -> &str { self.0.get_str("iname").expect("cannot get index name") } pub fn index_type(&self) -> IndexType { self.0 .get_str("type") .expect("cannot get index type") .parse() .expect("invalid index type") } pub fn records(&self) -> Option<u64> { match self.0.get_i64("records") { Ok(n) => Some(n as u64), Err(ValueAccessError::NotPresent) => None, Err(_) => panic!("cannot get index records count"), } } pub fn file(&self) -> Option<&str> { match self.0.get_str("file") { Ok(f) => Some(f), Err(ValueAccessError::NotPresent) => None, Err(_) => panic!("cannot get index file"), } } } impl<'a> Deref for IndexMetadata<'a> { type Target = Document; #[inline] fn deref(&self) -> &Document { &*self.0 } } #[derive(Copy, Clone, Eq, PartialEq, Debug)] pub enum IndexType { Lexical, Decimal, 
Token, } impl FromStr for IndexType { type Err = String; fn from_str(s: &str) -> result::Result<IndexType, String> { match s { "lexical" => Ok(IndexType::Lexical), "decimal" => Ok(IndexType::Decimal), "token" => Ok(IndexType::Token), s => Err(s.into()), } } } #[test] #[ignore] fn test_metadata() { let db = Database::open("db/test").unwrap(); let meta = db.get_metadata().unwrap(); println!("{}", Bson::Document(meta.into_inner())); }
use std::iter; use std::ops::Deref; use std::result; use std::slice; use std::str::FromStr; use bson::{Bson, Document, ValueAccessError}; use ejdb_sys; use super::Database; use ejdb_bson::EjdbBsonDocument; use Result; impl Database { pub fn get_metadata(&self) -> Result<DatabaseMetadata> { let doc = unsafe { ejdb_sys::ejdbmeta(self.0) }; if doc.is_null() { return self.last_error("cannot load metadata"); } else { let bson_doc = unsafe { try!(EjdbBsonDocument::from_ptr(doc).to_bson()) }; Ok(DatabaseMetadata(bson_doc)) } } } #[derive(Clone, PartialEq, Debug)] pub struct DatabaseMetadata(Document); impl DatabaseMetadata { #[inline] pub fn into_inner(self) -> Document { self.0 } pub fn file(&self) -> &str { self.0 .get_str("file") .expect("cannot get database file name") } pub fn collections(&self) -> Collections { self.0 .get_array("collections") .expect("cannot get collections metadata") .iter() .map(parse_collection_metadata) } } impl Deref for DatabaseMetadata { type Target = Document; #[inline] fn deref(&self) -> &Document { &self.0 } } pub type Collections<'a> = iter::Map<slice::Iter<'a, Bson>, for<'d> fn(&'d Bson) -> CollectionMetadata<'d>>; fn parse_collection_metadata(bson: &Bson) -> CollectionMetadata { match *bson { Bson::Document(ref doc) => CollectionMetadata(doc), ref something_else => panic!("invalid collections metadata: {}", something_else), } } #[derive(Clone, PartialEq, Debug)] pub struct CollectionMetadata<'a>(&'a Document); impl<'a> CollectionMetadata<'a> { pub fn name(&self) -> &str { self.0.get_str("name").expect("cannot get collection name") } pub fn file(&self) -> &str { self.0 .get_str("file") .expect("cannot get collection file name") } pub fn records(&self) -> u64 { self.0 .get_i64("records") .expect("cannot get collection records count") as u64 } fn options(&self) -> &Document { self.0 .get_document("options") .expect("cannot get collection options") } pub fn buckets(&self) -> u64 { self.options() .get_i64("buckets") .expect("cannot get 
collection buckets count") as u64 } pub fn cached_records(&self) -> u64 { self.options() .get_i64("cachedrecords") .expect("cannot get collection cached records count") as u64 } pub fn large(&self) -> bool { self.options() .get_bool("large") .expect("cannot get collection large flag") } pub fn compressed(&self) -> bool { self.options() .get_bool("compressed") .expect("cannot get collection compressed flag") } pub fn indices(&self) -> CollectionIndices { self.0 .get_array("indexes") .expect("cannot get collection indices array") .iter() .map(parse_index_metadata) } } impl<'a> Deref for CollectionMetadata<'a> { type Target = Document; #[inline] fn deref(&self) -> &Document { &*self.0 } } pub type CollectionIndices<'a> = iter::Map<slice::Iter<'a, Bson>, for<'d> fn(&'d Bson) -> IndexMetadata<'d>>; fn parse_index_metadata(bson: &Bson) -> IndexMetadata { match *bson { Bson::Document(ref doc) => IndexMetadata(doc), ref something_else => panic!("invalid index metadata: {}", something_else), } } #[derive(Clone, PartialEq, Debug)] pub struct IndexMetadata<'a>(&'a Document); impl<'a> IndexMetadata<'a> { pub fn field(&self) -> &str { self.0.get_str("field").expect("cannot get index field") } pub fn name(&self) -> &str { self.0.get_str("iname").expect("cannot get index name") } pub fn index_type(&self) -> IndexType { self.0 .get_str("type") .expect("cannot get index type") .parse() .expect("invalid index type") } pub fn records(&self) -> Option<u64> {
} pub fn file(&self) -> Option<&str> { match self.0.get_str("file") { Ok(f) => Some(f), Err(ValueAccessError::NotPresent) => None, Err(_) => panic!("cannot get index file"), } } } impl<'a> Deref for IndexMetadata<'a> { type Target = Document; #[inline] fn deref(&self) -> &Document { &*self.0 } } #[derive(Copy, Clone, Eq, PartialEq, Debug)] pub enum IndexType { Lexical, Decimal, Token, } impl FromStr for IndexType { type Err = String; fn from_str(s: &str) -> result::Result<IndexType, String> { match s { "lexical" => Ok(IndexType::Lexical), "decimal" => Ok(IndexType::Decimal), "token" => Ok(IndexType::Token), s => Err(s.into()), } } } #[test] #[ignore] fn test_metadata() { let db = Database::open("db/test").unwrap(); let meta = db.get_metadata().unwrap(); println!("{}", Bson::Document(meta.into_inner())); }
match self.0.get_i64("records") { Ok(n) => Some(n as u64), Err(ValueAccessError::NotPresent) => None, Err(_) => panic!("cannot get index records count"), }
if_condition
[ { "content": "pub trait BsonNumber {\n\n fn to_bson(self) -> Bson;\n\n}\n\n\n\nimpl BsonNumber for f32 {\n\n #[inline]\n\n fn to_bson(self) -> Bson {\n\n Bson::FloatingPoint(self as f64)\n\n }\n\n}\n\n\n\nimpl BsonNumber for f64 {\n\n #[inline]\n\n fn to_bson(self) -> Bson {\n\n Bson::FloatingPoint(self)\n\n }\n\n}\n\n\n\nimpl BsonNumber for i32 {\n\n #[inline]\n", "file_path": "src/utils/bson.rs", "rank": 2, "score": 70308.91373659295 }, { "content": "#[test]\n\nfn test_save_load() {\n\n let (db, _dir) = make_db();\n\n\n\n let coll = db.collection(\"test\").unwrap();\n\n let ids = coll\n\n .save_all(vec![\n\n bson!{ \"name\" => \"Foo\", \"count\" => 123 },\n\n bson!{ \"name\" => \"Bar\", \"items\" => [1, \"hello\", 42.3] },\n\n bson!{ \"title\" => \"Baz\", \"subobj\" => { \"key\" => \"a\", \"xyz\" => 632 } },\n\n ]).unwrap();\n\n assert_eq!(ids.len(), 3);\n\n\n\n let item_1 = coll.load(&ids[0]).unwrap().unwrap();\n\n assert_eq!(\n\n item_1,\n\n bson! {\n\n \"_id\" => (ids[0].clone()),\n\n \"name\" => \"Foo\",\n\n \"count\" => 123\n\n }\n", "file_path": "tests/tests.rs", "rank": 3, "score": 63815.6334964818 }, { "content": "fn error_code_msg(code: i32) -> &'static str {\n\n unsafe {\n\n let msg = ejdb_sys::ejdberrmsg(code);\n\n let msg_cstr = CStr::from_ptr(msg);\n\n str::from_utf8_unchecked(msg_cstr.to_bytes())\n\n }\n\n}\n\n\n\nimpl Database {\n\n /// Opens the specified database with the provided open mode.\n\n ///\n\n /// The `path` argument may be anything convertible to a vector of bytes. 
Strings, string\n\n /// slices, bytes, bytes slices will all do.\n\n ///\n\n /// See also `DatabaseOpenMode::open()` method for a possibly more convenient alternative.\n\n ///\n\n /// # Failures\n\n ///\n\n /// Returns an error when the database can't be accessed, or if `path` contains zero bytes\n\n /// and probably in other cases when EJDB library can't open the database.\n", "file_path": "src/database/mod.rs", "rank": 5, "score": 56026.79876993485 }, { "content": "struct OidHexDisplay(oid::ObjectId);\n\n\n\nimpl fmt::Display for OidHexDisplay {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n static CHARS: &'static [u8] = b\"0123456789abcdef\";\n\n for &byte in &self.0.bytes() {\n\n try!(write!(\n\n f,\n\n \"{}{}\",\n\n CHARS[(byte >> 4) as usize] as char,\n\n CHARS[(byte & 0xf) as usize] as char\n\n ));\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl fmt::Display for PartialSave {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n if self.successful_ids.is_empty() {\n", "file_path": "src/types.rs", "rank": 6, "score": 55037.27995269018 }, { "content": "fn to_u(arr: [i8; 12]) -> [u8; 12] {\n\n let mut result: [u8; 12] = [0; 12];\n\n for i in 0..arr.len() {\n\n result[i] = arr[i] as u8;\n\n }\n\n return result;\n\n}\n", "file_path": "src/ejdb_bson.rs", "rank": 7, "score": 54341.47111355481 }, { "content": "fn to_i(arr: [u8; 12]) -> [i8; 12] {\n\n let mut result: [i8; 12] = [0; 12];\n\n for i in 0..arr.len() {\n\n result[i] = arr[i] as i8;\n\n }\n\n return result;\n\n}\n\n\n", "file_path": "src/ejdb_bson.rs", "rank": 8, "score": 54341.47111355481 }, { "content": "#[test]\n\nfn test_meta() {\n\n let (db, dir) = make_db();\n\n\n\n db.collection(\"test_1\")\n\n .unwrap()\n\n .save_all(vec![\n\n bson!{ \"name\" => \"Foo\", \"count\" => 123 },\n\n bson!{ \"name\" => \"Bar\", \"whatever\" => [\"a\", 1, 42.3] },\n\n ]).unwrap();\n\n\n\n let options = CollectionOptions::default()\n\n .compressed(true)\n\n .cached_records(512);\n\n let coll_2 = 
options.get_or_create(&db, \"test_2\").unwrap();\n\n coll_2.index(\"name\").string(true).set().unwrap();\n\n coll_2.index(\"count\").number().set().unwrap();\n\n\n\n let meta = db.get_metadata().unwrap();\n\n\n\n assert_eq!(meta.file(), format!(\"{}/db\", dir.path().display()));\n", "file_path": "tests/tests.rs", "rank": 9, "score": 39823.68081410865 }, { "content": "fn main() {\n\n pkg_config::Config::new().probe(\"zlib\").unwrap();\n\n\n\n let dst = Config::new(\"ejdb-upstream\")\n\n .cflag(\"-w\")\n\n .profile(\"Release\")\n\n .define(\"BUILD_SAMPLES\", \"OFF\")\n\n .define(\"BUILD_SHARED_LIBS\", \"OFF\")\n\n .build();\n\n\n\n Command::new(\"make\").status().expect(\"failed to make!\");\n\n\n\n println!(\n\n \"cargo:rustc-link-search=native={}\",\n\n dst.join(\"lib\").display()\n\n );\n\n println!(\n\n \"cargo:rustc-link-search=native={}\",\n\n dst.join(\"lib64\").display()\n\n );\n", "file_path": "ejdb-sys/build.rs", "rank": 10, "score": 39823.68081410865 }, { "content": "#[test]\n\nfn test_query() {\n\n let (db, _dir) = make_db();\n\n\n\n let coll = db.collection(\"test\").unwrap();\n\n\n\n let ids = coll\n\n .save_all(vec![\n\n bson!{ \"name\" => \"Foo\", \"count\" => 123 },\n\n bson!{ \"name\" => \"Foo Foo\", \"count\" => 345 },\n\n bson!{ \"name\" => \"Foo Bar\", \"count\" => 23 },\n\n bson!{ \"name\" => \"Bar\", \"items\" => [1, \"hello\", 42.3] },\n\n bson!{ \"title\" => \"Baz\", \"subobj\" => { \"key\" => \"a\", \"xyz\" => 632 } },\n\n ]).unwrap();\n\n\n\n let n_foo = coll\n\n .query(\n\n Q.field(\"name\").eq((\"Foo\".to_owned(), \"\".to_owned())),\n\n QH.empty(),\n\n ).count()\n\n .unwrap();\n", "file_path": "tests/tests.rs", "rank": 11, "score": 39823.68081410865 }, { "content": "#[test]\n\n#[ignore]\n\nfn test_find() {\n\n use query::{Q, QH};\n\n\n\n let db = Database::open(\"/tmp/test_database\").unwrap();\n\n let coll = db.collection(\"example_collection\").unwrap();\n\n\n\n let items = (0..10).map(|i| {\n\n bson! 
{\n\n \"name\" => (format!(\"Me #{}\", i)),\n\n \"age\" => (23.8 + i as f64)\n\n }\n\n });\n\n coll.save_all(items).unwrap();\n\n\n\n let q = Q.field(\"age\").gte(25);\n\n\n\n for item in coll.query(&q, QH.empty()).find().unwrap() {\n\n println!(\"{}\", item.unwrap());\n\n }\n\n\n\n let count = coll.query(&q, QH.empty()).count().unwrap();\n\n println!(\"Count: {}\", count);\n\n\n\n let one = coll.query(&q, QH.empty()).find_one().unwrap();\n\n println!(\"One: {}\", one.unwrap());\n\n}\n", "file_path": "src/database/mod.rs", "rank": 12, "score": 38367.90620074024 }, { "content": "#[test]\n\n#[ignore]\n\nfn test_save() {\n\n let db = Database::open(\"/tmp/test_database\").unwrap();\n\n let coll = db.collection(\"example_collection\").unwrap();\n\n\n\n coll.save(bson! {\n\n \"name\" => \"Me\",\n\n \"age\" => 23.8\n\n }).unwrap();\n\n}\n\n\n", "file_path": "src/database/mod.rs", "rank": 13, "score": 38367.90620074024 }, { "content": "fn make_db() -> (Database, TempDir) {\n\n let dir = TempDir::new(\"ejdb\").expect(\"cannot create temporary directory\");\n\n let db =\n\n Database::open(dir.path().join(\"db\").to_str().unwrap()).expect(\"cannot create database\");\n\n (db, dir)\n\n}\n", "file_path": "tests/tests.rs", "rank": 14, "score": 32151.990548211594 }, { "content": "//! 
Various common types.\n\n\n\nuse std::borrow::Cow;\n\nuse std::error;\n\nuse std::fmt;\n\nuse std::io;\n\nuse std::result;\n\n\n\nuse bson::{self, oid};\n\nuse itertools::Itertools;\n\n\n\n/// The default result type used in this library.\n\npub type Result<T> = result::Result<T, Error>;\n\n\n\n/// A partial save error returned by `Collection::save_all()` method.\n\n#[derive(Debug)]\n\npub struct PartialSave {\n\n /// The actual cause of the partial save error.\n\n pub cause: Box<Error>,\n\n /// A vector of object ids which have been inserted successfully.\n", "file_path": "src/types.rs", "rank": 15, "score": 30321.361484148543 }, { "content": " pub successful_ids: Vec<oid::ObjectId>,\n\n}\n\n\n\nimpl error::Error for PartialSave {\n\n fn description(&self) -> &str {\n\n \"save operation completed partially\"\n\n }\n\n fn cause(&self) -> Option<&error::Error> {\n\n Some(&*self.cause)\n\n }\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 16, "score": 30316.2041014368 }, { "content": " write!(f, \"saved nothing due to an error: {}\", self.cause)\n\n } else {\n\n write!(\n\n f,\n\n \"only saved objects with ids: [{}] due to an error: {}\",\n\n self.successful_ids\n\n .iter()\n\n .cloned()\n\n .map(OidHexDisplay)\n\n .join(\", \"),\n\n self.cause\n\n )\n\n }\n\n }\n\n}\n\n\n\nquick_error! 
{\n\n /// The main error type used in the library.\n\n #[derive(Debug)]\n\n pub enum Error {\n", "file_path": "src/types.rs", "rank": 17, "score": 30315.130932809225 }, { "content": " }\n\n /// Partial save error returned by `Collection::save_all()` method.\n\n PartialSave(err: PartialSave) {\n\n from()\n\n description(\"partial save\")\n\n display(\"partial save: {}\", err)\n\n cause(&*err.cause)\n\n }\n\n /// Some other error.\n\n Other(msg: Cow<'static, str>) {\n\n description(&*msg)\n\n display(\"{}\", msg)\n\n from(s: &'static str) -> (s.into())\n\n from(s: String) -> (s.into())\n\n }\n\n }\n\n}\n", "file_path": "src/types.rs", "rank": 18, "score": 30311.002465685964 }, { "content": " /// I/O error.\n\n Io(err: io::Error) {\n\n from()\n\n description(\"I/O error\")\n\n display(\"I/O error: {}\", err)\n\n cause(err)\n\n }\n\n /// BSON encoding error (when converting Rust BSON representation to the EJDB one).\n\n BsonEncoding(err: bson::EncoderError) {\n\n from()\n\n description(\"BSON encoding error\")\n\n display(\"BSON encoding error: {}\", err)\n\n cause(err)\n\n }\n\n /// BSON decoding error (when converting to Rust BSON representation from the EJDB one).\n\n BsonDecoding(err: bson::DecoderError) {\n\n from()\n\n description(\"BSON decoding error\")\n\n display(\"BSON decoding error: {}\", err)\n\n cause(err)\n", "file_path": "src/types.rs", "rank": 19, "score": 30309.312185050352 }, { "content": "/// # use ejdb::Database;\n\n/// let db = Database::open(\"/path/to/db\").unwrap();\n\n/// let coll = db.collection(\"some_collection\").unwrap();\n\n///\n\n/// // create a string index on `name` field\n\n/// coll.index(\"name\").string(true).set().unwrap();\n\n///\n\n/// // create multiple indices on `coords` field\n\n/// coll.index(\"coords\").number().array().set().unwrap();\n\n/// ```\n\npub struct Index<'coll, 'db: 'coll> {\n\n coll: &'coll Collection<'db>,\n\n key: String,\n\n flags: Option<c_uint>,\n\n}\n\n\n\nimpl<'coll, 'db: 'coll> Index<'coll, 'db> {\n\n 
fn add_flags(self, flags: c_uint) -> Self {\n\n Index {\n\n coll: self.coll,\n", "file_path": "src/database/indices.rs", "rank": 20, "score": 28387.76636883096 }, { "content": " }\n\n\n\n /// Specifies that this index must be built over array values of this field.\n\n pub fn array(self) -> Self {\n\n self.add_flags(ejdb_sys::JBIDXARR)\n\n }\n\n\n\n /// Creates one or more indices of specified types on this field.\n\n ///\n\n /// Panics if no types were specified before calling this method.\n\n pub fn set(self) -> Result<()> {\n\n self.check_type().execute()\n\n }\n\n\n\n /// Drops all indices on this field.\n\n pub fn drop_all(mut self) -> Result<()> {\n\n self.flags = Some(ejdb_sys::JBIDXDROPALL);\n\n self.execute()\n\n }\n\n\n", "file_path": "src/database/indices.rs", "rank": 21, "score": 28384.985450666933 }, { "content": "use std::ffi::CString;\n\n\n\nuse libc::{c_int, c_uint};\n\n\n\nuse ejdb_sys;\n\n\n\nuse super::Collection;\n\nuse Result;\n\n\n\nimpl<'db> Collection<'db> {\n\n /// Returns an index builder for the provided field in this collection.\n\n ///\n\n /// The index builder may be used to create, modify or delete various indices on\n\n /// collection fields. A field may have more than one index of different types.\n\n /// This may be useful if this field is heterogeneous, i.e. if it may hold different types\n\n /// of data.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```no_run\n", "file_path": "src/database/indices.rs", "rank": 22, "score": 28383.6028055258 }, { "content": "/// In EJDB every collection can have an index on the fields of its records. Indices can be\n\n/// of one of three types:\n\n///\n\n/// * string (possibly case insensitive);\n\n/// * number;\n\n/// * array.\n\n///\n\n/// Indices can be set, dropped, optimized or rebuilt. Indices are stored in a separate file\n\n/// from their collections and can speed up certain access patterns in queries. 
Naturally,\n\n/// indices are specified for some field in collection records, so this structure is used to\n\n/// configure indices for one specific field.\n\n///\n\n/// Index manipulation is done with this structure which provides a builder-like interface\n\n/// to create, change properties or drop an index on one field. Since an index can't exist\n\n/// separately from a collection, this structure is linked via a lifetime to its corresponding\n\n/// collection object. An instance of this structure is obtained with `Collection::index()` method.\n\n///\n\n/// # Example\n\n///\n\n/// ```no_run\n", "file_path": "src/database/indices.rs", "rank": 23, "score": 28380.998523574228 }, { "content": " /// # use ejdb::Database;\n\n /// let db = Database::open(\"/path/to/db\").unwrap();\n\n /// let coll = db.collection(\"some_collection\").unwrap();\n\n ///\n\n /// // create a case sensitive string index on \"name\" field in this collection\n\n /// coll.index(\"name\").string(true).set().unwrap();\n\n /// ```\n\n ///\n\n /// See builder methods for more examples.\n\n pub fn index<S: Into<String>>(&self, key: S) -> Index {\n\n Index {\n\n coll: self,\n\n key: key.into(),\n\n flags: None,\n\n }\n\n }\n\n}\n\n\n\n/// A builder for an operation on an index of a certain field of an EJDB collection.\n\n///\n", "file_path": "src/database/indices.rs", "rank": 24, "score": 28380.91498593337 }, { "content": " /// Drops indices of the previously specified types on this field.\n\n ///\n\n /// Panics if no type has been set prior to calling this method.\n\n pub fn drop(self) -> Result<()> {\n\n self.add_flags(ejdb_sys::JBIDXDROP).check_type().execute()\n\n }\n\n\n\n /// Rebuilds indices of the previously specified types on this field from scratch.\n\n ///\n\n /// Panics if no type has been set prior to calling this method.\n\n pub fn rebuild(self) -> Result<()> {\n\n self.add_flags(ejdb_sys::JBIDXREBLD).check_type().execute()\n\n }\n\n\n\n /// Optimizes indices of the previously specified 
types on this field.\n\n ///\n\n /// Panics if no type has been set prior to calling this method.\n\n pub fn optimize(self) -> Result<()> {\n\n self.add_flags(ejdb_sys::JBIDXOP).check_type().execute()\n\n }\n", "file_path": "src/database/indices.rs", "rank": 25, "score": 28380.470006276144 }, { "content": "\n\n fn check_type(self) -> Self {\n\n let flags = self.flags.expect(\"index type is not specified\");\n\n assert!(\n\n [\n\n ejdb_sys::JBIDXSTR,\n\n ejdb_sys::JBIDXISTR,\n\n ejdb_sys::JBIDXNUM,\n\n ejdb_sys::JBIDXARR\n\n ]\n\n .iter()\n\n .any(|&f| flags & f != 0),\n\n \"index type is not specified\"\n\n );\n\n self\n\n }\n\n\n\n fn execute(self) -> Result<()> {\n\n let flags = self.flags.expect(\"index flags are not defined\"); // should always unwrap\n\n let key = try!(CString::new(self.key).map_err(|_| \"invalid key\"));\n", "file_path": "src/database/indices.rs", "rank": 26, "score": 28379.149909199197 }, { "content": " key: self.key,\n\n flags: Some(self.flags.unwrap_or(0) | flags),\n\n }\n\n }\n\n\n\n /// Specifies that this index must be built over string values of this field.\n\n ///\n\n /// `case_sensitive` argument determines whether this index must take string case into account,\n\n /// `true` for case sensitive matching, `false` for the opposite.\n\n pub fn string(self, case_sensitive: bool) -> Self {\n\n self.add_flags(if case_sensitive {\n\n ejdb_sys::JBIDXSTR\n\n } else {\n\n ejdb_sys::JBIDXISTR\n\n })\n\n }\n\n\n\n /// Specifies that this index must be built over numeric values of this field.\n\n pub fn number(self) -> Self {\n\n self.add_flags(ejdb_sys::JBIDXNUM)\n", "file_path": "src/database/indices.rs", "rank": 27, "score": 28378.142315437923 }, { "content": " let result =\n\n unsafe { ejdb_sys::ejdbsetindex(self.coll.coll, key.as_ptr(), flags as c_int) };\n\n if result {\n\n Ok(())\n\n } else {\n\n self.coll.db.last_error(\"cannot update index\")\n\n }\n\n }\n\n}\n", "file_path": "src/database/indices.rs", "rank": 28, "score": 
28377.295663884895 }, { "content": "//! Contains low-level utilities for conversion between Rust and EJDB BSON representations.\n\n//!\n\n//! This module is only public to facilitate direct usage of `ejdb-sys` library, if such\n\n//! need arises. The types provided here are useful for converting Rust BSON values\n\n//! to EJDB ones and vice versa.\n\n//!\n\n//! Types from this module should not be used unless absolutely necessary.\n\n\n\nuse std::slice;\n\n\n\nuse bson::oid;\n\nuse bson::{self, DecoderResult, Document, EncoderResult};\n\nuse ejdb_sys;\n\n\n\npub struct EjdbBsonDocument(*mut ejdb_sys::bson);\n\n\n\nimpl EjdbBsonDocument {\n\n pub fn empty() -> EjdbBsonDocument {\n\n unsafe {\n\n // TODO: check for alloc errors properly\n", "file_path": "src/ejdb_bson.rs", "rank": 29, "score": 28253.346360211355 }, { "content": " ))\n\n }\n\n }\n\n\n\n pub fn from_bson(bson: &Document) -> EncoderResult<EjdbBsonDocument> {\n\n let mut buffer = Vec::new();\n\n bson::encode_document(&mut buffer, bson).map(|_| EjdbBsonDocument::from_buffer(&buffer))\n\n }\n\n\n\n pub fn to_bson(&self) -> DecoderResult<Document> {\n\n let buf_ptr = unsafe { ejdb_sys::bson_data(self.0 as *const _) as *const u8 };\n\n let buf_size = unsafe { ejdb_sys::bson_size(self.0 as *const _) };\n\n\n\n let mut buf = unsafe { slice::from_raw_parts(buf_ptr, buf_size as usize) };\n\n bson::decode_document(&mut buf)\n\n }\n\n\n\n #[inline]\n\n pub fn as_raw(&self) -> *const ejdb_sys::bson {\n\n self.0 as *const _\n", "file_path": "src/ejdb_bson.rs", "rank": 30, "score": 28250.68713315869 }, { "content": " }\n\n\n\n #[inline]\n\n pub fn as_raw_mut(&mut self) -> *mut ejdb_sys::bson {\n\n self.0 as *mut _\n\n }\n\n}\n\n\n\nimpl Drop for EjdbBsonDocument {\n\n fn drop(&mut self) {\n\n unsafe {\n\n ejdb_sys::bson_del(self.0);\n\n }\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct EjdbObjectId(ejdb_sys::bson_oid_t);\n\n\n\nimpl EjdbObjectId {\n", "file_path": "src/ejdb_bson.rs", "rank": 31, "score": 
28249.298826793667 }, { "content": " let bson_ptr = ejdb_sys::bson_create();\n\n if bson_ptr.is_null() {\n\n panic!(\"Cannot allocate new BSON document\");\n\n }\n\n ejdb_sys::bson_init(bson_ptr);\n\n EjdbBsonDocument::from_ptr(bson_ptr)\n\n }\n\n }\n\n\n\n #[inline]\n\n pub unsafe fn from_ptr(ptr: *mut ejdb_sys::bson) -> EjdbBsonDocument {\n\n EjdbBsonDocument(ptr)\n\n }\n\n\n\n #[inline]\n\n pub fn from_buffer(buf: &[u8]) -> EjdbBsonDocument {\n\n unsafe {\n\n EjdbBsonDocument(ejdb_sys::bson_create_from_buffer(\n\n buf.as_ptr() as *const _,\n\n buf.len() as i32,\n", "file_path": "src/ejdb_bson.rs", "rank": 32, "score": 28248.162790124417 }, { "content": " }\n\n\n\n #[inline]\n\n pub fn to_ejdb(self) -> ejdb_sys::bson_oid_t {\n\n self.0\n\n }\n\n\n\n #[inline]\n\n pub fn as_raw(&self) -> *const ejdb_sys::bson_oid_t {\n\n &self.0\n\n }\n\n\n\n #[inline]\n\n pub fn as_raw_mut(&mut self) -> *mut ejdb_sys::bson_oid_t {\n\n &mut self.0\n\n }\n\n}\n\n\n\nimpl From<ejdb_sys::bson_oid_t> for EjdbObjectId {\n\n #[inline]\n", "file_path": "src/ejdb_bson.rs", "rank": 33, "score": 28246.03183268051 }, { "content": " #[inline]\n\n pub fn empty() -> EjdbObjectId {\n\n let empty_arr: [i8; 12] = [0; 12];\n\n EjdbObjectId(ejdb_sys::bson_oid_t { bytes: empty_arr })\n\n }\n\n\n\n #[inline]\n\n pub fn to_rust(self) -> oid::ObjectId {\n\n let bytes: [i8; 12];\n\n unsafe {\n\n bytes = (self.0).bytes;\n\n }\n\n oid::ObjectId::with_bytes(to_u(bytes))\n\n }\n\n\n\n #[inline]\n\n pub fn from_rust(oid: oid::ObjectId) -> EjdbObjectId {\n\n EjdbObjectId(ejdb_sys::bson_oid_t {\n\n bytes: to_i(oid.bytes()),\n\n })\n", "file_path": "src/ejdb_bson.rs", "rank": 34, "score": 28243.49711971553 }, { "content": " fn to_bson(self) -> Bson {\n\n Bson::I32(self)\n\n }\n\n}\n\n\n\nimpl BsonNumber for i64 {\n\n #[inline]\n\n fn to_bson(self) -> Bson {\n\n Bson::I64(self)\n\n }\n\n}\n", "file_path": "src/utils/bson.rs", "rank": 35, "score": 28243.24082788078 }, { "content": " fn from(oid: 
ejdb_sys::bson_oid_t) -> EjdbObjectId {\n\n EjdbObjectId(oid)\n\n }\n\n}\n\n\n\nimpl From<oid::ObjectId> for EjdbObjectId {\n\n #[inline]\n\n fn from(oid: oid::ObjectId) -> EjdbObjectId {\n\n EjdbObjectId::from_rust(oid)\n\n }\n\n}\n\n\n\nimpl Into<ejdb_sys::bson_oid_t> for EjdbObjectId {\n\n #[inline]\n\n fn into(self) -> ejdb_sys::bson_oid_t {\n\n self.to_ejdb()\n\n }\n\n}\n\n\n\nimpl Into<oid::ObjectId> for EjdbObjectId {\n\n #[inline]\n\n fn into(self) -> oid::ObjectId {\n\n self.to_rust()\n\n }\n\n}\n\n\n", "file_path": "src/ejdb_bson.rs", "rank": 36, "score": 28242.13655467962 }, { "content": "use bson::Bson;\n\n\n", "file_path": "src/utils/bson.rs", "rank": 37, "score": 28240.213528799068 }, { "content": "#[inline]\n\nfn last_error_code(ejdb: *mut ejdb_sys::EJDB) -> i32 {\n\n unsafe { ejdb_sys::ejdbecode(ejdb) }\n\n}\n\n\n", "file_path": "src/database/mod.rs", "rank": 38, "score": 24847.92427737598 }, { "content": "enum FieldConstraintData {\n\n Root(Query),\n\n Child(Box<FieldConstraint>),\n\n}\n\n\n\n/// A transient builder for adding field-based query constraints.\n\n///\n\n/// Instances of this structure are returned by `Query::field()` and `FieldConstraint::field()`\n\n/// methods.\n\npub struct FieldConstraint(Cow<'static, str>, FieldConstraintData);\n\n\n\nimpl FieldConstraint {\n\n fn process<T: Into<Bson>>(self, value: T) -> Query {\n\n match self.1 {\n\n FieldConstraintData::Root(mut q) => match value.into() {\n\n Bson::Document(doc) => q.merge_documents_at_key(self.0.into_owned(), doc),\n\n value => {\n\n q.query.insert(self.0.into_owned(), value);\n\n q\n\n }\n", "file_path": "src/database/query.rs", "rank": 39, "score": 24127.18987963154 }, { "content": "ejdb.rs, high-level bindings for Embedded JSON Database engine\n\n==============================================================\n\n\n\n### Unmaintained\n\n\n\nI no longer have a capacity to maintain this project. 
Feel free to reach out if you want to continue its development and take its name on crates.io.\n\n\n\n---\n\n\n\n![Maintenance](https://img.shields.io/badge/maintentance-looking--for--maintainers-yellow?style=flat-square) [![Build Status][travis]](https://travis-ci.org/netvl/ejdb.rs) [![crates.io][crates]](https://crates.io/crates/ejdb)\n\n\n\n [travis]: https://img.shields.io/travis/netvl/ejdb.rs.svg?style=flat-square\n\n [crates]: https://img.shields.io/crates/v/ejdb.svg?style=flat-square\n\n\n\n[Documentation](https://netvl.github.io/ejdb.rs/)\n\n\n\nThis library provides high-level bindings to [EJDB], an Embedded JSON Database engine.\n\n\n\nEJDB is a document-oriented NoSQL embedded database, very similar to MongoDB. It allows storing,\n\nquerying and manipulation of collections of BSON documents. It has MongoDB-like query language,\n\ncollection-level transactions and typed indices.\n\n\n\nThis library attempts to provide idiomatic and safe Rust bindings to EJDB. It exposes all\n\nmain features of EJDB: databases, collections, queries, transactions, indices and metadata.\n\n\n\nSee crate documentation for usage examples.\n\n\n\n [EJDB]: http://ejdb.org/\n\n\n\n## Usage\n\n\n\nAdd a dependency in your `Cargo.toml`:\n\n\n\n```toml\n\n[dependencies]\n\nejdb = \"0.4\"\n\n```\n\n\n\nTo build the library, you need to have `cmake` installaled along with `gcc` and `clang`. 
\n\n\n\n## Changelog\n\n\n\n### Version 0.4.0\n\n\n\n* Switched to manual compilation and linking of ejdb statically.\n\n\n\n### Version 0.3.0\n\n\n\n* Bumped the `bson` dependency version.\n\n\n\n### Version 0.2.0\n\n\n\n* Bumped versions of various dependencies.\n\n\n\n### Version 0.1.2\n\n\n\n* Improved `bson!` macro to support optional values.\n\n\n\n### Version 0.1.1\n\n\n\n* Made `Database` implement `Send`.\n\n\n\n### Version 0.1.0\n\n\n\n* Initial release.\n\n\n\n## License\n\n\n\nThis library is provided under MIT license.\n", "file_path": "Readme.md", "rank": 40, "score": 17691.11016376475 }, { "content": "# Native bindings for libejdb\n\n\n\nThis crate provides low-level bindings to libejdb. It is used as a basis for ejdb.rs, high-level\n\nbindings for EJDB.\n\n\n\n## Usage\n\n\n\nAdd a dependency in your `Cargo.toml`:\n\n\n\n```toml\n\n[dependencies]\n\nejdb-sys = \"0.3\"\n\n```\n\nTo compile you need to have `cmake` installaled along with `gcc` and `clang`. \n\nIn runtime you need to have `gzlib` installed and available through pkg-config (almost all distros have it preinstalled).\n\n\n\n\n\nNote, however, that you usually don't need to depend on this crate directly; use `ejdb`\n\nlibrary instead. 
Therefore, no compatibility guarantees are given.\n\n\n\n## License\n\n\n\nThis library is provided under MIT license.\n", "file_path": "ejdb-sys/Readme.md", "rank": 41, "score": 16432.508772058605 }, { "content": "\n\nimpl CollectionOptions {\n\n pub fn large(mut self, large: bool) -> CollectionOptions {\n\n self.large = large;\n\n self\n\n }\n\n\n\n pub fn compressed(mut self, compressed: bool) -> CollectionOptions {\n\n self.compressed = compressed;\n\n self\n\n }\n\n\n\n pub fn records(mut self, records: i64) -> CollectionOptions {\n\n self.records = records;\n\n self\n\n }\n\n\n\n pub fn cached_records(mut self, cached_records: i32) -> CollectionOptions {\n\n self.cached_records = cached_records;\n\n self\n", "file_path": "src/database/mod.rs", "rank": 49, "score": 32.88749593263007 }, { "content": "///\n\n/// ```no_run\n\n/// # use ejdb::CollectionOptions;\n\n/// let options = CollectionOptions::default()\n\n/// .large(true)\n\n/// .compressed(true)\n\n/// .records(1_024_000)\n\n/// .cached_records(1024);\n\n/// ```\n\n#[derive(Clone, Eq, PartialEq, Debug)]\n\npub struct CollectionOptions {\n\n /// Make the collection \"large\", i.e. able to hold more than 2GB of data. Default is false.\n\n pub large: bool,\n\n /// Compress records in the collection with DEFLATE. Default is false.\n\n pub compressed: bool,\n\n /// Expected number of records in the collection. Default is 128 000.\n\n pub records: i64,\n\n /// Maximum number of records cached in memory. 
Default is 0.\n\n pub cached_records: i32,\n\n}\n", "file_path": "src/database/mod.rs", "rank": 50, "score": 30.702918742565338 }, { "content": " /// // work with the collection\n\n /// ```\n\n pub fn get_or_create<S: Into<Vec<u8>>>(self, db: &Database, name: S) -> Result<Collection> {\n\n db.collection_with_options(name, self)\n\n }\n\n}\n\n\n\nimpl Default for CollectionOptions {\n\n fn default() -> CollectionOptions {\n\n CollectionOptions {\n\n large: false,\n\n compressed: false,\n\n records: 128_000,\n\n cached_records: 0,\n\n }\n\n }\n\n}\n\n\n\n/// A handle to an EJDB collection.\n\n///\n", "file_path": "src/database/mod.rs", "rank": 51, "score": 30.684237154868924 }, { "content": " /// // work with the collection\n\n /// ```\n\n pub fn collection_with_options<S: Into<Vec<u8>>>(\n\n &self,\n\n name: S,\n\n options: CollectionOptions,\n\n ) -> Result<Collection> {\n\n let p = try!(CString::new(name).map_err(|_| \"invalid collection name\"));\n\n let mut ejcollopts = ejdb_sys::EJCOLLOPTS {\n\n large: options.large,\n\n compressed: options.compressed,\n\n records: options.records,\n\n cachedrecords: options.cached_records as c_int,\n\n };\n\n let coll = unsafe { ejdb_sys::ejdbcreatecoll(self.0, p.as_ptr(), &mut ejcollopts) };\n\n if coll.is_null() {\n\n self.last_error(\"cannot create or open a collection\")\n\n } else {\n\n Ok(Collection {\n\n coll: coll,\n", "file_path": "src/database/mod.rs", "rank": 55, "score": 26.949241337319908 }, { "content": "/// Objects of this structure are returned by `PreparedQuery::find()` method.\n\npub struct QueryResult {\n\n result: ejdb_sys::EJQRESULT,\n\n current: c_int,\n\n total: u32,\n\n}\n\n\n\nimpl QueryResult {\n\n /// Returns the number of records returned by the query.\n\n ///\n\n /// This iterator contains exactly `count()` elements.\n\n #[inline]\n\n pub fn count(&self) -> u32 {\n\n self.total\n\n }\n\n}\n\n\n\nimpl Drop for QueryResult {\n\n fn drop(&mut self) {\n\n unsafe {\n", "file_path": 
"src/database/mod.rs", "rank": 57, "score": 23.61740706539575 }, { "content": " assert_eq!(coll.compressed(), true);\n\n assert_eq!(coll.large(), false);\n\n\n\n let indices: Vec<_> = coll.indices().collect();\n\n assert_eq!(indices.len(), 2);\n\n\n\n for index in indices {\n\n match index.field() {\n\n \"name\" => {\n\n assert_eq!(index.name(), \"sname\");\n\n assert_eq!(\n\n index.file(),\n\n Some(&*format!(\n\n \"{}/db_test_2.idx.sname.lex\",\n\n dir.path().display()\n\n ))\n\n );\n\n assert_eq!(index.records(), Some(0));\n\n assert_eq!(index.index_type(), IndexType::Lexical);\n\n }\n", "file_path": "tests/tests.rs", "rank": 58, "score": 23.41543392916853 }, { "content": " /// use ejdb::query::{Q, QH};\n\n ///\n\n /// let db = Database::open(\"/path/to/db\").unwrap();\n\n /// let coll = db.collection(\"some_collection\").unwrap();\n\n /// match coll.query(Q.field(\"name\").eq(\"Foo\"), QH.empty()).find_one().unwrap() {\n\n /// Some(doc) => { /* `doc` is the first record with \"name\" field equal to \"Foo\" */ }\n\n /// None => { /* no document with \"name\" equal to \"Foo\" has been found */ }\n\n /// }\n\n /// ```\n\n pub fn find_one(self) -> Result<Option<bson::Document>> {\n\n self.execute(ejdb_sys::JBQRYFINDONE)\n\n .map(|(r, n)| QueryResult {\n\n result: r,\n\n current: 0,\n\n total: n,\n\n }).and_then(|qr| match qr.into_iter().next() {\n\n Some(r) => r.map(Some),\n\n None => Ok(None),\n\n })\n\n }\n", "file_path": "src/database/mod.rs", "rank": 59, "score": 22.98646199921292 }, { "content": " }\n\n\n\n /// Invokes `db.collection_with_options(name, options)` with this object as an argument.\n\n ///\n\n /// This is a convenience method which allows setting options and creating a collection\n\n /// in one go. 
Remember that if collection with the specified name already exists,\n\n /// it will be returned and options will be ignored.\n\n ///\n\n /// `name` argument can be of any type which is convertible to a vector of bytes, like\n\n /// string or byte slice.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```no_run\n\n /// # use ejdb::{Database, CollectionOptions};\n\n /// let db = Database::open(\"/path/to/db\").unwrap();\n\n /// let coll = CollectionOptions::default()\n\n /// .large(true).compressed(true)\n\n /// .records(1_024_000).cached_records(1024)\n\n /// .get_or_create(&db, \"new_collection\").unwrap();\n", "file_path": "src/database/mod.rs", "rank": 60, "score": 22.06845758039273 }, { "content": " #[inline]\n\n fn into(self) -> Document {\n\n self.query\n\n }\n\n}\n\n\n\nimpl Deref for Query {\n\n type Target = Document;\n\n\n\n #[inline]\n\n fn deref(&self) -> &Document {\n\n self.as_bson()\n\n }\n\n}\n\n\n\nimpl DerefMut for Query {\n\n #[inline]\n\n fn deref_mut(&mut self) -> &mut Document {\n\n self.as_bson_mut()\n\n }\n\n}\n\n\n", "file_path": "src/database/query.rs", "rank": 61, "score": 21.800828448620962 }, { "content": " /// Returns a builder for setting inclusion/exclusion flag of the provided field.\n\n ///\n\n /// Corresponds to `$fields` hint in EJDB query syntax.\n\n #[inline]\n\n pub fn field<S: Into<String>>(self, field: S) -> QueryHintsField {\n\n QueryHintsField(self, field.into())\n\n }\n\n\n\n fn add_hint(&mut self, key: &str, subkey: String, value: i32) {\n\n if !self.hints.contains_key(key) {\n\n self.hints.insert(key, bson! 
{ subkey => value });\n\n } else {\n\n match self.hints.get_mut(key) {\n\n Some(&mut Bson::Document(ref mut doc)) => {\n\n doc.insert(subkey, value);\n\n }\n\n _ => unreachable!(),\n\n }\n\n }\n\n }\n", "file_path": "src/database/query.rs", "rank": 62, "score": 21.66171758472924 }, { "content": "/// A reexport of `bson` crate used by this crate in public interface.\n\npub use bson_crate as bson;\n\n\n\npub use database::indices::Index;\n\npub use database::meta;\n\npub use database::open_mode::{self, DatabaseOpenMode};\n\npub use database::query;\n\npub use database::tx::Transaction;\n\npub use database::{Collection, CollectionOptions, Database, PreparedQuery, QueryResult};\n\npub use types::{Error, Result};\n\n\n\n#[macro_use]\n\nmod macros;\n\nmod database;\n\nmod utils;\n\n\n\npub mod ejdb_bson;\n\npub mod types;\n", "file_path": "src/lib.rs", "rank": 63, "score": 21.60486911303932 }, { "content": " db: self,\n\n })\n\n }\n\n }\n\n\n\n /// A shortcut for `Database::collection_with_options(&db, name, CollectionOptions::default())`.\n\n ///\n\n /// This method is used in most cases when access to a collection is needed.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```no_run\n\n /// # use ejdb::Database;\n\n /// let db = Database::open(\"/path/to/db\").unwrap();\n\n /// let coll = db.collection(\"some_collection\").unwrap();\n\n /// // work with the collection\n\n /// ```\n\n #[inline]\n\n pub fn collection<S: Into<Vec<u8>>>(&self, name: S) -> Result<Collection> {\n\n CollectionOptions::default().get_or_create(self, name)\n", "file_path": "src/database/mod.rs", "rank": 64, "score": 21.58921500333348 }, { "content": " /// ```\n\n pub fn drop_collection<S: Into<Vec<u8>>>(&self, name: S, prune: bool) -> Result<()> {\n\n let p = try!(CString::new(name).map_err(|_| \"invalid collection name\"));\n\n if unsafe { ejdb_sys::ejdbrmcoll(self.0, p.as_ptr(), prune) } {\n\n Ok(())\n\n } else {\n\n self.last_error(\"cannot remove a collection\")\n\n }\n\n }\n\n}\n\n\n\n/// 
Represents a set of options of an EJDB collection.\n\n///\n\n/// Used when new collections are created. It is not possible to change options of a created\n\n/// collection.\n\n///\n\n/// This is a builder object, so you can chain method calls to set various options. Finally,\n\n/// you can create a collection with these options with `get_or_create()` method.\n\n///\n\n/// # Example\n", "file_path": "src/database/mod.rs", "rank": 65, "score": 21.26974487028784 }, { "content": " /// \"name\" => \"FooBar\",\n\n /// \"count\" => 12345\n\n /// }).unwrap();\n\n /// # }\n\n /// ```\n\n pub fn save<D: Borrow<bson::Document>>(&self, doc: D) -> Result<oid::ObjectId> {\n\n let mut ejdb_doc = try!(EjdbBsonDocument::from_bson(doc.borrow()));\n\n let mut out_id = EjdbObjectId::empty();\n\n\n\n if unsafe { ejdb_sys::ejdbsavebson(self.coll, ejdb_doc.as_raw_mut(), out_id.as_raw_mut()) }\n\n {\n\n Ok(out_id.into())\n\n } else {\n\n self.db.last_error(\"error saving BSON document\")\n\n }\n\n }\n\n\n\n /// Attempts to load a BSON document from this collection by its id.\n\n ///\n\n /// This is a convenient way to find a single object by its identifier without resorting\n", "file_path": "src/database/mod.rs", "rank": 66, "score": 20.950917248348834 }, { "content": "//! ```no_run\n\n//! # #[macro_use] extern crate ejdb;\n\n//! # use ejdb::Database;\n\n//! use ejdb::query::{Q, QH};\n\n//! use ejdb::bson;\n\n//! use ejdb::Result;\n\n//! # fn main() {\n\n//! # let db = Database::open(\"/path/to/db\").unwrap();\n\n//! # let coll = db.collection(\"some_collection\").unwrap();\n\n//!\n\n//! let n = coll.query(Q.field(\"name\").eq(\"Foo\").set(\"count\", 10), QH.empty()).update().unwrap();\n\n//! // `n` is the number of affected rows\n\n//!\n\n//! let names = [\"foo\", \"bar\", \"baz\"];\n\n//! let items = coll.query(Q.field(\"name\").contained_in(names.iter().cloned()), QH.max(12))\n\n//! .find().unwrap();\n\n//! 
// `items` is an iterator which contains at maximum 12 records whose `name`\n\n//! // field is either \"foo\", \"bar\" or \"baz\"\n\n//! let items: Result<Vec<bson::Document>> = items.collect(); // collect them into a vector\n\n//!\n", "file_path": "src/lib.rs", "rank": 67, "score": 20.88354869628901 }, { "content": " /// let db = Database::open(\"/path/to/db\").unwrap();\n\n /// let coll = db.collection(\"some_collection\").unwrap();\n\n /// let result = coll.query(Q.field(\"name\").eq(\"Foo\"), QH.empty()).find().unwrap();\n\n /// let items: Result<Vec<_>, _> = result.collect(); // collect all found records into a vector\n\n /// ```\n\n pub fn find(self) -> Result<QueryResult> {\n\n self.execute(0).map(|(r, n)| QueryResult {\n\n result: r,\n\n current: 0,\n\n total: n,\n\n })\n\n }\n\n\n\n fn execute(self, flags: u32) -> Result<(ejdb_sys::EJQRESULT, u32)> {\n\n let query = self.query.borrow().as_bson();\n\n let hints = self.hints.borrow().as_bson();\n\n\n\n let mut query_doc = Vec::new();\n\n try!(bson::encode_document(&mut query_doc, query));\n\n\n", "file_path": "src/database/mod.rs", "rank": 68, "score": 20.683431378725217 }, { "content": "//! with the default options. See `CollectionOptions` structure for more information about\n\n//! which options collections have.\n\n//!\n\n//! A collection may be used to perform queries, initiate transactions or save/load BSON\n\n//! documents by their identifiers directly, without using queries. Collection objects\n\n//! can also be used to manage indices.\n\n//!\n\n//! ## Saving/loading BSON documents\n\n//!\n\n//! You can use `Collection::save()` or `Collection::save_all()` methods to store BSON documents\n\n//! directly into the collection, and `Collection::load()` to load a document by its id:\n\n//!\n\n//! ```no_run\n\n//! # #[macro_use] extern crate ejdb;\n\n//! # use ejdb::Database;\n\n//! # fn main() {\n\n//! # let db = Database::open(\"/path/to/db\").unwrap();\n\n//! 
# let coll = db.collection(\"some_collection\").unwrap();\n\n//! let mut d = bson! {\n\n//! \"name\" => \"Foo Bar\",\n", "file_path": "src/lib.rs", "rank": 69, "score": 20.058595554631033 }, { "content": "\n\n let collections: Vec<_> = meta.collections().collect();\n\n assert_eq!(collections.len(), 2);\n\n\n\n for coll in collections {\n\n match coll.name() {\n\n \"test_1\" => {\n\n assert_eq!(coll.file(), format!(\"{}/db_test_1\", dir.path().display()));\n\n assert_eq!(coll.records(), 2);\n\n assert_eq!(coll.buckets(), 262139);\n\n assert_eq!(coll.cached_records(), 0);\n\n assert_eq!(coll.compressed(), false);\n\n assert_eq!(coll.large(), false);\n\n assert_eq!(coll.indices().len(), 0);\n\n }\n\n \"test_2\" => {\n\n assert_eq!(coll.file(), format!(\"{}/db_test_2\", dir.path().display()));\n\n assert_eq!(coll.records(), 0);\n\n assert_eq!(coll.buckets(), 262139);\n\n assert_eq!(coll.cached_records(), 512);\n", "file_path": "tests/tests.rs", "rank": 70, "score": 19.944237577241903 }, { "content": " ///\n\n /// 1. If this field holds an array of strings, `$stror` returns those records whose\n\n /// array contains at least one element from `values`.\n\n /// 2. If this field holds a string, it is first split into an array by space `' '` or comma\n\n /// `','` characters, and the resulting array is queried like in 1.\n\n pub fn str_or<I>(self, values: I) -> Query\n\n where\n\n I: IntoIterator,\n\n I::Item: Into<String>,\n\n {\n\n self.process(bson! {\n\n \"$stror\" => (\n\n values.into_iter().map(|v| v.into().into()) // S -> String -> Bson\n\n .collect::<Vec<Bson>>()\n\n )\n\n })\n\n }\n\n}\n\n\n\n/// An entry point for constructing queries.\n", "file_path": "src/database/query.rs", "rank": 71, "score": 19.734078203571045 }, { "content": " /// to queries. 
If the object with the specified id is present in the collection,\n\n /// returns it, otherwise returns `None`.\n\n ///\n\n /// # Failures\n\n ///\n\n /// Returns an error if there are problems in converting the document from EJDB BSON\n\n /// representation or if the corresponding EJDB operation can't be completed successfully.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```no_run\n\n /// # use ejdb::Database;\n\n /// # use ejdb::bson::oid::ObjectId;\n\n /// let db = Database::open(\"/path/to/db\").unwrap();\n\n /// let coll = db.collection(\"some_collection\").unwrap();\n\n /// let value = coll.load(&ObjectId::with_string(\"1234567890abcdef0987feab\").unwrap()).unwrap();\n\n /// // value is ejdb::bson::Document\n\n /// ```\n\n pub fn load(&self, id: &oid::ObjectId) -> Result<Option<bson::Document>> {\n\n let ejdb_oid: EjdbObjectId = id.clone().into();\n", "file_path": "src/database/mod.rs", "rank": 72, "score": 19.650134212495878 }, { "content": " let result = unsafe { ejdb_sys::ejdbloadbson(self.coll, ejdb_oid.as_raw()) };\n\n if result.is_null() {\n\n if self.db.last_error_msg().is_none() {\n\n Ok(None)\n\n } else {\n\n self.db.last_error(\"error loading BSON document\")\n\n }\n\n } else {\n\n unsafe {\n\n EjdbBsonDocument::from_ptr(result)\n\n .to_bson()\n\n .map(Some)\n\n .map_err(|e| e.into())\n\n }\n\n }\n\n }\n\n\n\n /// Saves all BSON documents in the provided iterable to this collection.\n\n ///\n\n /// Every BSON document from the provided iterable will be saved to this collection as if\n", "file_path": "src/database/mod.rs", "rank": 73, "score": 19.638663724300066 }, { "content": " /// coll.save_all(&[\n\n /// bson!{ \"name\" => \"Foo\", \"count\" => 123 },\n\n /// bson!{ \"name\" => \"Bar\", \"items\" => [4, 5, 6] }\n\n /// ]).unwrap();\n\n /// # }\n\n /// ```\n\n pub fn save_all<I>(&self, docs: I) -> Result<Vec<oid::ObjectId>>\n\n where\n\n I: IntoIterator,\n\n I::Item: Borrow<bson::Document>,\n\n {\n\n let mut result = Vec::new();\n\n for doc in docs 
{\n\n match self.save(doc.borrow()) {\n\n Ok(id) => result.push(id),\n\n Err(e) => {\n\n return Err(Error::PartialSave(PartialSave {\n\n cause: Box::new(e),\n\n successful_ids: result,\n\n }))\n", "file_path": "src/database/mod.rs", "rank": 74, "score": 19.527909059217112 }, { "content": " /// Adds an `$strand` constraint for this field.\n\n ///\n\n /// 1. If this field holds an array of strings, `$strand` returns those records whose\n\n /// array contains all elements from `values`.\n\n /// 2. If this field holds a string, it is first split into an array by space `' '` or comma\n\n /// `','` characters, and the resulting array is queried like in 1.\n\n pub fn str_and<I>(self, values: I) -> Query\n\n where\n\n I: IntoIterator,\n\n I::Item: Into<String>,\n\n {\n\n self.process(bson! {\n\n \"$strand\" => (\n\n values.into_iter().map(|v| v.into().into()) // S -> String -> Bson\n\n .collect::<Vec<Bson>>()\n\n )\n\n })\n\n }\n\n\n\n /// Adds an `$stror` constraint for this field.\n", "file_path": "src/database/query.rs", "rank": 75, "score": 19.436472422571367 }, { "content": " /// Some(coll) => { /* work with the collection */ }\n\n /// None => { /* do something else */ }\n\n /// }\n\n /// ```\n\n pub fn get_collection<S: Into<Vec<u8>>>(&self, name: S) -> Result<Option<Collection>> {\n\n let p = try!(CString::new(name).map_err(|_| \"invalid collection name\"));\n\n let coll = unsafe { ejdb_sys::ejdbgetcoll(self.0, p.as_ptr()) };\n\n if coll.is_null() {\n\n match self.last_error_msg() {\n\n None => Ok(None),\n\n Some(msg) => Err(msg.into()),\n\n }\n\n } else {\n\n Ok(Some(Collection {\n\n coll: coll,\n\n db: self,\n\n }))\n\n }\n\n }\n\n\n", "file_path": "src/database/mod.rs", "rank": 76, "score": 19.406848371041246 }, { "content": " ejdb_sys::ejdbqresultdispose(self.result);\n\n }\n\n }\n\n}\n\n\n\nimpl Iterator for QueryResult {\n\n type Item = Result<bson::Document>;\n\n\n\n fn next(&mut self) -> Option<Result<bson::Document>> {\n\n let mut item_size = 0;\n\n let 
item: *const u8 = unsafe {\n\n ejdb_sys::ejdbqresultbsondata(self.result, self.current, &mut item_size) as *const _\n\n };\n\n if item.is_null() {\n\n return None;\n\n }\n\n self.current += 1;\n\n\n\n let mut data = unsafe { slice::from_raw_parts(item, item_size as usize) };\n\n Some(bson::decode_document(&mut data).map_err(|e| e.into()))\n\n }\n\n}\n\n\n", "file_path": "src/database/mod.rs", "rank": 77, "score": 18.873607326621286 }, { "content": "impl<'db> Collection<'db> {\n\n /// Returns the name of the collection.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```no_run\n\n /// # use ejdb::Database;\n\n /// let db = Database::open(\"/path/to/db\").unwrap();\n\n /// let coll = db.collection(\"some_collection\").unwrap();\n\n /// assert_eq!(\"some_collection\", coll.name());\n\n /// ```\n\n pub fn name(&self) -> &str {\n\n fn get_coll_name(coll: *mut ejdb_sys::EJCOLL) -> (*const u8, usize) {\n\n #[repr(C)]\n\n struct EjcollInternal {\n\n cname: *const c_char,\n\n cnamesz: c_int,\n\n }\n\n\n\n let coll_internal = coll as *const _ as *const EjcollInternal;\n", "file_path": "src/database/mod.rs", "rank": 78, "score": 18.80870324177969 }, { "content": " ///\n\n /// let db = Database::open(\"/path/to/db\").unwrap();\n\n /// let coll = db.collection(\"some_collection\").unwrap();\n\n /// let query = coll.query(Q.field(\"name\").eq(\"Foo\"), QH.empty());\n\n /// let n = query.count().unwrap();\n\n /// // n is the number of records with \"name\" field equal to \"Foo\"\n\n /// ```\n\n #[inline]\n\n pub fn count(self) -> Result<u32> {\n\n self.execute(ejdb_sys::JBQRYCOUNT).map(|(_, n)| n)\n\n }\n\n\n\n /// Executes the query which does not return results, returning the number of affected records.\n\n ///\n\n /// No data is loaded from the database when this method is executed, so it is primarily\n\n /// needed for updating queries.\n\n ///\n\n /// Note that due to EJDB API structure this method is exactly equivalent to\n\n /// `PreparedQuery::count()`, but it has its own name 
for semantic purposes.\n\n ///\n", "file_path": "src/database/mod.rs", "rank": 79, "score": 18.473672300535213 }, { "content": "impl Deref for QueryHints {\n\n type Target = Document;\n\n\n\n #[inline]\n\n fn deref(&self) -> &Document {\n\n self.as_bson()\n\n }\n\n}\n\n\n\nimpl DerefMut for QueryHints {\n\n #[inline]\n\n fn deref_mut(&mut self) -> &mut Document {\n\n self.as_bson_mut()\n\n }\n\n}\n\n\n\n/// An entry point for constructing query hints.\n\n///\n\n/// This is a convenience API. This structure provides the same methods as `QueryHints`\n\n/// structure and inside them a fresh `QueryHints` instance is created and the corresponding\n", "file_path": "src/database/query.rs", "rank": 80, "score": 18.418811044691296 }, { "content": "//! Query API, a simple builder-like constructor for EJDB queries.\n\n\n\nuse std::borrow::Cow;\n\nuse std::ops::{Deref, DerefMut};\n\n\n\nuse bson::{Bson, Document};\n\n\n\nuse utils::bson::BsonNumber;\n\n\n\n/// A container of EJDB query options.\n\n///\n\n/// This structure is a wrapper around a BSON document with various options affecting query\n\n/// execution in EJDB. It implements `Deref<Target=bson::Document>` and `DerefMut`, therefore\n\n/// it is possible to work with it as a BSON document directly. It also has\n\n/// `into_bson()`/`as_bson()` methods and `Into<bson::Document>`/`From<bson::Document>`\n\n/// implementations. If an invalid document is constructed and passed as a hints map when\n\n/// executing a query, an error will be returned.\n\n///\n\n/// Query hints are a part of any query operation and are passed with the actual query to\n\n/// `Collection::query()` method; they can be empty if the default behavior is sufficient.\n", "file_path": "src/database/query.rs", "rank": 81, "score": 18.182034503319077 }, { "content": " ///\n\n /// Limits the number of array items of the field `key` in the returned result. 
`limit` is\n\n /// the maximum number of elements to be returned starting from `offset`.\n\n pub fn slice_with_offset<S: Into<String>>(self, key: S, offset: i64, limit: i64) -> Query {\n\n self.add_subkey_at_key(\n\n \"$do\",\n\n key,\n\n bson!(\"$slice\" => [ (offset.to_bson()), (limit.to_bson()) ]),\n\n )\n\n }\n\n\n\n /// Converts this query to a BSON document.\n\n #[inline]\n\n pub fn into_bson(self) -> Document {\n\n self.query\n\n }\n\n\n\n /// Returns a reference to this query as a BSON document.\n\n #[inline]\n\n pub fn as_bson(&self) -> &Document {\n", "file_path": "src/database/query.rs", "rank": 82, "score": 18.026697998337102 }, { "content": " fn modify_document_at_key<K, F1, F2, V>(\n\n mut self,\n\n key: K,\n\n value: V,\n\n on_document: F1,\n\n on_something_else: F2,\n\n ) -> Query\n\n where\n\n K: Into<String> + AsRef<str>,\n\n F1: FnOnce(&mut Document, V), // Document is value at key K\n\n F2: FnOnce(&mut Document, K, V), // Document is the query itself\n\n {\n\n // unsafe is to overcome non-lexical borrow issues (and entry API is not available)\n\n let r = self\n\n .query\n\n .get_mut(key.as_ref())\n\n .map(|p| unsafe { &mut *(p as *mut _) });\n\n if let Some(&mut Bson::Document(ref mut d)) = r {\n\n on_document(d, value);\n\n } else {\n", "file_path": "src/database/query.rs", "rank": 83, "score": 18.01324874788212 }, { "content": " /// # Failures\n\n ///\n\n /// Returns an error if the query document can't be serialized to EJDB representation,\n\n /// if writing to the output log has failed or if any of the underlying EJDB operations\n\n /// can't be completed successfully.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```no_run\n\n /// # use ejdb::Database;\n\n /// use ejdb::query::{Q, QH};\n\n ///\n\n /// let db = Database::open(\"/path/to/db\").unwrap();\n\n /// let coll = db.collection(\"some_collection\").unwrap();\n\n /// let n = coll.query(Q.field(\"name\").eq(\"Foo\").set(\"count\", 42), QH.empty())\n\n /// .update().unwrap();\n\n /// // n is 
the number of records affected by the update\n\n /// ```\n\n #[inline]\n\n pub fn update(self) -> Result<u32> {\n", "file_path": "src/database/mod.rs", "rank": 84, "score": 17.654519313039167 }, { "content": "use std::ops::{Deref, DerefMut};\n\nuse std::slice;\n\n\n\nuse ejdb_sys;\n\n\n\npub struct TCXString(*mut ejdb_sys::TCXSTR);\n\n\n\nimpl Drop for TCXString {\n\n fn drop(&mut self) {\n\n unsafe {\n\n ejdb_sys::tcxstrdel(self.0);\n\n }\n\n }\n\n}\n\n\n\nimpl TCXString {\n\n #[inline]\n\n pub fn new() -> TCXString {\n\n TCXString(unsafe { ejdb_sys::tcxstrnew() })\n\n }\n", "file_path": "src/utils/tcxstr.rs", "rank": 85, "score": 17.607089225110272 }, { "content": " &self.query\n\n }\n\n\n\n /// Returns a mutable reference to this query as a BSON document.\n\n ///\n\n /// Be careful when modifying the document directly because it may lead to invalid queries.\n\n #[inline]\n\n pub fn as_bson_mut(&mut self) -> &mut Document {\n\n &mut self.query\n\n }\n\n}\n\n\n\nimpl From<Document> for Query {\n\n #[inline]\n\n fn from(document: Document) -> Query {\n\n Query { query: document }\n\n }\n\n}\n\n\n\nimpl Into<Document> for Query {\n", "file_path": "src/database/query.rs", "rank": 86, "score": 17.492922890505945 }, { "content": " /// Returns the given collection by its name, if it exists.\n\n ///\n\n /// This method will only return a collection if it already exists in the database; it\n\n /// won't create a new collection. 
See `Database::collection_with_options()` and\n\n /// `Database::collection()` methods if you need to create new collections.\n\n ///\n\n /// `path` argument may be of any type convertible to a vector of bytes, like strings or\n\n /// byte arrays.\n\n ///\n\n /// # Failures\n\n ///\n\n /// Fails if `name` contains zero bytes or in other cases when the corresponding EJDB\n\n /// operation can't be completed.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```no_run\n\n /// # use ejdb::Database;\n\n /// let db = Database::open(\"/path/to/db\").unwrap();\n\n /// match db.get_collection(\"some_collection\").unwrap() {\n", "file_path": "src/database/mod.rs", "rank": 88, "score": 17.201513801786604 }, { "content": " self.add_hint(1)\n\n }\n\n}\n\n\n\n/// A builder for inclusion/exclusion flag for a specific field.\n\npub struct QueryHintsField(QueryHints, String);\n\n\n\nimpl QueryHintsField {\n\n fn add_hint(mut self, value: i32) -> QueryHints {\n\n self.0.add_hint(\"$fields\", self.1, value);\n\n self.0\n\n }\n\n\n\n /// Sets that query results must not contain a field with this name.\n\n #[inline]\n\n pub fn exclude(self) -> QueryHints {\n\n self.add_hint(-1)\n\n }\n\n\n\n /// Sets that query results must contain a field with this name, if available.\n", "file_path": "src/database/query.rs", "rank": 89, "score": 17.15457097735146 }, { "content": "\n\n /// Converts these hints to a BSON document.\n\n #[inline]\n\n pub fn into_bson(self) -> Document {\n\n self.hints\n\n }\n\n\n\n /// Returns a reference to these hints as a BSON document.\n\n #[inline]\n\n pub fn as_bson(&self) -> &Document {\n\n &self.hints\n\n }\n\n\n\n /// Returns a mutable reference to these hints as a BSON document.\n\n ///\n\n /// Be careful when modifying the document directly because it may lead to invalid hints.\n\n #[inline]\n\n pub fn as_bson_mut(&mut self) -> &mut Document {\n\n &mut self.hints\n\n }\n", "file_path": "src/database/query.rs", "rank": 90, "score": 17.02685264403215 }, { "content": " 
/// Selects all records which satisfy at least one of the provided queries.\n\n pub fn or<I>(mut self, queries: I) -> Query\n\n where\n\n I: IntoIterator,\n\n I::Item: Into<Document>,\n\n {\n\n self.query.insert(\n\n \"$or\",\n\n queries\n\n .into_iter()\n\n .map(|v| v.into().into())\n\n .collect::<Vec<Bson>>(),\n\n );\n\n self\n\n }\n\n\n\n /// Sets equality constraint for `_id` field.\n\n ///\n\n /// This is just a shortcut for `query.field(\"_id\").eq(value)`.\n\n #[inline]\n", "file_path": "src/database/query.rs", "rank": 91, "score": 16.842135048653013 }, { "content": "#[macro_use(bson)]\n\nextern crate ejdb;\n\nextern crate bson;\n\nextern crate tempdir;\n\n\n\nuse tempdir::TempDir;\n\n\n\nuse ejdb::meta::IndexType;\n\nuse ejdb::query::{Q, QH};\n\nuse ejdb::{CollectionOptions, Database};\n\n\n\n#[test]\n", "file_path": "tests/tests.rs", "rank": 92, "score": 16.83202143128808 }, { "content": "use std::borrow::Borrow;\n\nuse std::ffi::{CStr, CString};\n\nuse std::fmt;\n\nuse std::io;\n\nuse std::ptr;\n\nuse std::slice;\n\nuse std::str;\n\n\n\nuse libc::{c_char, c_int};\n\n\n\nuse bson::{self, oid};\n\nuse ejdb_sys;\n\n\n\nuse self::open_mode::DatabaseOpenMode;\n\nuse ejdb_bson::{EjdbBsonDocument, EjdbObjectId};\n\nuse types::PartialSave;\n\nuse utils::tcxstr::TCXString;\n\nuse {Error, Result};\n\n\n\npub mod indices;\n", "file_path": "src/database/mod.rs", "rank": 93, "score": 16.747481943827893 }, { "content": " \"count\" => {\n\n assert_eq!(index.name(), \"ncount\");\n\n assert_eq!(\n\n index.file(),\n\n Some(&*format!(\n\n \"{}/db_test_2.idx.ncount.dec\",\n\n dir.path().display()\n\n ))\n\n );\n\n assert_eq!(index.records(), Some(0));\n\n assert_eq!(index.index_type(), IndexType::Decimal);\n\n }\n\n _ => panic!(\"unknown index: {:?}\", index),\n\n }\n\n }\n\n }\n\n _ => panic!(\"unknown collection: {:?}\", coll),\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/tests.rs", "rank": 94, "score": 16.734580489775716 }, { "content": "\n\n #[inline]\n\n pub fn 
as_raw(&self) -> *mut ejdb_sys::TCXSTR {\n\n self.0\n\n }\n\n}\n\n\n\nimpl Deref for TCXString {\n\n type Target = [u8];\n\n\n\n fn deref(&self) -> &[u8] {\n\n unsafe { slice::from_raw_parts((*self.0).ptr as *const _, (*self.0).size as usize) }\n\n }\n\n}\n\n\n\nimpl DerefMut for TCXString {\n\n fn deref_mut(&mut self) -> &mut [u8] {\n\n unsafe { slice::from_raw_parts_mut((*self.0).ptr as *mut _, (*self.0).size as usize) }\n\n }\n\n}\n", "file_path": "src/utils/tcxstr.rs", "rank": 95, "score": 16.702050814547647 }, { "content": "//! let item = coll.query(Q.field(\"count\").between(-10, 10.2), QH.field(\"name\").include())\n\n//! .find_one().unwrap();\n\n//! // `item` is an `Option<bson::Document>` which contains a record whose `count` field\n\n//! // is between -10 and 10.2, inclusive, if there is one, and this document will only contain\n\n//! // `name` field.\n\n//!\n\n//! let n = coll.query(Q.field(\"name\").exists(true), QH.empty()).count().unwrap();\n\n//! // `n` is the number of records which contain `name` field\n\n//! # }\n\n//! ```\n\n//!\n\n//! ## Transactions\n\n//!\n\n//! You can use `Collection::begin_transaction()` method which will start a transaction over\n\n//! this collection. Citing the official documentation:\n\n//!\n\n//! > EJDB provides atomic and durable non parallel and read-uncommitted collection level\n\n//! > transactions, i.e., There is only one transaction for collection is active for a single\n\n//! > point in a time. The data written in a transaction is visible for other non transactional\n\n//! > readers. 
EJDB transaction system utilizes write ahead logging to provide consistent\n", "file_path": "src/lib.rs", "rank": 96, "score": 16.59266938324725 }, { "content": " unsafe {\n\n (\n\n (*coll_internal).cname as *const u8,\n\n (*coll_internal).cnamesz as usize,\n\n )\n\n }\n\n }\n\n\n\n let (data, size) = get_coll_name(self.coll);\n\n let bytes = unsafe { slice::from_raw_parts(data, size) };\n\n // XXX: should be safe, but need to check\n\n unsafe { str::from_utf8_unchecked(bytes) }\n\n }\n\n\n\n /// Saves the given BSON document to this collection, assigning it a fresh id, if needed.\n\n ///\n\n /// This is a convenient way to store a single object into the database. If the document\n\n /// contains an `_id` field of type `bson::oid::ObjectId`, then it will be used as\n\n /// an identifier for the new record; otherwise, a fresh identifier is generated. The\n\n /// actual identifier of the record, be it the provided one or the generated one,\n", "file_path": "src/database/mod.rs", "rank": 97, "score": 16.430080895614413 }, { "content": " /// Returns a collection by its name or creates one with the given options if it doesn't exist.\n\n ///\n\n /// `name` argument may be of any type convertible to a byte vector, for example, strings\n\n /// or byte slices. 
`CollectionOptions` specify which options the collection will have\n\n /// if it doesn't exist; if it does exist, this argument is ignored.\n\n ///\n\n /// See also `CollectionOptions::get_or_create()` method for a possibly more convenient\n\n /// alternative.\n\n ///\n\n /// # Failures\n\n ///\n\n /// Returns an error when `name` argument contains zero bytes in it or when the corresponding\n\n /// EJDB operation cannot be completed successfully.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```no_run\n\n /// # use ejdb::{Database, CollectionOptions};\n\n /// let db = Database::open(\"/path/to/db\").unwrap();\n\n /// let coll = db.collection_with_options(\"some_collection\", CollectionOptions::default()).unwrap();\n", "file_path": "src/database/mod.rs", "rank": 98, "score": 16.376466240716816 }, { "content": " /// Constructs a `$pull` update query.\n\n ///\n\n /// Removes the `value` from an array at the field `key` in all matched records.\n\n pub fn pull<S: Into<String>, V: Into<Bson>>(self, key: S, value: V) -> Query {\n\n self.add_subkey_at_key(\"$pull\", key, value)\n\n }\n\n\n\n /// Constructs a multiple-valued `$pull` update query.\n\n ///\n\n /// Removes all values from `values` from an array at the field `key` in all matched records.\n\n /// Multiple `push_all()` calls will be merged.\n\n pub fn pull_all<S, I>(self, key: S, values: I) -> Query\n\n where\n\n S: Into<String>,\n\n I: IntoIterator,\n\n I::Item: Into<Bson>,\n\n {\n\n let values: Vec<_> = values.into_iter().map(I::Item::into).collect();\n\n self.add_subkey_at_key(\"$pullAll\", key, values)\n\n }\n", "file_path": "src/database/query.rs", "rank": 99, "score": 16.32259010130757 } ]
Rust
src/coreobjs.rs
mayadata-io/dynservice
3ce44fd53a8986056c85edecefa84c81c87b6e59
use crate::{ common::{ServiceUnregister, DEFAULT_SERVICE_TIMEOUT}, store::{StoreError, TimedLease}, ServiceError, }; use serde::{Deserialize, Serialize}; use snafu::ResultExt; #[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] pub struct Service { name: String, instance_id: String, endpoints: Vec<String>, } impl Service { pub(crate) fn new(options: ServiceConfig) -> Self { Self { name: options.name, instance_id: options.instance_id, endpoints: options.endpoints, } } pub fn name(&self) -> &str { &self.name } pub fn instance_id(&self) -> &str { &self.instance_id } pub fn endpoints(&self) -> &Vec<String> { &self.endpoints } } #[derive(Default, Debug)] pub struct ServiceConfigBuilder { name: Option<String>, instance_id: Option<String>, heartbeat_interval: Option<i64>, endpoints: Option<Vec<String>>, } impl ServiceConfigBuilder { pub fn build(self) -> ServiceConfig { let name = self.name.expect("Service name is mandatory"); let instance_id = self.instance_id.expect("Service instance ID is mandatory"); let heartbeat_interval = self.heartbeat_interval.unwrap_or(DEFAULT_SERVICE_TIMEOUT); let endpoints = self.endpoints.expect("Service endpoints are mandatory"); ServiceConfig { name, instance_id, heartbeat_interval, endpoints, } } pub fn with_name(mut self, name: impl Into<String>) -> Self { self.name = Some(name.into()); self } pub fn with_instance_id(mut self, instance_id: impl Into<String>) -> Self { self.instance_id = Some(instance_id.into()); self } pub fn with_heartbeat_interval(mut self, interval: i64) -> Self { assert!( interval > 0, "Heartbeat interval must be non-negative: {}", interval ); self.heartbeat_interval = Some(interval); self } pub fn with_endpoints<S: AsRef<str>, E: AsRef<[S]>>(mut self, endpoints: E) -> Self { let eps: Vec<String> = endpoints .as_ref() .iter() .map(|e| e.as_ref().to_string()) .collect(); assert!(!eps.is_empty(), "Service must have at least one endpoint"); self.endpoints = Some(eps); self } } #[derive(Debug, 
Clone)] pub struct ServiceConfig { name: String, instance_id: String, heartbeat_interval: i64, endpoints: Vec<String>, } impl ServiceConfig { pub fn builder() -> ServiceConfigBuilder { ServiceConfigBuilder::default() } pub fn name(&self) -> &str { &self.name } pub fn instance_id(&self) -> &str { &self.instance_id } pub fn heartbeat_interval(&self) -> i64 { self.heartbeat_interval } pub fn endpoints(&self) -> Vec<String> { self.endpoints.clone() } } #[derive(Debug)] pub struct ServiceDescriptor { service: String, instance: String, lease: TimedLease, } impl ServiceDescriptor { pub(crate) fn new(service: &Service, lease: TimedLease) -> Self { Self { service: service.name().to_string(), instance: service.instance_id().to_string(), lease, } } pub async fn send_heartbeat(&mut self) -> Result<(), ServiceError> { match self.lease.keep_alive().await { Err(StoreError::LeaseLost { .. }) => Err(ServiceError::HeartbeatLost { service: self.service.to_string(), instance: self.instance.to_string(), }), Err(e) => Err(ServiceError::HeartbeatError { service: self.service.to_string(), instance: self.instance.to_string(), source: e, }), _ => Ok(()), } } pub async fn unregister(mut self) -> Result<(), ServiceError> { self.lease.revoke().await.context(ServiceUnregister { service: self.service.to_string(), instance: self.instance.to_string(), })?; Ok(()) } pub fn heartbeat_interval(&self) -> i64 { self.lease.ttl() } pub fn service(&self) -> &str { &self.service } pub fn instance(&self) -> &str { &self.instance } }
use crate::{ common::{ServiceUnregister, DEFAULT_SERVICE_TIMEOUT}, store::{StoreError, TimedLease}, ServiceError, }; use serde::{Deserialize, Serialize}; use snafu::ResultExt; #[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] pub struct Service { name: String, instance_id: String, endpoints: Vec<String>, } impl Service { pub(crate) fn new(options: ServiceConfig) -> Self { Self { name: options.name, instance_id: options.instance_id, endpoints: options.endpoints, } } pub fn name(&self) -> &str { &self.name } pub fn instance_id(&self) -> &str { &self.instance_id } pub fn endpoints(&self) -> &Vec<String> { &self.endpoints } } #[derive(Default, Debug)] pub struct ServiceConfigBuilder { name: Option<String>, instance_id: Option<String>, heartbeat_interval: Option<i64>, endpoints: Option<Vec<String>>, } impl ServiceConfigBuilder { pub fn build(self) -> ServiceConfig { let name = self.name.expect("Service name is mandatory"); let instance_id = self.instance_id.expect("Service instance ID is mandatory"); let heartbeat_interval = self.heartbeat_interval.unwrap_or(DEFAULT_SERVICE_TIMEOUT); let endpoints = self.endpoints.expect("Service endpoints are mandatory"); ServiceConfig { name, instance_id, heartbeat_interval, endpoints, } } pub fn with_name(mut self, name: impl Into<String>) -> Self { self.name = Some(name.into()); self } pub fn with_instance_id(mut self, instance_id: impl Into<String>) -> Self { self.instance_id = Some(instance_id.into()); self } pub fn with_heartbeat_interval(mut self, interval: i64) -> Self { assert!( interval > 0, "Heartbeat interval must be non-negative: {}", interval
> i64 { self.lease.ttl() } pub fn service(&self) -> &str { &self.service } pub fn instance(&self) -> &str { &self.instance } }
); self.heartbeat_interval = Some(interval); self } pub fn with_endpoints<S: AsRef<str>, E: AsRef<[S]>>(mut self, endpoints: E) -> Self { let eps: Vec<String> = endpoints .as_ref() .iter() .map(|e| e.as_ref().to_string()) .collect(); assert!(!eps.is_empty(), "Service must have at least one endpoint"); self.endpoints = Some(eps); self } } #[derive(Debug, Clone)] pub struct ServiceConfig { name: String, instance_id: String, heartbeat_interval: i64, endpoints: Vec<String>, } impl ServiceConfig { pub fn builder() -> ServiceConfigBuilder { ServiceConfigBuilder::default() } pub fn name(&self) -> &str { &self.name } pub fn instance_id(&self) -> &str { &self.instance_id } pub fn heartbeat_interval(&self) -> i64 { self.heartbeat_interval } pub fn endpoints(&self) -> Vec<String> { self.endpoints.clone() } } #[derive(Debug)] pub struct ServiceDescriptor { service: String, instance: String, lease: TimedLease, } impl ServiceDescriptor { pub(crate) fn new(service: &Service, lease: TimedLease) -> Self { Self { service: service.name().to_string(), instance: service.instance_id().to_string(), lease, } } pub async fn send_heartbeat(&mut self) -> Result<(), ServiceError> { match self.lease.keep_alive().await { Err(StoreError::LeaseLost { .. }) => Err(ServiceError::HeartbeatLost { service: self.service.to_string(), instance: self.instance.to_string(), }), Err(e) => Err(ServiceError::HeartbeatError { service: self.service.to_string(), instance: self.instance.to_string(), source: e, }), _ => Ok(()), } } pub async fn unregister(mut self) -> Result<(), ServiceError> { self.lease.revoke().await.context(ServiceUnregister { service: self.service.to_string(), instance: self.instance.to_string(), })?; Ok(()) } pub fn heartbeat_interval(&self) -
random
[ { "content": "fn get_container_mount() -> String {\n\n tempdir()\n\n .unwrap()\n\n .into_path()\n\n .as_path()\n\n .to_string_lossy()\n\n .to_string()\n\n}\n\n\n\nimpl Builder {\n\n pub fn new(num_nodes: u32) -> Self {\n\n assert!(\n\n num_nodes > 0 && num_nodes <= 5,\n\n \"Invalid number of ETCD nodes\"\n\n );\n\n\n\n Self { num_nodes }\n\n }\n\n\n\n pub async fn build(self) -> EtcdCluster {\n", "file_path": "tests/common/etcd.rs", "rank": 0, "score": 64677.10935036997 }, { "content": "struct ServiceController {\n\n descriptor: Arc<Mutex<Option<ServiceDescriptor>>>,\n\n hb_channel: Option<Sender<HeartbeatLoopCommand>>,\n\n ttl: i64,\n\n service: String,\n\n instance: String,\n\n}\n\n\n\nimpl ServiceController {\n\n fn new(descriptor: ServiceDescriptor) -> Self {\n\n let ttl = descriptor.heartbeat_interval();\n\n let service = descriptor.service().to_string();\n\n let instance = descriptor.instance().to_string();\n\n\n\n Self {\n\n ttl,\n\n descriptor: Arc::new(Mutex::new(Some(descriptor))),\n\n hb_channel: None,\n\n instance,\n\n service,\n", "file_path": "src/service.rs", "rank": 1, "score": 64639.033638008164 }, { "content": "struct ServiceRegistrator {\n\n registry: ServiceRegistry,\n\n config: ServiceConfig,\n\n}\n\n\n\nimpl ServiceRegistrator {\n\n fn new(registry: ServiceRegistry, config: ServiceConfig) -> Self {\n\n Self { registry, config }\n\n }\n\n\n\n async fn register_service(\n\n &mut self,\n\n register_on_faults: bool,\n\n ) -> Result<ServiceDescriptor, ServiceError> {\n\n loop {\n\n match self.registry.register_service(self.config.clone()).await {\n\n Ok(descriptor) => return Ok(descriptor),\n\n Err(e) => {\n\n if !register_on_faults {\n\n return Err(e);\n\n } else {\n\n // Repeat service registration.\n\n tokio::time::sleep(std::time::Duration::from_secs(3)).await;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/service.rs", "rank": 2, "score": 64639.033638008164 }, { "content": "struct ReinitializingState {}\n\n\n\nimpl Builder 
{\n\n pub fn new(service_config: ServiceConfig, registry_options: ServiceRegistryOptions) -> Self {\n\n Self {\n\n service_config,\n\n registry_options,\n\n entrypoint: None,\n\n initializer: None,\n\n shutdown_handler: None,\n\n hb_status_handler: None,\n\n hb_failure_policy: HeartbeatFailurePolicy::Panic,\n\n }\n\n }\n\n\n\n pub fn with_entrypoint(\n\n mut self,\n\n entrypoint: impl Future<Output = Result<(), String>> + Send + 'static,\n\n ) -> Self {\n\n self.entrypoint = Some(Box::pin(entrypoint));\n", "file_path": "src/service.rs", "rank": 3, "score": 57433.4169785724 }, { "content": "struct RunningState {\n\n service_descriptor: ServiceDescriptor,\n\n}\n", "file_path": "src/service.rs", "rank": 4, "score": 57433.4169785724 }, { "content": "struct NewState {}\n", "file_path": "src/service.rs", "rank": 5, "score": 57433.4169785724 }, { "content": "type HeartbeatStatusHandler = Box<dyn FnMut(HeartbeatStatus) + Send + 'static>;\n\n\n\npub struct Builder {\n\n service_config: ServiceConfig,\n\n registry_options: ServiceRegistryOptions,\n\n entrypoint: Option<AsyncHandler>,\n\n initializer: Option<AsyncHandler>,\n\n shutdown_handler: Option<ShutdownHandler>,\n\n hb_status_handler: Option<HeartbeatStatusHandler>,\n\n hb_failure_policy: HeartbeatFailurePolicy,\n\n}\n\n\n", "file_path": "src/service.rs", "rank": 6, "score": 51052.824321353204 }, { "content": "#[derive(Debug)]\n\nenum HeartbeatLoopCommand {\n\n Ok,\n\n StopHeartbeat(tokio::sync::oneshot::Sender<HeartbeatLoopCommand>),\n\n}\n\n\n", "file_path": "src/service.rs", "rank": 7, "score": 41556.40901782902 }, { "content": "fn parse_kv(kv: &KeyValue, parse_value: bool) -> Result<(String, Option<Value>), StoreError> {\n\n let key = kv.key_str().context(KeyXform)?.to_string();\n\n let value = if parse_value {\n\n Some(serde_json::from_slice::<Value>(kv.value()).context(SerializeValue)?)\n\n } else {\n\n None\n\n };\n\n\n\n Ok((key, value))\n\n}\n", "file_path": "src/store/watcher.rs", "rank": 8, "score": 
39689.65003016169 }, { "content": "type AsyncHandler = Pin<Box<dyn Future<Output = Result<(), String>> + Send + 'static>>;\n", "file_path": "src/service.rs", "rank": 9, "score": 35370.06595667506 }, { "content": "type ShutdownHandler = Pin<Box<dyn Future<Output = Result<(), String>> + Send + 'static>>;\n", "file_path": "src/service.rs", "rank": 10, "score": 35370.06595667506 }, { "content": "enum ServiceState {\n\n New(NewState),\n\n Running(RunningState),\n\n Reinitializing(ReinitializingState),\n\n Done,\n\n}\n\n\n\nimpl ServiceState {\n\n fn name(&self) -> &'static str {\n\n match self {\n\n Self::New(_) => \"New\",\n\n Self::Running(_) => \"Running\",\n\n Self::Reinitializing(_) => \"Reinitializing\",\n\n Self::Done => \"Done\",\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/service.rs", "rank": 11, "score": 28429.799979958676 }, { "content": "#[async_trait::async_trait]\n\ntrait ServiceStateStep<S> {\n\n async fn state_step(&mut self, state: S) -> Result<ServiceState, ServiceError>;\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl ServiceStateStep<NewState> for DynamicService {\n\n async fn state_step(&mut self, _state: NewState) -> Result<ServiceState, ServiceError> {\n\n let registry = ServiceRegistry::new(self.registry_options.clone()).await?;\n\n let mut service_registrator =\n\n ServiceRegistrator::new(registry, self.service_config.clone());\n\n\n\n // Invoke service-specific initializer once before registering the service.\n\n if let Some(initializer) = self.initializer.take() {\n\n initializer.await.unwrap();\n\n }\n\n\n\n // Prepare service descriptor for the next step.\n\n let service_descriptor = service_registrator.register_service(false).await?;\n\n self.service_registrator = Some(service_registrator);\n\n\n", "file_path": "src/service.rs", "rank": 12, "score": 26320.433517542984 }, { "content": " mut self,\n\n heartbeat_handler: impl FnMut(HeartbeatStatus) + Send + 'static,\n\n ) -> Self {\n\n self.hb_status_handler = 
Some(Box::new(heartbeat_handler));\n\n self\n\n }\n\n\n\n pub fn with_heartbeat_failure_policy(mut self, policy: HeartbeatFailurePolicy) -> Self {\n\n self.hb_failure_policy = policy;\n\n self\n\n }\n\n\n\n pub fn build(self) -> DynamicService {\n\n let entrypoint = self\n\n .entrypoint\n\n .expect(\"Service entrypoint must be provided\");\n\n\n\n if self.hb_failure_policy == HeartbeatFailurePolicy::Stop {\n\n assert!(\n\n self.shutdown_handler.is_some(),\n", "file_path": "src/service.rs", "rank": 13, "score": 24125.48718409192 }, { "content": " }?;\n\n\n\n if curr_state_name != new_state.name() {\n\n tracing::debug!(\n\n \"Service {}:{} changed state {} => {}\",\n\n self.service_config.name(),\n\n self.service_config.instance_id(),\n\n curr_state_name,\n\n new_state.name()\n\n )\n\n }\n\n\n\n self.current_state = Some(new_state);\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n fn notify_heartbeat_status(&mut self, status: HeartbeatStatus) {\n\n self.hb_status_handler.as_mut().map(|f| f(status));\n", "file_path": "src/service.rs", "rank": 14, "score": 24122.539173699777 }, { "content": " }\n\n\n\n #[instrument(level = \"debug\", skip(self), err)]\n\n async fn shutdown_service(&mut self) -> Result<(), ServiceError> {\n\n if self.shutdown_handler.is_none() {\n\n Ok(())\n\n } else {\n\n self.shutdown_handler\n\n .take()\n\n .unwrap()\n\n .await\n\n .map_err(|e| ServiceError::ServiceShutdown {\n\n service: self.service_config.name().to_string(),\n\n instance: self.service_config.instance_id().to_string(),\n\n error: e,\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/service.rs", "rank": 15, "score": 24122.521309333686 }, { "content": " }\n\n }\n\n\n\n /// Start heartbeat loop for the service.\n\n /// In case this function returns error, it's always an indication of lost heartbet\n\n /// and the caller must take an appropriate action in response to heartbeat loss.\n\n async fn start_heartbeat(&mut self) -> Result<(), ServiceError> {\n\n assert!(self.hb_channel.is_none(), 
\"Heartbeat loop already started\");\n\n\n\n let (sender, mut receiver) = channel::<HeartbeatLoopCommand>(1);\n\n\n\n self.hb_channel = Some(sender);\n\n\n\n let ttl: u64 = self.ttl as u64;\n\n let service = self.service.clone();\n\n let instance = self.instance.clone();\n\n let descr = Arc::clone(&self.descriptor);\n\n\n\n tracing::info!(\n\n \"Service {}:{} starting heartbeat loop with interval {} secs.\",\n", "file_path": "src/service.rs", "rank": 16, "score": 24121.28506364593 }, { "content": " self\n\n }\n\n\n\n pub fn with_initializer(\n\n mut self,\n\n initializer: impl Future<Output = Result<(), String>> + Send + 'static,\n\n ) -> Self {\n\n self.initializer = Some(Box::pin(initializer));\n\n self\n\n }\n\n\n\n pub fn with_shutdown_handler(\n\n mut self,\n\n shutdown_handler: impl Future<Output = Result<(), String>> + Send + 'static,\n\n ) -> Self {\n\n self.shutdown_handler = Some(Box::pin(shutdown_handler));\n\n self\n\n }\n\n\n\n pub fn with_heartbeat_status_handler(\n", "file_path": "src/service.rs", "rank": 17, "score": 24121.1032966711 }, { "content": " \"Service {}:{} has lost heartbeat.\",\n\n self.service_config.name(),\n\n self.service_config.instance_id()\n\n ),\n\n HeartbeatFailurePolicy::Stop => {\n\n self.shutdown_service().await?;\n\n Ok(ServiceState::Done)\n\n },\n\n HeartbeatFailurePolicy::Restart => Ok(\n\n ServiceState::Reinitializing(ReinitializingState {})\n\n ),\n\n }\n\n },\n\n // Wait for the completion of service's main loop.\n\n _ = &mut self.entrypoint => {\n\n controller.stop_heartbeat().await;\n\n controller.unregister().await;\n\n\n\n Ok(ServiceState::Done)\n\n }\n", "file_path": "src/service.rs", "rank": 18, "score": 24119.86802311658 }, { "content": "use crate::{\n\n common::HeartbeatFailurePolicy, HeartbeatStatus, ServiceConfig, ServiceDescriptor,\n\n ServiceError, ServiceRegistry, ServiceRegistryOptions,\n\n};\n\nuse futures::Future;\n\nuse std::{pin::Pin, sync::Arc};\n\nuse tokio::sync::{\n\n mpsc::{channel, Sender},\n\n 
Mutex,\n\n};\n\n\n\nuse tracing::instrument;\n\n\n", "file_path": "src/service.rs", "rank": 19, "score": 24119.812026997897 }, { "content": " heartbeat_lost_time.take();\n\n tracing::info!(\n\n \"Service {}:{} successfully recovered heartbeat within TTL interval\",\n\n service,\n\n instance,\n\n );\n\n }\n\n }\n\n }\n\n // Service is being unregistered, break the heartbeat loop.\n\n None => {\n\n tracing::info!(\n\n \"Service {}:{} is being unregistered, stopping heartbeat loop\",\n\n service,\n\n instance,\n\n );\n\n receiver.close();\n\n break;\n\n }\n\n }\n", "file_path": "src/service.rs", "rank": 20, "score": 24119.8048726081 }, { "content": " self.notify_heartbeat_status(HeartbeatStatus::Restored);\n\n\n\n Ok(ServiceState::Running(RunningState { service_descriptor }))\n\n }\n\n}\n\n\n\nimpl DynamicService {\n\n pub async fn start(mut self) -> Result<(), ServiceError> {\n\n loop {\n\n let curr_state = self\n\n .current_state\n\n .take()\n\n .expect(\"Current state must be defined\");\n\n let curr_state_name = curr_state.name();\n\n\n\n let new_state = match curr_state {\n\n ServiceState::New(state) => self.state_step(state).await,\n\n ServiceState::Running(state) => self.state_step(state).await,\n\n ServiceState::Reinitializing(state) => self.state_step(state).await,\n\n ServiceState::Done => break,\n", "file_path": "src/service.rs", "rank": 21, "score": 24119.784183325744 }, { "content": " tracing::error!(\n\n \"Service {}:{} lost heartbeat\",\n\n service,\n\n instance,\n\n );\n\n return Err(e);\n\n }\n\n _ => {\n\n if heartbeat_lost_time.is_none() {\n\n heartbeat_lost_time = Some(std::time::Instant::now());\n\n\n\n tracing::error!(\n\n \"Service {}:{} failed to send heartbeat, error={}\",\n\n service,\n\n instance,\n\n e,\n\n );\n\n } else {\n\n // Check whether TTL is elapsed and report error in case\n\n // all heartbeat update attemps failed within TTL interval.\n", "file_path": "src/service.rs", "rank": 22, "score": 24119.222422468232 }, { "content": " let 
elapsed =\n\n heartbeat_lost_time.as_ref().unwrap().elapsed();\n\n if elapsed.as_secs() > ttl {\n\n tracing::error!(\n\n \"Service {}:{} lost heartbeat due to exceeding heartbeat TTL ({} sec)\",\n\n service,\n\n instance,\n\n ttl,\n\n );\n\n return Err(ServiceError::HeartbeatLost {\n\n service,\n\n instance,\n\n });\n\n }\n\n }\n\n }\n\n }\n\n } else {\n\n // Heartbeat succeeded, reset the failure time.\n\n if heartbeat_lost_time.is_some() {\n", "file_path": "src/service.rs", "rank": 23, "score": 24117.622956848834 }, { "content": " Ok(ServiceState::Running(RunningState { service_descriptor }))\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl ServiceStateStep<RunningState> for DynamicService {\n\n async fn state_step(&mut self, state: RunningState) -> Result<ServiceState, ServiceError> {\n\n let mut controller = ServiceController::new(state.service_descriptor);\n\n\n\n tokio::select! {\n\n // Wait for heartbeat loos and apply recovery action.\n\n r = controller.start_heartbeat() => {\n\n // Service loop must complete only due to errors.\n\n r.expect_err(\"Abnormal heartbeat loop completion\");\n\n\n\n self.notify_heartbeat_status(HeartbeatStatus::Lost);\n\n\n\n // Apply heartbeat loss policy.\n\n match self.hb_failure_policy {\n\n HeartbeatFailurePolicy::Panic => panic!(\n", "file_path": "src/service.rs", "rank": 24, "score": 24117.486539919388 }, { "content": " \"Shutdown handler is mandatory for 'Stop' heartbeat failure policy\"\n\n );\n\n }\n\n\n\n DynamicService {\n\n service_config: self.service_config,\n\n registry_options: self.registry_options,\n\n entrypoint,\n\n initializer: self.initializer,\n\n hb_failure_policy: self.hb_failure_policy,\n\n shutdown_handler: self.shutdown_handler,\n\n hb_status_handler: self.hb_status_handler,\n\n current_state: Some(ServiceState::New(NewState {})),\n\n service_registrator: None,\n\n }\n\n }\n\n}\n\n\n\npub struct DynamicService {\n\n entrypoint: AsyncHandler,\n", "file_path": "src/service.rs", "rank": 25, "score": 
24116.623104731534 }, { "content": " }\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl ServiceStateStep<ReinitializingState> for DynamicService {\n\n async fn state_step(\n\n &mut self,\n\n _state: ReinitializingState,\n\n ) -> Result<ServiceState, ServiceError> {\n\n // We have encountered errors during heartbeat loop, so take into\n\n // account possible ETCd accessibility problems when re-registering the service.\n\n let service_descriptor = self\n\n .service_registrator\n\n .as_mut()\n\n .unwrap()\n\n .register_service(true)\n\n .await?;\n\n\n\n // Service successfully re-registered, notify heartbeat handler.\n", "file_path": "src/service.rs", "rank": 26, "score": 24115.80929777253 }, { "content": " self.service,\n\n self.instance,\n\n ttl\n\n );\n\n\n\n let mut heartbeat_lost_time = None;\n\n\n\n let h = tokio::spawn(async move {\n\n loop {\n\n match descr.lock().await.as_mut() {\n\n Some(d) => {\n\n // We care only about TTL loss reported explicitly, as it's\n\n // the only reliable trigger for heartbeat-loss related actions.\n\n // Direct I/O errors upon heartbeat update don't count as a valid\n\n // reason to judge about heartbeat loss unless all attempts to report\n\n // heartbeat failed within TTL time (measured since the moment of\n\n // the first heartbeat update failure).\n\n if let Err(e) = d.send_heartbeat().await {\n\n match e {\n\n ServiceError::HeartbeatLost { .. } => {\n", "file_path": "src/service.rs", "rank": 27, "score": 24115.535928215533 }, { "content": "\n\n // Waiti till either TTL update timeout triggers or request to stop\n\n // heartbeat loop is received.\n\n tokio::select! 
{\n\n _ = tokio::time::sleep(std::time::Duration::from_secs(ttl as u64 / 2)) => {},\n\n\n\n cmd = receiver.recv() => {\n\n match cmd.unwrap() {\n\n HeartbeatLoopCommand::StopHeartbeat(tx) => {\n\n tracing::info!(\n\n \"Service {}:{} stopped heartbeat loop\",\n\n service, instance,\n\n );\n\n tx.send(HeartbeatLoopCommand::Ok).expect(\"Command sender disappeared\");\n\n break;\n\n },\n\n c => panic!(\"Unexpected command for heartbeat loop control: {:?}\", c),\n\n }\n\n }\n\n }\n", "file_path": "src/service.rs", "rank": 28, "score": 24115.50944865028 }, { "content": " initializer: Option<AsyncHandler>,\n\n shutdown_handler: Option<ShutdownHandler>,\n\n hb_status_handler: Option<HeartbeatStatusHandler>,\n\n service_config: ServiceConfig,\n\n registry_options: ServiceRegistryOptions,\n\n hb_failure_policy: HeartbeatFailurePolicy,\n\n current_state: Option<ServiceState>,\n\n service_registrator: Option<ServiceRegistrator>,\n\n}\n\n\n", "file_path": "src/service.rs", "rank": 29, "score": 24114.370166492645 }, { "content": " }\n\n Ok(())\n\n });\n\n\n\n // Wait till heartbeat loop is explicitly stopped or error occurs during heartbeat update.\n\n h.await.unwrap()\n\n }\n\n\n\n /// Stop the heartbeat loop. 
This operation is idempotent, so it's possible to stop\n\n /// the heartbeat loop which is already stopped.\n\n async fn stop_heartbeat(&mut self) {\n\n if let Some(channel) = self.hb_channel.take() {\n\n let (tx, rx) = tokio::sync::oneshot::channel();\n\n // Take into account simultaneous heartbeat loop closure, which might happen either\n\n // in response to explicit service unregistration request or implicutly upon\n\n // heartbeat delivery failures.\n\n if channel\n\n .send(HeartbeatLoopCommand::StopHeartbeat(tx))\n\n .await\n\n .is_ok()\n", "file_path": "src/service.rs", "rank": 30, "score": 24113.565283196836 }, { "content": " {\n\n // Channel might have been closed straight after we have sent the message,\n\n // so be careful.\n\n if rx.await.is_err() {\n\n tracing::warn!(\"Heartbeat loop already stopped\");\n\n }\n\n }\n\n }\n\n }\n\n\n\n /// Unregister the service and free the service descriptor.\n\n async fn unregister(self) {\n\n self.descriptor.lock().await.take();\n\n }\n\n}\n", "file_path": "src/service.rs", "rank": 31, "score": 24113.473312779315 }, { "content": "use crate::{common::ServiceKeyPathBuilder, Service, ServiceError, ServiceRegistry};\n\n\n\n#[derive(Default, Debug)]\n\npub struct ServiceRegistrySearchRequest {\n\n service: Option<String>,\n\n instance_id: Option<String>,\n\n}\n\n\n\nimpl ServiceRegistrySearchRequest {\n\n pub fn with_service(mut self, name: impl Into<String>) -> Self {\n\n self.service = Some(name.into());\n\n self\n\n }\n\n\n\n pub fn with_instance_id(mut self, instance_id: impl Into<String>) -> Self {\n\n self.instance_id = Some(instance_id.into());\n\n self\n\n }\n\n}\n\n\n", "file_path": "src/search.rs", "rank": 35, "score": 26.81761138257808 }, { "content": "}\n\n\n\n#[derive(Debug)]\n\npub enum HeartbeatStatus {\n\n Lost,\n\n Restored,\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub struct ServiceKeyPathBuilder {\n\n service: Option<String>,\n\n instance_id: Option<String>,\n\n}\n\n\n\nimpl ServiceKeyPathBuilder {\n\n 
pub fn new(service: impl Into<String>, instance_id: impl Into<String>) -> Self {\n\n Self {\n\n service: Some(service.into()),\n\n instance_id: Some(instance_id.into()),\n\n }\n", "file_path": "src/common.rs", "rank": 38, "score": 24.21278263102186 }, { "content": " }\n\n\n\n pub fn with_service(mut self, service: impl Into<String>) -> Self {\n\n self.service = Some(service.into());\n\n self\n\n }\n\n\n\n pub fn with_service_instance(mut self, instance_id: impl Into<String>) -> Self {\n\n self.instance_id = Some(instance_id.into());\n\n self\n\n }\n\n\n\n pub fn build(self) -> Result<String, ServiceError> {\n\n match (self.service, self.instance_id) {\n\n (Some(s), Some(i)) => Ok(format!(\"{}/{}/{}\", SERVICES_NAME_DOMAIN, s, i)),\n\n (Some(s), None) => Ok(format!(\"{}/{}\", SERVICES_NAME_DOMAIN, s)),\n\n (None, Some(_)) => Err(ServiceError::InvalidArgument {\n\n error: String::from(\"Service name is mandatory for service instance\"),\n\n }),\n\n _ => Ok(String::from(SERVICES_NAME_DOMAIN)),\n\n }\n\n }\n\n}\n", "file_path": "src/common.rs", "rank": 39, "score": 23.30552174088058 }, { "content": " /// Failed to shutdown the service\n\n #[snafu(display(\"Failed to shutdown service {}:{}. 
Error={}\", service, instance, error))]\n\n ServiceShutdown {\n\n service: String,\n\n instance: String,\n\n error: String,\n\n },\n\n}\n\n\n\npub const DEFAULT_SERVICE_TIMEOUT: i64 = 5;\n\n/// Top-level ETCD key prefix for services.\n\nconst SERVICES_NAME_DOMAIN: &str = \"/mayastor.io/services\";\n\n\n\npub const MAX_MISSED_HEARTBEATS: i64 = 3;\n\n\n\n#[derive(PartialEq, Debug)]\n\npub enum HeartbeatFailurePolicy {\n\n Panic,\n\n Stop,\n\n Restart,\n", "file_path": "src/common.rs", "rank": 44, "score": 19.79640105309533 }, { "content": " pub async fn register_service(\n\n &mut self,\n\n options: ServiceConfig,\n\n ) -> Result<ServiceDescriptor, ServiceError> {\n\n // Create the lease with heartbeat TTL.\n\n let lease = self\n\n .kv_store\n\n .grant_lease(MAX_MISSED_HEARTBEATS * options.heartbeat_interval())\n\n .await\n\n .context(ServiceRegister {\n\n service: options.name().to_string(),\n\n instance: options.instance_id().to_string(),\n\n })?;\n\n\n\n // Populate service instance in object store.\n\n let service_key = ServiceKeyPathBuilder::new(options.name(), options.instance_id())\n\n .build()\n\n .unwrap();\n\n\n\n let service = Service::new(options);\n", "file_path": "src/registry.rs", "rank": 45, "score": 17.521475819157732 }, { "content": "use crate::{\n\n common::{ServiceKeyPathBuilder, ServiceRegister, MAX_MISSED_HEARTBEATS},\n\n store::KeyValueStore,\n\n Service, ServiceConfig, ServiceDescriptor, ServiceError, ServiceWatcher,\n\n};\n\nuse snafu::ResultExt;\n\n\n\npub struct ServiceRegistry {\n\n pub(crate) kv_store: KeyValueStore,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ServiceRegistryOptions {\n\n endpoints: Vec<String>,\n\n}\n\n\n\n#[derive(Default, Debug)]\n\npub struct ServiceRegistryOptionsBuilder {\n\n endpoints: Option<Vec<String>>,\n\n}\n", "file_path": "src/registry.rs", "rank": 46, "score": 16.007021524666136 }, { "content": "\n\nimpl ServiceRegistryOptionsBuilder {\n\n pub fn build(mut self) -> ServiceRegistryOptions {\n\n let 
endpoints = self\n\n .endpoints\n\n .take()\n\n .expect(\"ETCD endpoints must be configured\");\n\n\n\n ServiceRegistryOptions { endpoints }\n\n }\n\n\n\n pub fn with_endpoints<E: AsRef<str>, S: AsRef<[E]>>(mut self, endpoints: S) -> Self {\n\n let eps: Vec<String> = endpoints\n\n .as_ref()\n\n .iter()\n\n .map(|e| e.as_ref().to_string())\n\n .collect();\n\n self.endpoints = Some(eps);\n\n self\n\n }\n", "file_path": "src/registry.rs", "rank": 47, "score": 15.74008443208076 }, { "content": "use crate::store::common::{LeaseKeepAlive, LeaseRevoke, StoreError};\n\nuse etcd_client::{LeaseClient, LeaseKeepAliveStream, LeaseKeeper};\n\nuse snafu::ResultExt;\n\nuse std::fmt;\n\n\n\npub struct TimedLease {\n\n id: i64,\n\n client: LeaseClient,\n\n keeper: LeaseKeeper,\n\n response_stream: LeaseKeepAliveStream,\n\n ttl: i64,\n\n}\n\n\n\nimpl fmt::Debug for TimedLease {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n f.debug_struct(\"TimedLease\")\n\n .field(\"id\", &self.id)\n\n .field(\"ttl\", &self.ttl)\n\n .finish()\n\n }\n", "file_path": "src/store/lease.rs", "rank": 48, "score": 15.308239134699058 }, { "content": "use crate::{\n\n common::{ServiceDeserialize, WatchService},\n\n store::{StoreWatchEvent, StoreWatcher},\n\n Service, ServiceError,\n\n};\n\nuse snafu::ResultExt;\n\nuse std::{collections::VecDeque, convert::TryFrom};\n\n\n\npub struct ServiceWatcher {\n\n watcher: StoreWatcher,\n\n active: bool,\n\n pending_events: VecDeque<StoreWatchEvent>,\n\n}\n\n\n\nimpl ServiceWatcher {\n\n pub(crate) fn new(watcher: StoreWatcher) -> Self {\n\n Self {\n\n watcher,\n\n active: true,\n\n pending_events: VecDeque::new(),\n", "file_path": "src/watcher.rs", "rank": 49, "score": 15.110855666280052 }, { "content": "use crate::store::StoreWatchEvent;\n\nuse etcd_client::{EventType, KeyValue, WatchStream};\n\nuse serde_json::Value;\n\nuse snafu::ResultExt;\n\n\n\nuse super::StoreError;\n\nuse crate::store::common::{KeyXform, SerializeValue};\n\n\n\npub struct 
StoreWatcher {\n\n key: String,\n\n watch_stream: WatchStream,\n\n}\n\n\n\nimpl StoreWatcher {\n\n pub fn new(key: String, watch_stream: WatchStream) -> Self {\n\n Self { key, watch_stream }\n\n }\n\n\n\n pub async fn watch(&mut self) -> Result<Vec<StoreWatchEvent>, StoreError> {\n\n loop {\n", "file_path": "src/store/watcher.rs", "rank": 50, "score": 14.549666494195424 }, { "content": "use composer::{Builder as ComposeBuilder, ComposeTest, ContainerSpec};\n\nuse std::collections::HashMap;\n\nuse tempfile::tempdir;\n\n\n\npub struct EtcdCluster {\n\n compose_test: ComposeTest,\n\n nodes: Vec<EtcdNode>,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct EtcdNode {\n\n pub id: u32,\n\n pub name: String,\n\n pub ip_address: String,\n\n}\n\n\n\npub struct Builder {\n\n num_nodes: u32,\n\n}\n\n\n\nconst ETCD_IMAGE: &str = \"quay.io/coreos/etcd:v3.5.1\";\n\nconst PORT_RANGE_START: u32 = 32379; //55379;\n\n\n", "file_path": "tests/common/etcd.rs", "rank": 51, "score": 13.780606824431286 }, { "content": "use crate::store::StoreError;\n\nuse snafu::Snafu;\n\n\n\n#[derive(Debug, Snafu)]\n\n#[snafu(visibility = \"pub\")]\n\npub enum ServiceError {\n\n /// Service does not exist.\n\n #[snafu(display(\"Service {} not found\", service))]\n\n NotFound { service: String },\n\n\n\n /// Can't open a connection with the underlying key-value store.\n\n #[snafu(display(\"Can't connect to key-value store: {}\", source))]\n\n StoreInitialize {\n\n source: etcd_client::Error,\n\n e: String,\n\n },\n\n\n\n /// Can't register new service\n\n #[snafu(display(\n\n \"Failed to register service {} with instance {}. 
Error='{}'\",\n", "file_path": "src/common.rs", "rank": 52, "score": 13.743714980289319 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum ServiceEvent {\n\n ServiceAdded { service: Service },\n\n ServiceRemoved { key: String },\n\n}\n\n\n\nimpl TryFrom<StoreWatchEvent> for ServiceEvent {\n\n type Error = ServiceError;\n\n\n\n fn try_from(e: StoreWatchEvent) -> Result<Self, Self::Error> {\n\n match e {\n\n StoreWatchEvent::Put { key: _, value } => {\n\n let service =\n\n serde_json::from_value::<Service>(value).context(ServiceDeserialize {})?;\n\n Ok(ServiceEvent::ServiceAdded { service })\n\n }\n", "file_path": "src/watcher.rs", "rank": 53, "score": 13.585849896486723 }, { "content": "}\n\n\n\nimpl TimedLease {\n\n pub async fn new(id: i64, ttl: i64, mut client: LeaseClient) -> Result<Self, StoreError> {\n\n let (keeper, response_stream) = client\n\n .keep_alive(id)\n\n .await\n\n .context(LeaseKeepAlive { lease: id })?;\n\n\n\n Ok(Self {\n\n id,\n\n ttl,\n\n client,\n\n keeper,\n\n response_stream,\n\n })\n\n }\n\n\n\n pub fn id(&self) -> i64 {\n\n self.id\n", "file_path": "src/store/lease.rs", "rank": 54, "score": 12.957484988245735 }, { "content": " for _i in 1..7 {\n\n tokio::time::sleep(std::time::Duration::from_secs(1)).await;\n\n }\n\n Ok(())\n\n }\n\n\n\n // Helper to stop ETCD node.\n\n async fn stop_node(mut cluster: EtcdCluster) -> EtcdCluster {\n\n tokio::time::sleep(std::time::Duration::from_secs(2)).await;\n\n cluster.stop_node(0).await;\n\n cluster\n\n }\n\n\n\n let service_config = ServiceConfig::builder()\n\n .with_name(SERVICE_NAME)\n\n .with_instance_id(\"i1\")\n\n .with_endpoints([\"http://1.2.3.4:8080\"])\n\n .with_heartbeat_interval(1)\n\n .build();\n\n\n", "file_path": "tests/faults.rs", "rank": 55, "score": 12.89523028992802 }, { "content": "}\n\n\n\nimpl ServiceRegistryOptions {\n\n pub fn builder() -> ServiceRegistryOptionsBuilder {\n\n ServiceRegistryOptionsBuilder::default()\n\n }\n\n}\n\n\n\n/// Main Service Registry 
abstraction. Represents a set of services identified by\n\n/// the (name, instance) tuple.\n\n/// Allows registering services, searching for services and watching for added/removed\n\n/// services.\n\nimpl ServiceRegistry {\n\n /// Get service registry instance.\n\n pub async fn new(options: ServiceRegistryOptions) -> Result<Self, ServiceError> {\n\n let kv_store = KeyValueStore::new(&options.endpoints).await.unwrap();\n\n\n\n Ok(Self { kv_store })\n\n }\n\n\n", "file_path": "src/registry.rs", "rank": 56, "score": 12.754613260621404 }, { "content": " source: StoreError,\n\n },\n\n\n\n /// Heartbeat lost.\n\n #[snafu(display(\"Service {}:{} lost heartbeat\", service, instance,))]\n\n HeartbeatLost { service: String, instance: String },\n\n\n\n /// Can't send heartbeat.\n\n #[snafu(display(\n\n \"Service {}:{} failed to send heartbeat, error='{}'\",\n\n service,\n\n instance,\n\n source\n\n ))]\n\n HeartbeatError {\n\n service: String,\n\n instance: String,\n\n source: StoreError,\n\n },\n\n\n", "file_path": "src/common.rs", "rank": 57, "score": 12.30204910964045 }, { "content": "use etcd_client::{Client, GetOptions, PutOptions, WatchOptions};\n\n\n\nuse crate::store::{\n\n common::{Connect, GetPrefix, LeaseGrant, Put, SerializeValue, WatchCreate},\n\n StoreError, StoreWatcher, TimedLease,\n\n};\n\nuse serde::Serialize;\n\nuse serde_json::Value;\n\nuse snafu::ResultExt;\n\n\n\npub struct KeyValueStore {\n\n client: Client,\n\n}\n\n\n\n/// Abstraction for a key-value store that allows storing/loading JSON-encoded values\n\n/// and supports events watching.\n\nimpl KeyValueStore {\n\n pub async fn new<E: AsRef<str>, S: AsRef<[E]>>(endpoints: S) -> Result<Self, StoreError> {\n\n let client = Client::connect(endpoints, None).await.context(Connect {})?;\n\n Ok(Self { client })\n", "file_path": "src/store/store.rs", "rank": 58, "score": 12.19039220400877 }, { "content": "pub mod etcd;\n\n\n\nuse dynservice::ServiceRegistryOptions;\n\nuse etcd::EtcdCluster;\n\n\n\nimpl 
EtcdCluster {\n\n // Helper function to generate ServiceRegistry config for this ETCD cluster.\n\n pub fn as_service_registry_options(&self) -> ServiceRegistryOptions {\n\n let etcd_endpoints = self\n\n .get_nodes()\n\n .into_iter()\n\n .map(|n| n.ip_address)\n\n .collect::<Vec<_>>();\n\n\n\n ServiceRegistryOptions::builder()\n\n .with_endpoints(&etcd_endpoints)\n\n .build()\n\n }\n\n}\n", "file_path": "tests/common/mod.rs", "rank": 59, "score": 12.068686902163298 }, { "content": "impl ServiceRegistry {\n\n pub async fn search(\n\n &mut self,\n\n req: ServiceRegistrySearchRequest,\n\n ) -> Result<Vec<Service>, ServiceError> {\n\n let mut builder = ServiceKeyPathBuilder::default();\n\n\n\n if let Some(service) = req.service {\n\n builder = builder.with_service(service);\n\n }\n\n\n\n if let Some(instance_id) = req.instance_id {\n\n builder = builder.with_service_instance(instance_id);\n\n }\n\n\n\n let path = builder.build()?;\n\n let res =\n\n self.kv_store\n\n .get_prefix(path)\n\n .await\n", "file_path": "src/search.rs", "rank": 60, "score": 11.790263780632305 }, { "content": " // Service entrypoint that simulates some short work.\n\n async fn service_main() -> Result<(), String> {\n\n MAIN_CALLS.fetch_add(1, Ordering::SeqCst);\n\n for _i in 1..3 {\n\n tokio::time::sleep(std::time::Duration::from_secs(1)).await;\n\n }\n\n Ok(())\n\n }\n\n\n\n // Service initializer.\n\n async fn service_init() -> Result<(), String> {\n\n INIT_CALLS.fetch_add(1, Ordering::SeqCst);\n\n Ok(())\n\n }\n\n\n\n // Start a service and check that entrypoint is called.\n\n let service_config = ServiceConfig::builder()\n\n .with_name(SERVICE_NAME)\n\n .with_instance_id(\"i1\")\n\n .with_endpoints([\"http://1.2.3.4:8080\"])\n", "file_path": "tests/generic.rs", "rank": 61, "score": 11.775902988271447 }, { "content": "mod common;\n\nmod coreobjs;\n\nmod registry;\n\nmod search;\n\npub mod service;\n\nmod store;\n\nmod watcher;\n\n\n\npub use coreobjs::{Service, ServiceConfig, 
ServiceConfigBuilder, ServiceDescriptor};\n\n\n\npub use registry::{ServiceRegistry, ServiceRegistryOptions};\n\n\n\npub use search::ServiceRegistrySearchRequest;\n\n\n\npub use watcher::{ServiceEvent, ServiceWatcher};\n\n\n\npub use common::{HeartbeatFailurePolicy, HeartbeatStatus, ServiceError};\n", "file_path": "src/lib.rs", "rank": 62, "score": 11.387592888086852 }, { "content": " Ok(())\n\n }\n\n\n\n // Service entrypoint that simulates some short work.\n\n async fn service_shutdown() -> Result<(), String> {\n\n SHUTDOWN_CALLS.fetch_add(1, Ordering::SeqCst);\n\n Ok(())\n\n }\n\n\n\n // Helper to stop ETCD cluster.\n\n async fn stop_cluster(mut cluster: EtcdCluster) -> EtcdCluster {\n\n tokio::time::sleep(std::time::Duration::from_secs(2)).await;\n\n cluster.stop().await;\n\n cluster\n\n }\n\n\n\n let service_config = ServiceConfig::builder()\n\n .with_name(SERVICE_NAME)\n\n .with_instance_id(\"i1\")\n\n .with_endpoints([\"http://1.2.3.4:8080\"])\n", "file_path": "tests/faults.rs", "rank": 63, "score": 10.98786332072234 }, { "content": "\n\n tokio::time::sleep(std::time::Duration::from_secs(1)).await;\n\n\n\n let opts = ServiceConfig::builder()\n\n .with_name(SERVICE_NAME)\n\n .with_instance_id(\"i1\")\n\n .with_endpioints([\"http://1.2.3.4:8080\"])\n\n .build();\n\n println!(\"{:?}\", opts);\n\n let mut service_descriptor = registry.register_service(opts).await.unwrap();\n\n println!(\"Initialized !\");\n\n\n\n // Start heartbeat loop.\n\n let h = tokio::spawn(async move {\n\n for _i in 1..5 {\n\n println!(\"*\");\n\n service_descriptor\n\n .send_heartbeat()\n\n .await\n\n .expect(\"Heartbeat update failed\");\n", "file_path": "test_app.rs", "rank": 64, "score": 10.873210888521355 }, { "content": "use dynservice::{service::Builder, HeartbeatFailurePolicy, ServiceConfig};\n\nuse std::sync::atomic::{AtomicU64, Ordering};\n\n\n\nmod common;\n\nuse common::etcd::EtcdCluster;\n\n\n\nconst SERVICE_NAME: &str = \"test_service\";\n\n\n\nasync fn get_etcd_cluster() -> 
EtcdCluster {\n\n let mut cluster = common::etcd::Builder::new(3).build().await;\n\n cluster.start().await;\n\n cluster\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_etcd_node_failure() {\n\n let cluster = get_etcd_cluster().await;\n\n\n\n // Service entrypoint that simulates some short work.\n\n async fn service_main() -> Result<(), String> {\n", "file_path": "tests/faults.rs", "rank": 65, "score": 10.789877409048534 }, { "content": " tokio::time::sleep(std::time::Duration::from_secs(2)).await;\n\n }\n\n println!(\"> Unregistering service\");\n\n service_descriptor\n\n .unregister()\n\n .await\n\n .expect(\"Faield to unregister service\");\n\n });\n\n\n\n // Search the registry and lookup our service.\n\n let req = ServiceRegistrySearchRequest::default().with_service(SERVICE_NAME);\n\n let mut services = registry.search(req).await.unwrap();\n\n assert_eq!(services.len(), 1, \"No registered service found\");\n\n let service = services.get(0).unwrap();\n\n assert_eq!(service.name(), SERVICE_NAME, \"Service name mismatches\");\n\n\n\n // Wait for the heartbeat loop to complete and check if the service is removed.\n\n println!(\"* Waiting for heartbeat loop to complete\");\n\n h.await.unwrap();\n\n println!(\"* Heartbeat loop completed\");\n\n let req = ServiceRegistrySearchRequest::default().with_service(SERVICE_NAME);\n\n services = registry.search(req).await.unwrap();\n\n assert!(services.is_empty(), \"Service still exists after removal\");\n\n}\n", "file_path": "test_app.rs", "rank": 66, "score": 10.203989441934748 }, { "content": "use serde_json::Value;\n\nuse snafu::Snafu;\n\n\n\n#[derive(Snafu, Debug)]\n\n#[snafu(visibility = \"pub\")]\n\npub enum StoreError {\n\n /// Failed to connect to object store.\n\n #[snafu(display(\"Failed to connect to object store store. Error={}\", source))]\n\n Connect { source: etcd_client::Error },\n\n\n\n /// Failed to serialize value to JSON.\n\n #[snafu(display(\"Failed to serialize value. 
Error={}\", source))]\n\n SerializeValue { source: serde_json::Error },\n\n\n\n /// Failed to transform the key into string.\n\n #[snafu(display(\"Failed to transform key into string\"))]\n\n KeyXform { source: etcd_client::Error },\n\n\n\n /// Failed to store value.\n\n #[snafu(display(\n", "file_path": "src/store/common.rs", "rank": 67, "score": 10.126693900025806 }, { "content": " .with_heartbeat_interval(1)\n\n .build();\n\n\n\n let service = Builder::new(service_config, cluster.as_service_registry_options())\n\n .with_entrypoint(service_main())\n\n .with_shutdown_handler(service_shutdown())\n\n .with_heartbeat_failure_policy(HeartbeatFailurePolicy::Stop)\n\n .build();\n\n\n\n let (res, _) = tokio::join!(service.start(), stop_cluster(cluster),);\n\n\n\n // Service should complete gracefully and shutdown handler must have been called.\n\n res.expect(\"Service did not complete gracefully after ETCD cluster failure\");\n\n\n\n assert_eq!(\n\n 1,\n\n SHUTDOWN_CALLS.load(Ordering::Relaxed),\n\n \"Incorrect number of service entry point invocations\"\n\n );\n\n}\n", "file_path": "tests/faults.rs", "rank": 68, "score": 10.066817731223171 }, { "content": " }\n\n\n\n pub async fn put<V: Serialize>(\n\n &mut self,\n\n key: impl Into<String>,\n\n value: &V,\n\n lease: &TimedLease,\n\n ) -> Result<(), StoreError> {\n\n let val = serde_json::to_value(value).context(SerializeValue {})?;\n\n let payload = serde_json::to_vec(&val).context(SerializeValue {})?;\n\n let options = PutOptions::new().with_lease(lease.id());\n\n let key = key.into();\n\n\n\n self.client\n\n .put(key.to_string(), payload, Some(options))\n\n .await\n\n .context(Put {\n\n key,\n\n value: serde_json::to_string(&val).context(SerializeValue {})?,\n\n })?;\n", "file_path": "src/store/store.rs", "rank": 69, "score": 9.956879345609433 }, { "content": "use dynservice::{\n\n ServiceConfig, ServiceEvent, ServiceRegistry, ServiceRegistryOptions,\n\n ServiceRegistrySearchRequest,\n\n};\n\n\n\nconst SERVICE_NAME: 
&str = \"testService\";\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n println!(\"Hello, world!\");\n\n let options = ServiceRegistryOptions::builder()\n\n .with_endpoints([\"localhost:32379\"])\n\n .build();\n\n\n\n println!(\"Opening Service Registry\");\n\n let mut registry = ServiceRegistry::new(options)\n\n .await\n\n .expect(\"Failed to initialize Service Registry\");\n\n println!(\"Registry opened !!!\");\n\n\n", "file_path": "test_app.rs", "rank": 70, "score": 9.62462840455009 }, { "content": " service,\n\n instance,\n\n source\n\n ))]\n\n ServiceRegister {\n\n service: String,\n\n instance: String,\n\n source: StoreError,\n\n },\n\n\n\n /// Can't unregister existing service\n\n #[snafu(display(\n\n \"Failed to unregister service {} with instance {}. Error='{}'\",\n\n service,\n\n instance,\n\n source\n\n ))]\n\n ServiceUnregister {\n\n service: String,\n\n instance: String,\n", "file_path": "src/common.rs", "rank": 71, "score": 9.548443923306674 }, { "content": " // Start watcher loop.\n\n let mut service_watcher = registry.watch().await.unwrap();\n\n tokio::spawn(async move {\n\n println!(\"-> Watching for registry events...\");\n\n loop {\n\n let event = service_watcher.watch().await.unwrap();\n\n match event {\n\n ServiceEvent::ServiceAdded { service } => {\n\n println!(\n\n \"# Service instance registered: {}/{}\",\n\n service.name(),\n\n service.instance_id()\n\n )\n\n }\n\n ServiceEvent::ServiceRemoved { key } => {\n\n println!(\"# Service unergistered: {}\", key);\n\n }\n\n }\n\n }\n\n });\n", "file_path": "test_app.rs", "rank": 72, "score": 8.658678814330042 }, { "content": " .start(&node.name)\n\n .await\n\n .expect(\"Failed to start ETCD node\");\n\n }\n\n\n\n // Wait till all ETCD nodes are up and running.\n\n for node in self.nodes.iter() {\n\n self.wait_node(node).await;\n\n }\n\n }\n\n\n\n pub async fn stop(&mut self) {\n\n // Stop all containers.\n\n for n in self.nodes.clone().iter() {\n\n self.stop_node(n.id).await;\n\n }\n\n }\n\n\n\n pub 
async fn stop_node(&mut self, node_id: u32) {\n\n assert!(\n", "file_path": "tests/common/etcd.rs", "rank": 73, "score": 8.365921043669195 }, { "content": " Ok(_) => return,\n\n Err(_) => {\n\n // In case of error sleep for 1 sec.\n\n tokio::time::sleep(tokio::time::Duration::from_secs(1)).await;\n\n }\n\n }\n\n }\n\n // Panic if the node didn't start.\n\n panic!(\"ETCD node {} did not start within 5 seconds\", node.name);\n\n }\n\n\n\n pub async fn start_node(&mut self, node_id: u32) {\n\n assert!(\n\n (node_id as usize) < self.nodes.len(),\n\n \"Unknown ETCD node ID: {}\",\n\n node_id\n\n );\n\n\n\n let node = &self.nodes[node_id as usize];\n\n\n", "file_path": "tests/common/etcd.rs", "rank": 74, "score": 7.9949470396357345 }, { "content": "use dynservice::{service::Builder, ServiceConfig};\n\nuse std::sync::atomic::{AtomicU64, Ordering};\n\nmod common;\n\nuse common::etcd::EtcdCluster;\n\n\n\nconst SERVICE_NAME: &str = \"test_service\";\n\n\n\nasync fn get_etcd_cluster() -> EtcdCluster {\n\n let mut cluster = common::etcd::Builder::new(3).build().await;\n\n cluster.start().await;\n\n cluster\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_service_registration() {\n\n let cluster = get_etcd_cluster().await;\n\n\n\n static MAIN_CALLS: AtomicU64 = AtomicU64::new(0);\n\n static INIT_CALLS: AtomicU64 = AtomicU64::new(0);\n\n\n", "file_path": "tests/generic.rs", "rank": 75, "score": 7.891549956751203 }, { "content": " StoreWatchEvent::Delete { key } => Ok(ServiceEvent::ServiceRemoved { key }),\n\n }\n\n }\n\n}\n\n\n\nimpl ServiceWatcher {\n\n pub async fn watch(&mut self) -> Result<ServiceEvent, ServiceError> {\n\n // Check if there are any pending events to be delivered.\n\n if !self.pending_events.is_empty() {\n\n let watch_event = self.pending_events.pop_front().unwrap();\n\n return ServiceEvent::try_from(watch_event);\n\n }\n\n\n\n // Check if the watcher is still active.\n\n if !self.active {\n\n return Err(ServiceError::WatchStreamClosed {\n\n key: 
self.watcher.key().to_string(),\n\n });\n\n }\n\n\n", "file_path": "src/watcher.rs", "rank": 76, "score": 7.293018066451876 }, { "content": " #[snafu(display(\"Lease {} lost\", lease))]\n\n LeaseLost { lease: i64 },\n\n\n\n /// Invalid lease TTL.\n\n #[snafu(display(\"Invalid lease TTL: {}\", ttl))]\n\n InvalidLeaseTTL { ttl: i64 },\n\n\n\n /// Failed to create a key watcher.\n\n #[snafu(display(\"Failed to create watcher for a key '{}'. Error={}\", key, source))]\n\n WatchCreate {\n\n key: String,\n\n source: etcd_client::Error,\n\n },\n\n\n\n /// Error occurred during event watching.\n\n #[snafu(display(\"Failed to watch events for a key '{}'. Error={}\", key, source))]\n\n Watch {\n\n key: String,\n\n source: etcd_client::Error,\n\n },\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum StoreWatchEvent {\n\n Put { key: String, value: Value },\n\n Delete { key: String },\n\n}\n", "file_path": "src/store/common.rs", "rank": 77, "score": 7.15173293595392 }, { "content": " .map_err(|_e| ServiceError::RegistryRead {\n\n error: String::from(\"Failed to search registry\"),\n\n })?;\n\n\n\n let mut services: Vec<Service> = Vec::new();\n\n for s in res {\n\n match serde_json::from_value::<Service>(s) {\n\n Ok(service) => {\n\n services.push(service);\n\n }\n\n Err(e) => {\n\n log::warn!(\"Failed to deserialize service: {}\", e);\n\n }\n\n }\n\n }\n\n\n\n Ok(services)\n\n }\n\n}\n", "file_path": "src/search.rs", "rank": 78, "score": 6.9240610541911884 }, { "content": " Ok(())\n\n }\n\n\n\n pub async fn grant_lease(&mut self, ttl: i64) -> Result<TimedLease, StoreError> {\n\n if ttl <= 0 {\n\n return Err(StoreError::InvalidLeaseTTL { ttl });\n\n }\n\n\n\n let lease = self\n\n .client\n\n .lease_grant(ttl, None)\n\n .await\n\n .context(LeaseGrant { ttl })?;\n\n\n\n TimedLease::new(lease.id(), ttl, self.client.lease_client()).await\n\n }\n\n\n\n pub async fn get_prefix(&mut self, prefix: String) -> Result<Vec<Value>, StoreError> {\n\n let opts = GetOptions::new().with_prefix();\n\n\n", 
"file_path": "src/store/store.rs", "rank": 79, "score": 6.57177848552897 }, { "content": " }\n\n\n\n pub fn ttl(&self) -> i64 {\n\n self.ttl\n\n }\n\n\n\n pub async fn keep_alive(&mut self) -> Result<(), StoreError> {\n\n // Phase 1: update lease keep-alive.\n\n self.keeper\n\n .keep_alive()\n\n .await\n\n .context(LeaseKeepAlive { lease: self.id })?;\n\n\n\n // Phase 2: Check the response stream to validate if the lease stil exists.\n\n let m = self\n\n .response_stream\n\n .message()\n\n .await\n\n .context(LeaseKeepAlive { lease: self.id })?;\n\n\n", "file_path": "src/store/lease.rs", "rank": 80, "score": 6.499339786328986 }, { "content": "mod common;\n\nmod lease;\n\nmod store;\n\nmod watcher;\n\n\n\npub use common::{StoreError, StoreWatchEvent};\n\npub use lease::TimedLease;\n\npub use store::KeyValueStore;\n\npub use watcher::StoreWatcher;\n", "file_path": "src/store/mod.rs", "rank": 81, "score": 6.4173087572129415 }, { "content": " /// Invalid argument passed.\n\n #[snafu(display(\"Invalid argument: {}\", error))]\n\n InvalidArgument { error: String },\n\n\n\n /// Failed to read entried from the registry.\n\n #[snafu(display(\"Failed to read from registry: {}\", error))]\n\n RegistryRead { error: String },\n\n\n\n /// Failed to instantinate service from JSON object.\n\n #[snafu(display(\"Failed to deseriazlize service from JSON. Error={}\", source))]\n\n ServiceDeserialize { source: serde_json::Error },\n\n\n\n /// Error while watching the services.\n\n #[snafu(display(\"Failed to watch for service events on key {}. 
Error={}\", key, source))]\n\n WatchService { key: String, source: StoreError },\n\n\n\n /// Watch stream closed, no more events can be received.\n\n #[snafu(display(\"Watch stream for key '{}' is closed\", key))]\n\n WatchStreamClosed { key: String },\n\n\n", "file_path": "src/common.rs", "rank": 82, "score": 6.354516158710603 }, { "content": " if let Some(kv) = e.kv() {\n\n match parse_kv(kv, false) {\n\n Ok((key, _)) => events.push(StoreWatchEvent::Delete { key }),\n\n Err(e) => {\n\n tracing::error!(\n\n \"Failed to deserialize value in DELETE event: {}\",\n\n e\n\n );\n\n }\n\n };\n\n }\n\n }\n\n }\n\n }\n\n\n\n // Report at least one successfully received event.\n\n if !events.is_empty() {\n\n return Ok(events);\n\n }\n\n }\n\n }\n\n\n\n pub fn key(&self) -> &str {\n\n &self.key\n\n }\n\n}\n\n\n", "file_path": "src/store/watcher.rs", "rank": 83, "score": 5.66548187630281 }, { "content": " .build();\n\n\n\n let service = Builder::new(service_config, cluster.as_service_registry_options())\n\n .with_initializer(service_init())\n\n .with_entrypoint(service_main())\n\n .build();\n\n\n\n service.start().await.unwrap();\n\n\n\n // Make sure main routine and initializer were called.\n\n assert_eq!(\n\n 1,\n\n MAIN_CALLS.load(Ordering::Relaxed),\n\n \"Incorrect number of service entry point invocations\"\n\n );\n\n\n\n assert_eq!(\n\n 1,\n\n INIT_CALLS.load(Ordering::Relaxed),\n\n \"Incorrect number of service initializer invocations\"\n\n );\n\n}\n", "file_path": "tests/generic.rs", "rank": 84, "score": 5.398081435186729 }, { "content": " // Check lease's current TTL.\n\n if let Some(resp) = m {\n\n if resp.ttl() == 0 {\n\n return Err(StoreError::LeaseLost { lease: self.id });\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub async fn revoke(&mut self) -> Result<(), StoreError> {\n\n self.client\n\n .revoke(self.id)\n\n .await\n\n .context(LeaseRevoke { lease: self.id })?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/store/lease.rs", "rank": 85, "score": 4.865522364876213 
}, { "content": " self.kv_store\n\n .put(service_key, &service, &lease)\n\n .await\n\n .unwrap();\n\n\n\n Ok(ServiceDescriptor::new(&service, lease))\n\n }\n\n\n\n pub async fn watch(&mut self) -> Result<ServiceWatcher, ServiceError> {\n\n // Watch all services.\n\n let key = ServiceKeyPathBuilder::default().build().unwrap();\n\n let watcher = self.kv_store.get_watcher(key, true).await.unwrap();\n\n Ok(ServiceWatcher::new(watcher))\n\n }\n\n}\n", "file_path": "src/registry.rs", "rank": 86, "score": 4.547433862369543 }, { "content": " let response = self\n\n .client\n\n .get(prefix.to_string(), Some(opts))\n\n .await\n\n .context(GetPrefix {\n\n prefix: prefix.to_string(),\n\n })?;\n\n\n\n let mut values: Vec<Value> = Vec::new();\n\n for kv in response.kvs() {\n\n match serde_json::from_slice(kv.value()) {\n\n Ok(v) => {\n\n values.push(v);\n\n }\n\n Err(e) => {\n\n tracing::error!(\"Failed to deserialize object. Error = {}\", e);\n\n }\n\n };\n\n }\n\n\n", "file_path": "src/store/store.rs", "rank": 87, "score": 4.461981249772624 }, { "content": " let mut watch_events = self.watcher.watch().await.context(WatchService {\n\n key: self.watcher.key().to_string(),\n\n })?;\n\n\n\n // Check whether watch stream is cancelled.\n\n if watch_events.is_empty() {\n\n tracing::warn!(\n\n \"Store watcher's stream is cancelled, no more service events can be received\"\n\n );\n\n self.active = false;\n\n return Err(ServiceError::WatchStreamClosed {\n\n key: self.watcher.key().to_string(),\n\n });\n\n }\n\n\n\n // In case there are more than 1 event observed, enqueue the remaining events.\n\n if watch_events.len() > 1 {\n\n watch_events.drain(1..watch_events.len()).for_each(|w| {\n\n self.pending_events.push_back(w);\n\n })\n\n }\n\n\n\n let watch_event = watch_events.remove(0);\n\n Ok(ServiceEvent::try_from(watch_event)?)\n\n }\n\n}\n", "file_path": "src/watcher.rs", "rank": 88, "score": 4.237339793414023 }, { "content": " (node_id as usize) < self.nodes.len(),\n\n \"Unknown ETCD 
node ID: {}\",\n\n node_id\n\n );\n\n\n\n self.compose_test\n\n .stop(&self.nodes[node_id as usize].name)\n\n .await\n\n .expect(\"Failed to stop ETCD node\");\n\n }\n\n\n\n async fn wait_node(&self, node: &EtcdNode) {\n\n let mut payload = HashMap::new();\n\n payload.insert(\"key\", \"Zm9v\"); // Zm9v is 'foo' in Base64\n\n let client = reqwest::Client::new();\n\n let a = format!(\"http://{}/v3/kv/range\", node.ip_address);\n\n\n\n for _i in 0..5 {\n\n let u = reqwest::Url::parse(&a).unwrap();\n\n match client.post(u).json(&payload).send().await {\n", "file_path": "tests/common/etcd.rs", "rank": 89, "score": 4.1951703311356585 }, { "content": " let service = Builder::new(service_config, cluster.as_service_registry_options())\n\n .with_entrypoint(service_main())\n\n .build();\n\n\n\n let (res, _) = tokio::join!(service.start(), stop_node(cluster),);\n\n\n\n res.expect(\"Service did not complete gracefully after ETCD node failure\");\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_etcd_cluster_failure() {\n\n let cluster = get_etcd_cluster().await;\n\n\n\n static SHUTDOWN_CALLS: AtomicU64 = AtomicU64::new(0);\n\n\n\n // Service entrypoint that simulates some short work.\n\n async fn service_main() -> Result<(), String> {\n\n for _i in 1..10 {\n\n tokio::time::sleep(std::time::Duration::from_secs(1)).await;\n\n }\n", "file_path": "tests/faults.rs", "rank": 90, "score": 4.14717795854456 }, { "content": " let mut builder = ComposeBuilder::new();\n\n let mut nodes: Vec<EtcdNode> = Vec::with_capacity(self.num_nodes as usize);\n\n builder = builder.autorun(false);\n\n\n\n let mut port_num = PORT_RANGE_START;\n\n\n\n // Calculate overall cluster topology before configuring ETCD nodes.\n\n let initial_cluster = (1..self.num_nodes + 1)\n\n .map(|n| format!(\"node{}=http://10.1.0.{}:2380\", n, n + 1))\n\n .collect::<Vec<String>>()\n\n .join(\",\");\n\n\n\n for n in 1..self.num_nodes + 1 {\n\n let port1 = port_num;\n\n let port2 = port_num + 1;\n\n let node_name = 
format!(\"node{}\", n);\n\n\n\n let mut spec = { ContainerSpec::from_image(&node_name, ETCD_IMAGE) };\n\n\n\n spec = spec.with_portmap(\"2379\", &port1.to_string());\n", "file_path": "tests/common/etcd.rs", "rank": 91, "score": 4.001701400751564 }, { "content": " ip_address: format!(\"127.0.0.1:{}\", port1),\n\n });\n\n\n\n port_num = port_num + 2;\n\n }\n\n\n\n let compose_test = builder.build().await.unwrap();\n\n\n\n EtcdCluster {\n\n compose_test,\n\n nodes,\n\n }\n\n }\n\n}\n\n\n\nimpl EtcdCluster {\n\n pub async fn start(&mut self) {\n\n // Start all containers.\n\n for node in self.nodes.iter() {\n\n self.compose_test\n", "file_path": "tests/common/etcd.rs", "rank": 92, "score": 3.806063992912327 }, { "content": " // Start ETCD container.\n\n self.compose_test\n\n .start(&node.name)\n\n .await\n\n .expect(\"Failed to start ETCD node\");\n\n\n\n // Make sure ETCD node started successfully.\n\n self.wait_node(node).await;\n\n }\n\n\n\n pub fn get_nodes(&self) -> Vec<EtcdNode> {\n\n self.nodes.clone()\n\n }\n\n}\n", "file_path": "tests/common/etcd.rs", "rank": 93, "score": 3.8059038114877795 }, { "content": " n + 1,\n\n n + 1\n\n ));\n\n\n\n spec = spec.with_arg(\"--listen-client-urls\");\n\n spec = spec.with_arg(\"http://0.0.0.0:2379,http://0.0.0.0:4001\");\n\n\n\n spec = spec.with_arg(\"--initial-cluster-token\");\n\n spec = spec.with_arg(\"etcd-test-cluster-1\");\n\n\n\n spec = spec.with_arg(\"--initial-cluster-state\");\n\n spec = spec.with_arg(\"new\");\n\n\n\n spec = spec.with_arg(\"--initial-cluster\");\n\n spec = spec.with_arg(&initial_cluster);\n\n\n\n builder = builder.add_container_spec(spec);\n\n nodes.push(EtcdNode {\n\n id: n - 1,\n\n name: node_name,\n", "file_path": "tests/common/etcd.rs", "rank": 94, "score": 3.7503395166763163 }, { "content": " Ok(values)\n\n }\n\n\n\n pub async fn get_watcher(\n\n &mut self,\n\n key: String,\n\n prefixed_watch: bool,\n\n ) -> Result<StoreWatcher, StoreError> {\n\n let options = if prefixed_watch {\n\n let 
options = WatchOptions::new().with_prefix();\n\n Some(options)\n\n } else {\n\n None\n\n };\n\n\n\n let (_, stream) = self\n\n .client\n\n .watch(key.clone(), options)\n\n .await\n\n .context(WatchCreate { key: key.clone() })?;\n\n\n\n Ok(StoreWatcher::new(key, stream))\n\n }\n\n}\n", "file_path": "src/store/store.rs", "rank": 95, "score": 3.1935239286595327 }, { "content": " spec = spec.with_portmap(\"2380\", &port2.to_string());\n\n spec = spec.with_cmd(\"/usr/local/bin/etcd\");\n\n //spec = spec.with_bind(\"/tmp/etcd\", \"/etcd-data\");\n\n\n\n spec = spec.with_bind(&get_container_mount(), \"/etcd-data\");\n\n spec = spec.with_arg(\"--data-dir\");\n\n spec = spec.with_arg(\"/etcd-data\");\n\n\n\n spec = spec.with_arg(\"--name\");\n\n spec = spec.with_arg(&node_name);\n\n\n\n spec = spec.with_arg(\"--initial-advertise-peer-urls\");\n\n spec = spec.with_arg(&format!(\"http://10.1.0.{}:2380\", n + 1));\n\n\n\n spec = spec.with_arg(\"--listen-peer-urls\");\n\n spec = spec.with_arg(\"http://0.0.0.0:2380\");\n\n\n\n spec = spec.with_arg(\"--advertise-client-urls\");\n\n spec = spec.with_arg(&format!(\n\n \"http://10.1.0.{}:2379,http://10.1.0.{}:4001\",\n", "file_path": "tests/common/etcd.rs", "rank": 96, "score": 3.131673626473479 }, { "content": " LeaseGrant {\n\n ttl: i64,\n\n source: etcd_client::Error,\n\n },\n\n\n\n /// Failed to update lease keep-alive.\n\n #[snafu(display(\"Failed to update keep alive for lease {}. Error={}\", lease, source))]\n\n LeaseKeepAlive {\n\n lease: i64,\n\n source: etcd_client::Error,\n\n },\n\n\n\n /// Failed to revoke lease.\n\n #[snafu(display(\"Failed to revoke lease {}. Error={}\", lease, source))]\n\n LeaseRevoke {\n\n lease: i64,\n\n source: etcd_client::Error,\n\n },\n\n\n\n /// Lease expired.\n", "file_path": "src/store/common.rs", "rank": 97, "score": 2.8648995005608033 }, { "content": " \"Failed to store entry with key '{}' and value '{}'. 
Error = {}\",\n\n key,\n\n value,\n\n source,\n\n ))]\n\n Put {\n\n key: String,\n\n value: String,\n\n source: etcd_client::Error,\n\n },\n\n\n\n /// Failed to get value(s).\n\n #[snafu(display(\"Failed to get values for prefix {}. Error={}\", prefix, source))]\n\n GetPrefix {\n\n prefix: String,\n\n source: etcd_client::Error,\n\n },\n\n\n\n /// Failed to grant lease.\n\n #[snafu(display(\"Failed to grant lease with TTL {}. Error = {}\", ttl, source))]\n", "file_path": "src/store/common.rs", "rank": 98, "score": 2.604881007654863 }, { "content": "# dynservice\n\nRust library for ETCD-backed dynamic service discovery\n", "file_path": "README.md", "rank": 99, "score": 2.3365337576708756 } ]
Rust
src/board.rs
Rejyr/sf21_22
974887cfb655eb48a23b9b425166884194719c00
use std::fmt::Display; use std::ops::ControlFlow; use board_game::board::Board as BoardTrait; use board_game::board::BoardMoves; use board_game::board::Outcome; use board_game::board::Player; use board_game::board::UnitSymmetryBoard; use chess::{BitBoard, Color}; use internal_iterator::Internal; use internal_iterator::InternalIterator; use internal_iterator::IteratorExt; use crate::consts::EMPTY_BB; use crate::consts::{RANKS, START_POS_BLACK, START_POS_WHITE}; use crate::move_gen::Move; use crate::move_gen::MoveGen; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Board { white: BitBoard, black: BitBoard, side_to_move: Color, size: usize, } impl Display for Board { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut white = self.white.0; let mut black = self.black.0; for i in 0..8 { let mut w_rank = ((white & RANKS[7]) >> 56) as u8; let mut b_rank = ((black & RANKS[7]) >> 56) as u8; for _ in 0..8 { let w_sq = w_rank & 1; let b_sq = b_rank & 1; match (w_sq, b_sq) { (0, 0) => f.write_str(" ")?, (1, 0) => f.write_str("♙")?, (0, 1) => f.write_str("♟︎")?, (1, 1) => panic!("Board has two pawns in the same place"), _ => {} } w_rank >>= 1; b_rank >>= 1; } f.write_fmt(format_args!("|{}\n", 8 - i))?; white <<= 8; black <<= 8; } f.write_str("--------*\n")?; f.write_str("abcdefgh\n")?; f.write_str("01234567\n")?; Ok(()) } } impl Board { pub fn new(size: usize) -> Self { assert!(size > 2 && size < 9, "Invalid size, must be 3 to 8"); Board { white: BitBoard(START_POS_WHITE[size - 1]), black: BitBoard(START_POS_BLACK[size - 1]), side_to_move: Color::White, size, } } pub fn pieces(&self, color: Color) -> BitBoard { match color { Color::White => self.white, Color::Black => self.black, } } pub fn pieces_mut(&mut self, color: Color) -> &mut BitBoard { match color { Color::White => &mut self.white, Color::Black => &mut self.black, } } pub fn pieces_to_move(&self) -> BitBoard { self.pieces(self.side_to_move) } pub fn pieces_to_move_mut(&mut self) -> &mut 
BitBoard { self.pieces_mut(self.side_to_move) } pub fn pieces_not_to_move(&self) -> BitBoard { self.pieces(!self.side_to_move) } pub fn pieces_not_to_move_mut(&mut self) -> &mut BitBoard { self.pieces_mut(!self.side_to_move) } pub fn side_to_move(&self) -> Color { self.side_to_move } pub fn empty(&self) -> BitBoard { !self.occupied() } pub fn occupied(&self) -> BitBoard { self.white | self.black } } impl UnitSymmetryBoard for Board {} impl BoardTrait for Board { type Move = Move; fn next_player(&self) -> board_game::board::Player { match self.side_to_move { Color::White => Player::A, Color::Black => Player::B, } } fn is_available_move(&self, mv: Self::Move) -> bool { MoveGen::new(self).any(|x| x == mv) } fn play(&mut self, mv: Self::Move) { let src_bb = BitBoard::from_square(mv.src()); let dest_bb = BitBoard::from_square(mv.dest()); *self.pieces_to_move_mut() ^= src_bb | dest_bb; *self.pieces_not_to_move_mut() &= !dest_bb; self.side_to_move = !self.side_to_move; } fn outcome(&self) -> Option<board_game::board::Outcome> { if self.white & BitBoard(RANKS[self.size - 1]) != EMPTY_BB { Some(Outcome::WonBy(Player::A)) } else if self.black & BitBoard(RANKS[0]) != EMPTY_BB { Some(Outcome::WonBy(Player::B)) } else if MoveGen::new(self).len() == 0 { Some(Outcome::Draw) } else { None } } fn can_lose_after_move() -> bool { false } } impl<'a> BoardMoves<'a, Board> for Board { type AllMovesIterator = AllMoves; type AvailableMovesIterator = Internal<MoveGen>; fn all_possible_moves() -> Self::AllMovesIterator { AllMoves } fn available_moves(&'a self) -> Self::AvailableMovesIterator { MoveGen::new(self).into_internal() } } #[doc(hidden)] pub struct AllMoves; impl InternalIterator for AllMoves { type Item = Move; fn try_for_each<R, F>(self, mut f: F) -> std::ops::ControlFlow<R> where F: FnMut(Self::Item) -> std::ops::ControlFlow<R>, { for from in chess::ALL_SQUARES { for to in chess::ALL_SQUARES { f(Move::new(from, to))?; } } ControlFlow::Continue(()) } }
use std::fmt::Display; use std::ops::ControlFlow; use board_game::board::Board as BoardTrait; use board_game::board::BoardMoves; use board_game::board::Outcome; use board_game::board::Player; use board_game::board::UnitSymmetryBoard; use chess::{BitBoard, Color}; use internal_iterator::Internal; use internal_iterator::InternalIterator; use internal_iterator::IteratorExt; use crate::consts::EMPTY_BB; use crate::consts::{RANKS, START_POS_BLACK, START_POS_WHITE}; use crate::move_gen::Move; use crate::move_gen::MoveGen; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Board { white: BitBoard, black: BitBoard, side_to_move: Color, size: usize, } impl Display for Board { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let mut white = self.white.0; let mut black = self.black.0; for i in 0..8 { let mut w_rank = ((white & RANKS[7]) >> 56) as u8; let mut b_rank = ((black & RANKS[7]) >> 56) as u8; for _ in 0..8 { let w_sq = w_rank & 1; let b_sq = b_rank & 1; match (w_sq, b_sq) { (0, 0) => f.write_str(" ")?, (1, 0) => f.write_str("♙")?, (0, 1) => f.write_str("♟︎")?, (1, 1) => panic!("Board has two pawns in the same place"), _ => {} } w_rank >>= 1; b_rank >>= 1; } f.write_fmt(format_args!("|{}\n", 8 - i))?; white <<= 8; black <<= 8; } f.write_str("--------*\n")?; f.write_str("abcdefgh\n")?; f.write_str("01234567\n")?; Ok(()) } } impl Board { pub fn new(size: usize) -> Self { assert!(size > 2 && size < 9, "Invalid size, must be 3 to 8"); Board { white: BitBoard(START_POS_WHITE[size - 1]), black: BitBoard(START_POS_BLACK[size - 1]), side_to_move: Color::White, size, } } pub fn pieces(&self, color: Color) -> BitBoard { match color { Color::White => self.white, Color::Black => self.black, } } pub fn pieces_mut(&mut self, color: Color) -> &mut BitBoard { match color { Color::White => &mut self.white, Color::Black => &mut self.black, } } pub fn pieces_to_move(&self) -> BitBoard { self.pieces(self.side_to_move) } pub fn pieces_to_move_mut(&mut self) -> &mut 
BitBoard { self.pieces_mut(self.side_to_move) } pub fn pieces_not_to_move(&self) -> BitBoard { self.pieces(!self.side_to_move) } pub fn pieces_not_to_move_mut(&mut self) -> &mut BitBoard { self.pieces_mut(!self.side_to_move) } pub fn side_to_move(&self) -> Color { self.side_to_move } pub fn empty(&self) -> BitBoard { !self.occupied() } pub fn occupied(&self) -> BitBoard { self.white | self.black } } impl UnitSymmetryBoard for Board {} impl BoardTrait for Board { type Move = Move; fn next_player(&self) -> board_game::board::Player { match self.side_to_move { Color::White => Player::A, Color::Black => Player::B, } } fn is_available_move(&self, mv: Self::Move) -> bool { MoveGen::new(self).any(|x| x == mv) } fn play(&mut self, mv: Self::Move) { let src_bb = BitBoard::from_square(mv.src()); let dest_bb = BitBoard::fro
fn outcome(&self) -> Option<board_game::board::Outcome> { if self.white & BitBoard(RANKS[self.size - 1]) != EMPTY_BB { Some(Outcome::WonBy(Player::A)) } else if self.black & BitBoard(RANKS[0]) != EMPTY_BB { Some(Outcome::WonBy(Player::B)) } else if MoveGen::new(self).len() == 0 { Some(Outcome::Draw) } else { None } } fn can_lose_after_move() -> bool { false } } impl<'a> BoardMoves<'a, Board> for Board { type AllMovesIterator = AllMoves; type AvailableMovesIterator = Internal<MoveGen>; fn all_possible_moves() -> Self::AllMovesIterator { AllMoves } fn available_moves(&'a self) -> Self::AvailableMovesIterator { MoveGen::new(self).into_internal() } } #[doc(hidden)] pub struct AllMoves; impl InternalIterator for AllMoves { type Item = Move; fn try_for_each<R, F>(self, mut f: F) -> std::ops::ControlFlow<R> where F: FnMut(Self::Item) -> std::ops::ControlFlow<R>, { for from in chess::ALL_SQUARES { for to in chess::ALL_SQUARES { f(Move::new(from, to))?; } } ControlFlow::Continue(()) } }
m_square(mv.dest()); *self.pieces_to_move_mut() ^= src_bb | dest_bb; *self.pieces_not_to_move_mut() &= !dest_bb; self.side_to_move = !self.side_to_move; }
function_block-function_prefixed
[ { "content": "pub fn inspect_moves(board: &Board) {\n\n if board\n\n .available_moves()\n\n .inspect(|mv| println!(\"{mv}\"))\n\n .count()\n\n == 0\n\n {\n\n println!(\"No moves\");\n\n }\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 0, "score": 106015.59361548017 }, { "content": "fn random_playout<B: Board>(mut board: B, rng: &mut impl Rng) -> Outcome {\n\n assert!(\n\n !board.is_done(),\n\n \"should never start random playout on a done board\"\n\n );\n\n\n\n loop {\n\n board.play(board.random_available_move(rng));\n\n\n\n if let Some(outcome) = board.outcome() {\n\n return outcome;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/bot/mcts_heuristic_bot.rs", "rank": 1, "score": 91949.74997851442 }, { "content": "pub fn x(result: BotGameResult<Board>) -> BotResult {\n\n BotResult {\n\n wdl_l: result.wdl_l,\n\n debug_l: result.debug_l,\n\n debug_r: result.debug_r,\n\n time_l: result.time_l,\n\n time_r: result.time_r,\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 2, "score": 77434.49979453701 }, { "content": "/// Returns an evaluation of how far the pawns are by counting the [rear-fill](https://www.chessprogramming.org/Pawn_Fills)\n\npub fn advancement_eval(bb: u64, color: Color) -> u32 {\n\n match color {\n\n Color::White => S_fill(bb).count_ones(),\n\n Color::Black => N_fill(bb).count_ones(),\n\n }\n\n}\n\n\n", "file_path": "src/bot/heuristic.rs", "rank": 3, "score": 76737.85350580198 }, { "content": "// tweaked perft from board-game\n\npub fn perft<B: BoardTrait>(board: &B, depth: u32) -> u64 {\n\n fn perft_recurse<B: BoardTrait + Hash>(\n\n map: &mut HashMap<(B, u32), u64>,\n\n board: B,\n\n depth: u32,\n\n ) -> u64 {\n\n println!(\"{board}\");\n\n if depth == 0 {\n\n return 1;\n\n }\n\n if board.is_done() {\n\n return 0;\n\n }\n\n if depth == 1 {\n\n return board\n\n .available_moves()\n\n .inspect(|mv| println!(\"{mv}\"))\n\n .count() as u64;\n\n }\n\n\n", "file_path": "src/tests.rs", "rank": 4, "score": 74898.95515628954 }, { "content": 
"#[allow(non_snake_case)]\n\npub fn S_fill(mut bb: u64) -> u64 {\n\n bb |= bb >> 8;\n\n bb |= bb >> 16;\n\n bb |= bb >> 32;\n\n bb\n\n}\n\n\n", "file_path": "src/bot/heuristic.rs", "rank": 5, "score": 67225.33859496497 }, { "content": "#[allow(non_snake_case)]\n\npub fn N_fill(mut bb: u64) -> u64 {\n\n bb |= bb << 8;\n\n bb |= bb << 16;\n\n bb |= bb << 32;\n\n bb\n\n}\n\n\n\n/// South [pawn fill](https://www.chessprogramming.org/Pawn_Fills)\n\n/// using parallel prefix [Kogge-Stone routines](https://www.chessprogramming.org/Kogge-Stone_Algorithm)\n", "file_path": "src/bot/heuristic.rs", "rank": 6, "score": 67225.33859496497 }, { "content": "/// Returns the output file path for results\n\npub fn output_path() -> PathBuf {\n\n dirs::home_dir().unwrap().join(\"sf21_22_output\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "src/lib.rs", "rank": 7, "score": 49568.853367493 }, { "content": "#[test]\n\nfn play_move() {\n\n let mut board = Board::new(3);\n\n board.play(Move::new(Square::A1, Square::A2));\n\n println!(\"{board}\");\n\n board.play(Move::new(Square::B3, Square::A2));\n\n println!(\"{board}\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 8, "score": 45173.643791446666 }, { "content": "/// Run a single MCTS step.\n\n///\n\n/// Returns `(result, proven)`, where\n\n/// * `result` is from the pov of the player that just played on `curr_board`.\n\n/// * `proven` is whether this result is fully proven\n\n///\n\n/// This function has already increments `curr_node` before it returns.\n\nfn mcts_solver_step<B: Board>(\n\n tree: &mut Tree<B>,\n\n curr_node: usize,\n\n curr_board: &B,\n\n exploration_weight: f32,\n\n heuristic: impl Heuristic<B, V = i32> + Clone,\n\n rng: &mut impl Rng,\n\n) -> (OutcomeWDL, bool) {\n\n //TODO should we decrement visit count? -> meh, then we're pulling search time towards partially solved branches\n\n //TODO should we backprop all previous backpropped losses and draws as wins now? 
-> meh, then we're overestimating this entire branch\n\n\n\n if let Some(outcome) = tree[curr_node].solution() {\n\n return (outcome, true);\n\n }\n\n\n\n // initialize children\n\n let children = match tree[curr_node].children {\n\n Some(children) => children,\n\n None => {\n\n let start = NonZeroUsize::new(tree.nodes.len()).unwrap();\n", "file_path": "src/bot/mcts_heuristic_bot.rs", "rank": 9, "score": 45095.89513303531 }, { "content": "fn mcts_build_tree<B: Board>(\n\n root_board: &B,\n\n iterations: u64,\n\n exploration_weight: f32,\n\n heuristic: impl Heuristic<B, V = i32> + Clone,\n\n rng: &mut impl Rng,\n\n) -> Tree<B> {\n\n assert!(iterations > 0);\n\n\n\n let mut tree = Tree::new(root_board.clone());\n\n\n\n let root_outcome = root_board\n\n .outcome()\n\n .map(|o| o.pov(root_board.next_player().other()));\n\n tree.nodes.push(new_node(None, root_outcome));\n\n\n\n for _ in 0..iterations {\n\n //we've solved the root node, so we're done\n\n if tree[0].solution().is_some() {\n\n break;\n", "file_path": "src/bot/mcts_heuristic_bot.rs", "rank": 10, "score": 45094.16253701764 }, { "content": "#[test]\n\nfn board_perft() {\n\n let board_3x3 = Board::new(3);\n\n\n\n assert_eq!(perft(&board_3x3, 1), 3);\n\n assert_eq!(perft(&board_3x3, 2), 10);\n\n assert_eq!(perft(&board_3x3, 3), 28);\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 11, "score": 44831.51270309666 }, { "content": "/// Returns an evaluation of the amount of pawns by counting the [population count](https://www.chessprogramming.org/Population_Count)\n\npub fn material_eval(bb: u64) -> u32 {\n\n bb.count_ones()\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\n/// A simplified [`SolverHeuristic`](SolverHeuristic) by converting to i32\n\npub struct SolverHeuristicSimplified;\n\n\n\nimpl Heuristic<Board> for SolverHeuristicSimplified {\n\n type V = i32;\n\n\n\n fn value(&self, board: &Board, depth: u32) -> Self::V {\n\n SolverHeuristic.value(board, depth).to_i32()\n\n }\n\n\n\n fn merge(old: Self::V, new: 
Self::V) -> (Self::V, std::cmp::Ordering) {\n\n (max(old, new), new.cmp(&old))\n\n }\n\n}\n\n\n", "file_path": "src/bot/heuristic.rs", "rank": 12, "score": 43771.38707751096 }, { "content": "#[test]\n\nfn outcome() {\n\n let mut board_win_white = Board::new(3);\n\n board_win_white.play(Move::new(Square::A1, Square::A2));\n\n board_win_white.play(Move::new(Square::C3, Square::C2));\n\n board_win_white.play(Move::new(Square::A2, Square::B3));\n\n println!(\"White wins:\\n{board_win_white}\");\n\n assert_eq!(board_win_white.outcome(), Some(Outcome::WonBy(Player::A)));\n\n\n\n let mut board_win_black = Board::new(3);\n\n board_win_black.play(Move::new(Square::A1, Square::A2));\n\n board_win_black.play(Move::new(Square::B3, Square::A2));\n\n board_win_black.play(Move::new(Square::C1, Square::C2));\n\n board_win_black.play(Move::new(Square::A2, Square::B1));\n\n println!(\"Black wins:\\n{board_win_black}\");\n\n assert_eq!(board_win_black.outcome(), Some(Outcome::WonBy(Player::B)));\n\n\n\n let mut board_draw = Board::new(3);\n\n board_draw.play(Move::new(Square::A1, Square::A2));\n\n board_draw.play(Move::new(Square::B3, Square::B2));\n\n board_draw.play(Move::new(Square::C1, Square::C2));\n\n inspect_moves(&board_draw);\n\n println!(\"Draw:\\n{board_draw}\");\n\n assert_eq!(board_draw.outcome(), Some(Outcome::Draw));\n\n}\n", "file_path": "src/tests.rs", "rank": 13, "score": 26089.52711528881 }, { "content": "fn main() {\n\n const MIN_MAX_DEPTH: u32 = 10;\n\n const MCTS_ITERATIONS: u64 = 10_000;\n\n const MCTS_EXPLORATION: f32 = 2.0;\n\n\n\n const TRIALS_PER: u32 = 1000;\n\n const GAMES_PER_SIDE: u32 = TRIALS_PER / 4;\n\n const BOTH_SIDES: bool = true;\n\n\n\n let file = File::create(output_path()).unwrap();\n\n let mut buf = BufWriter::new(file);\n\n for size in SIZES {\n\n println!(\"size: {size}\");\n\n buf.write_fmt(format_args!(\"\\n\\nsize: {size}\\n\\n\")).unwrap();\n\n\n\n println!(\"Running (at {}): Random\", OffsetDateTime::now_utc());\n\n r!(buf, size, || 
RandomBot::new(thread_rng()), || {\n\n RandomBot::new(thread_rng())\n\n });\n\n r!(buf, size, || RandomBot::new(thread_rng()), || {\n", "file_path": "src/main.rs", "rank": 14, "score": 26089.52711528881 }, { "content": "fn new_node<M>(last_move: Option<M>, outcome: Option<OutcomeWDL>) -> Node<M> {\n\n let kind = match outcome {\n\n None => SNodeKind::Estimate(WDL::default()),\n\n Some(outcome) => SNodeKind::Solved(outcome),\n\n };\n\n\n\n Node {\n\n last_move,\n\n visits: 0,\n\n children: None,\n\n kind,\n\n }\n\n}\n\n\n", "file_path": "src/bot/mcts_heuristic_bot.rs", "rank": 26, "score": 23001.45633104732 }, { "content": " /// Only push moves\n\n Push,\n\n}\n\n\n\n/// Incremental move generation through Iterator\n\npub struct MoveGen {\n\n /// All pieces to move and their possible moves\n\n moves: Vec<SquareAndBitBoard>,\n\n /// The current SquareAndBitBoard used for move gen\n\n index: usize,\n\n}\n\n\n\n/// A move\n\n#[derive(Clone, Copy, Eq, PartialEq, Default, Debug, Hash)]\n\npub struct Move {\n\n /// From which square\n\n src: Square,\n\n /// To which square\n\n dest: Square,\n\n}\n", "file_path": "src/move_gen.rs", "rank": 27, "score": 22126.80775422706 }, { "content": "//! 
Move generation for hexapawn\n\n\n\nuse std::{cmp::Ordering, fmt::Display};\n\n\n\nuse chess::{get_pawn_attacks, get_pawn_quiets, BitBoard, Square};\n\n\n\nuse crate::{board::Board, consts::EMPTY_BB};\n\n\n\n/// A struct containing a square (the position of a pawn) and a bitboard (possible moves)\n\npub struct SquareAndBitBoard {\n\n sq: Square,\n\n bb: BitBoard,\n\n}\n\n\n\n/// Move generation masks\n\npub enum Mask {\n\n /// All moves\n\n None,\n\n /// Only capture moves\n\n Capture,\n", "file_path": "src/move_gen.rs", "rank": 28, "score": 22126.681700867513 }, { "content": " pub fn dest(&self) -> Square {\n\n self.dest\n\n }\n\n}\n\n\n\nimpl MoveGen {\n\n /// Creates a new `MoveGen` with a move generation mask\n\n pub fn with_mask(board: &Board, mask: Mask) -> MoveGen {\n\n let mut movelist = vec![];\n\n // for every piece to move\n\n for src in board.pieces_to_move() {\n\n let moves = match mask {\n\n // pawn captures and pushes\n\n Mask::None => {\n\n get_pawn_quiets(src, board.side_to_move(), board.occupied())\n\n ^ get_pawn_attacks(src, board.side_to_move(), board.pieces_not_to_move())\n\n }\n\n // only pawn captures\n\n Mask::Capture => {\n\n get_pawn_attacks(src, board.side_to_move(), board.pieces_not_to_move())\n", "file_path": "src/move_gen.rs", "rank": 29, "score": 22125.315784146118 }, { "content": "}\n\n\n\nimpl ExactSizeIterator for MoveGen {\n\n fn len(&self) -> usize {\n\n let mut len = 0;\n\n for moves in &self.moves {\n\n if moves.bb == EMPTY_BB {\n\n break;\n\n }\n\n\n\n len += moves.bb.popcnt() as usize;\n\n }\n\n len\n\n }\n\n}\n\n\n\nimpl Iterator for MoveGen {\n\n type Item = Move;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n", "file_path": "src/move_gen.rs", "rank": 30, "score": 22125.315149438495 }, { "content": " }\n\n // only pawn pushes\n\n Mask::Push => get_pawn_quiets(src, board.side_to_move(), board.occupied()),\n\n };\n\n // if there are moves, add it\n\n if moves != EMPTY_BB {\n\n movelist.push(SquareAndBitBoard { sq: src, 
bb: moves })\n\n }\n\n }\n\n\n\n MoveGen {\n\n moves: movelist,\n\n index: 0,\n\n }\n\n }\n\n\n\n /// Creates a new `MoveGen` with no mask\n\n pub fn new(board: &Board) -> MoveGen {\n\n MoveGen::with_mask(board, Mask::None)\n\n }\n", "file_path": "src/move_gen.rs", "rank": 31, "score": 22121.354189695096 }, { "content": "}\n\n\n\nimpl PartialOrd for Move {\n\n fn partial_cmp(&self, other: &Self) -> Option<Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n\n\n\nimpl Move {\n\n /// Creates a new `Move` from a source and destination\n\n pub fn new(src: Square, dest: Square) -> Move {\n\n Move { src, dest }\n\n }\n\n\n\n /// Returns the source of the `Move`\n\n pub fn src(&self) -> Square {\n\n self.src\n\n }\n\n\n\n /// Returns the destination of the `Move`\n", "file_path": "src/move_gen.rs", "rank": 32, "score": 22121.16205132588 }, { "content": " // Get the currently used SquareAndBitBoard. if there's none,finish\n\n let moves = &mut self.moves.get_mut(self.index)?;\n\n // Get the least signifigant ones bit\n\n let dest = moves.bb.to_square();\n\n\n\n // remove that bit from the SquareAndBitBoard\n\n moves.bb ^= BitBoard::from_square(dest);\n\n // if the SquareAndBitBoard is out of moves, increment the index\n\n if moves.bb == EMPTY_BB {\n\n self.index += 1;\n\n }\n\n // create a move from the square to move and a possible move\n\n Some(Move::new(moves.sq, dest))\n\n }\n\n}\n", "file_path": "src/move_gen.rs", "rank": 33, "score": 22120.701562639886 }, { "content": "\n\nimpl Display for Move {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}{}\", self.src, self.dest)\n\n }\n\n}\n\n\n\nimpl Ord for Move {\n\n fn cmp(&self, other: &Self) -> std::cmp::Ordering {\n\n // if sources aren't equal, compare them\n\n if self.src != other.src {\n\n self.src.cmp(&other.src)\n\n // if destinations aren't equal, compare them\n\n } else if self.dest != other.dest {\n\n self.dest.cmp(&other.dest)\n\n // otherwise, they're equal\n\n } else 
{\n\n Ordering::Equal\n\n }\n\n }\n", "file_path": "src/move_gen.rs", "rank": 34, "score": 22120.02962131722 }, { "content": "fn uct_heuristic<M>(\n\n node: &Node<M>,\n\n parent_visits: i64,\n\n exploration_weight: f32,\n\n heuristic_val: f32,\n\n) -> f32 {\n\n //TODO continue investigating this, what uct value to use for solved (in practice lost and drawn) nodes?\n\n // if exploration_weight < 0.0 {\n\n // let value_unit = (self.wdl().value() + 1.0) / 2.0;\n\n // let explore = ((parent_visits as f32).ln() / self.visits as f32).sqrt();\n\n //\n\n // return value_unit - exploration_weight * explore;\n\n // }\n\n\n\n match node.kind {\n\n SNodeKind::Estimate(wdl) => {\n\n let visits = wdl.sum() as f32;\n\n let value = wdl.cast::<f32>().value() / visits;\n\n let value_unit = (value + 1.0) / 2.0;\n\n\n\n let explore = ((parent_visits as f32).ln() / visits).sqrt();\n\n\n\n value_unit + exploration_weight * explore + (heuristic_val / (visits + 1.0))\n\n }\n\n SNodeKind::Solved(outcome) => (outcome.sign::<f32>() + 1.0) / 2.0,\n\n }\n\n}\n\n\n", "file_path": "src/bot/mcts_heuristic_bot.rs", "rank": 35, "score": 19788.930742346365 }, { "content": "pub mod board;\n\npub mod bot;\n\npub mod consts;\n\npub mod move_gen;\n\n\n\n// pub const SIZES: [usize; 2] = [3, 4];\n\n/// All possible sizes of [`Board`](board::Board), from 3 to 8\n\npub const SIZES: [usize; 6] = [3, 4, 5, 6, 7, 8];\n\n\n\nuse std::path::PathBuf;\n\n\n\n/// Returns the output file path for results\n", "file_path": "src/lib.rs", "rank": 36, "score": 14.411728943125825 }, { "content": " // we need keys (B, depth) because otherwise we risk miscounting if the same board is encountered at different depths\n\n let key = (board, depth);\n\n let board = &key.0;\n\n\n\n if let Some(&p) = map.get(&key) {\n\n return p;\n\n }\n\n\n\n let mut p = 0;\n\n board.available_moves().for_each(|mv: B::Move| {\n\n let new_board = board.clone_and_play(mv);\n\n println!(\"{mv}\");\n\n println!(\"{new_board}\");\n\n p += 
perft_recurse(map, new_board, depth - 1);\n\n });\n\n\n\n map.insert(key, p);\n\n p\n\n }\n\n let mut map = HashMap::default();\n\n perft_recurse(&mut map, board.clone(), depth)\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 37, "score": 12.652827656690782 }, { "content": " &mut self.rng,\n\n )\n\n }\n\n}\n\n\n\nimpl<R: Rng, B: Board, H: Heuristic<B, V = i32> + Clone> Bot<B> for MCTSHeuristicBot<B, H, R> {\n\n fn select_move(&mut self, board: &B) -> B::Move {\n\n assert!(!board.is_done());\n\n self.build_tree(board).best_move()\n\n }\n\n}\n", "file_path": "src/bot/mcts_heuristic_bot.rs", "rank": 38, "score": 12.487109201200688 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\n/// Returns an evaluation of how far the player's pawns are\n\npub struct AdvancementHeuristic;\n\n\n\nimpl Heuristic<Board> for AdvancementHeuristic {\n\n type V = i32;\n\n\n\n fn value(&self, board: &Board, depth: u32) -> Self::V {\n\n // if the board is done, it's infinity for winning, negative infinity for losing\n\n if board.is_done() {\n\n return SolverHeuristicSimplified.value(board, depth);\n\n }\n\n\n\n // return how far the pawns are\n\n advancement_eval(board.pieces_to_move().0, board.side_to_move()) as i32\n\n }\n\n\n", "file_path": "src/bot/heuristic.rs", "rank": 39, "score": 12.125711637142818 }, { "content": "#[derive(Debug, Clone)]\n\n/// Returns an evaluation of \\# player's pawns - \\# opponent's pawns\n\npub struct MaterialHeuristic;\n\n\n\nimpl Heuristic<Board> for MaterialHeuristic {\n\n type V = i32;\n\n\n\n fn value(&self, board: &Board, depth: u32) -> Self::V {\n\n // if the board is done, it's infinity for winning, negative infinity for losing\n\n if board.is_done() {\n\n return SolverHeuristicSimplified.value(board, depth);\n\n }\n\n\n\n // return the difference between the amount of the player's pawns and the amount of the opponent's pawns\n\n material_eval(board.pieces_to_move().0) as i32\n\n - material_eval(board.pieces_not_to_move().0) as i32\n\n 
}\n\n\n\n fn merge(old: Self::V, new: Self::V) -> (Self::V, std::cmp::Ordering) {\n\n (max(old, new), new.cmp(&old))\n", "file_path": "src/bot/heuristic.rs", "rank": 40, "score": 11.884861012242506 }, { "content": "}\n\n\n\nimpl<R: Rng + Debug> Bot<Board> for AlwaysCaptureBot<R> {\n\n fn select_move(&mut self, board: &Board) -> <Board as board_game::board::Board>::Move {\n\n MoveGen::with_mask(board, Mask::Capture)\n\n .choose(&mut self.rng)\n\n .unwrap_or_else(|| board.random_available_move(&mut self.rng))\n\n }\n\n}\n\n\n\nimpl<R: Rng> AlwaysCaptureBot<R> {\n\n /// Creates a new [`AlwaysCaptureBot`](AlwaysCaptureBot)\n\n pub fn new(rng: R) -> Self {\n\n AlwaysCaptureBot { rng }\n\n }\n\n}\n", "file_path": "src/bot/heuristic.rs", "rank": 41, "score": 11.761833796142128 }, { "content": " fn merge(old: Self::V, new: Self::V) -> (Self::V, std::cmp::Ordering) {\n\n (max(old, new), new.cmp(&old))\n\n }\n\n}\n\n\n\n/// The [`AlwaysPush`](AlwaysPushBot) bot. It always pushes a pawn or chooses a random move\n\npub struct AlwaysPushBot<R: Rng> {\n\n rng: R,\n\n}\n\n\n\nimpl<R: Rng> Debug for AlwaysPushBot<R> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"AlwaysPushBot\")\n\n }\n\n}\n\n\n\nimpl<R: Rng + Debug> Bot<Board> for AlwaysPushBot<R> {\n\n fn select_move(&mut self, board: &Board) -> <Board as BoardTrait>::Move {\n\n MoveGen::with_mask(board, Mask::Push)\n\n .choose(&mut self.rng)\n", "file_path": "src/bot/heuristic.rs", "rank": 42, "score": 11.664212538532936 }, { "content": "use std::{collections::HashMap, hash::Hash};\n\n\n\nuse super::board::*;\n\nuse super::move_gen::*;\n\n\n\nuse board_game::board::Board as BoardTrait;\n\nuse board_game::board::BoardMoves;\n\nuse board_game::board::Outcome;\n\nuse board_game::board::Player;\n\nuse chess::Square;\n\nuse internal_iterator::InternalIterator;\n\n\n\n// tweaked perft from board-game\n", "file_path": "src/tests.rs", "rank": 43, "score": 11.482373830287152 }, { "content": 
"//! Useful bitboard constants\n\n\n\nuse chess::BitBoard;\n\n\n\n/// An empty bitboard\n\npub const EMPTY: u64 = 0;\n\n/// An empty bitboard of [`chess`](chess)'s [`BitBoard`](chess::BitBoard) type\n\npub const EMPTY_BB: BitBoard = BitBoard(EMPTY);\n\n/// A full bitboard\n\npub const UNIVERSAL: u64 = 0xFFFFFFFFFFFFFFFF;\n\n\n\n/// The files of a bitboard, from file A \\[0\\] to file H \\[7\\]\n\npub const FILES: [u64; 8] = [\n\n 0x0101010101010101,\n\n 0x0202020202020202,\n\n 0x0404040404040404,\n\n 0x0808080808080808,\n\n 0x1010101010101010,\n\n 0x2020202020202020,\n\n 0x4040404040404040,\n", "file_path": "src/consts.rs", "rank": 44, "score": 10.526760711612477 }, { "content": "impl<B: Board, H: Heuristic<B, V = i32> + Clone, R: Rng> MCTSHeuristicBot<B, H, R> {\n\n /// Creates a new [`MCTSHeuristicbot`](MCTSHeuristicBot)\n\n pub fn new(iterations: u64, exploration_weight: f32, heuristic: H, rng: R) -> Self {\n\n assert!(iterations > 0);\n\n MCTSHeuristicBot {\n\n iterations,\n\n exploration_weight,\n\n heuristic,\n\n rng,\n\n place_holder: PhantomData,\n\n }\n\n }\n\n\n\n /// Creates a `MCTS`[`Tree`](Tree)\n\n pub fn build_tree(&mut self, board: &B) -> Tree<B> {\n\n mcts_build_tree(\n\n board,\n\n self.iterations,\n\n self.exploration_weight,\n\n self.heuristic.clone(),\n", "file_path": "src/bot/mcts_heuristic_bot.rs", "rank": 45, "score": 10.439425247423657 }, { "content": " .unwrap_or_else(|| board.random_available_move(&mut self.rng))\n\n }\n\n}\n\n\n\nimpl<R: Rng> AlwaysPushBot<R> {\n\n /// Creates a new [`AlwaysPushBot`](AlwaysPushBot)\n\n pub fn new(rng: R) -> Self {\n\n AlwaysPushBot { rng }\n\n }\n\n}\n\n\n\n/// The [`AlwaysCapture`](AlwaysCaptureBot) bot. 
It always captures a pawn or chooses a random move\n\npub struct AlwaysCaptureBot<R: Rng> {\n\n rng: R,\n\n}\n\n\n\nimpl<R: Rng> Debug for AlwaysCaptureBot<R> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"AlwaysCaptureBot\")\n\n }\n", "file_path": "src/bot/heuristic.rs", "rank": 46, "score": 9.45519977672364 }, { "content": "//! Heuristics ([`Advancement`](AdvancementHeuristic), [`Material`](MaterialHeuristic))\n\n//! and the heuristic bots ([`AlwaysPush`](AlwaysPushBot), [`AlwaysCapture`](AlwaysCaptureBot))\n\n\n\nuse std::{cmp::max, fmt::Debug};\n\n\n\nuse board_game::{\n\n ai::{minimax::Heuristic, solver::SolverHeuristic, Bot},\n\n board::Board as BoardTrait,\n\n};\n\nuse chess::Color;\n\nuse rand::{prelude::IteratorRandom, Rng};\n\n\n\nuse crate::{\n\n board::Board,\n\n move_gen::{Mask, MoveGen},\n\n};\n\n\n\n/// North [pawn fill](https://www.chessprogramming.org/Pawn_Fills)\n\n/// using parallel prefix [Kogge-Stone routines](https://www.chessprogramming.org/Kogge-Stone_Algorithm)\n\n#[allow(non_snake_case)]\n", "file_path": "src/bot/heuristic.rs", "rank": 47, "score": 9.160689376626381 }, { "content": " 0,\n\n 0x0000000000070707,\n\n 0x000000000F0F0F0F,\n\n 0x0000001F1F1F1F1F,\n\n 0x00003F3F3F3F3F3F,\n\n 0x007F7F7F7F7F7F7F,\n\n 0xFFFFFFFFFFFFFFFF,\n\n];\n\n\n\n/// The starting positions for white.\n\n/// [`START_POS_WHITE`](START_POS_WHITE)`\\[0\\]` and [`START_POS_WHITE`](START_POS_WHITE)`\\[1\\]` are padding\n\npub const START_POS_WHITE: [u64; 8] = [\n\n 0,\n\n 0,\n\n 0x0000000000000007,\n\n 0x000000000000000F,\n\n 0x000000000000001F,\n\n 0x000000000000003F,\n\n 0x000000000000007F,\n\n 0x00000000000000FF,\n", "file_path": "src/consts.rs", "rank": 48, "score": 7.56052207110849 }, { "content": "\n\n // TODO: can use static eval for move choice, might not have large effect\n\n curr_board.available_moves().for_each(|mv: B::Move| {\n\n let next_board = curr_board.clone_and_play(mv);\n\n let outcome = 
next_board.outcome().pov(curr_board.next_player());\n\n let node = new_node(Some(mv), outcome);\n\n tree.nodes.push(node);\n\n });\n\n\n\n let length = tree.nodes.len() - start.get();\n\n let children = IdxRange { start, length };\n\n tree[curr_node].children = Some(children);\n\n\n\n //TODO maybe do this even earlier, and immediately stop pushing nodes -> but then children are inconsistent :(\n\n // so what? who care about children somewhere deep in the tree!\n\n let outcome =\n\n OutcomeWDL::best_maybe(children.iter().map(|c| tree[c].solution()).into_internal());\n\n if let Some(outcome) = outcome.flip() {\n\n tree[curr_node].mark_solved(outcome);\n\n return (outcome, true);\n", "file_path": "src/bot/mcts_heuristic_bot.rs", "rank": 49, "score": 7.4689803342099435 }, { "content": "//! A copy of [`board-game`](board-game)'s [`MCTSBot`](board-game::ai::mcts::MCTSBot)\n\n//! with progressive bias\n\n\n\nuse std::fmt::{Debug, Formatter};\n\nuse std::marker::PhantomData;\n\nuse std::num::NonZeroUsize;\n\n\n\nuse board_game::ai::mcts::{IdxRange, Node, SNodeKind, Tree};\n\nuse board_game::ai::minimax::Heuristic;\n\nuse decorum::N32;\n\nuse internal_iterator::{InternalIterator, IteratorExt};\n\nuse rand::Rng;\n\n\n\nuse board_game::ai::Bot;\n\nuse board_game::board::{Board, Outcome};\n\nuse board_game::wdl::{Flip, OutcomeWDL, POV, WDL};\n\nuse rand::prelude::IteratorRandom;\n\n\n", "file_path": "src/bot/mcts_heuristic_bot.rs", "rank": 50, "score": 7.191787452807622 }, { "content": " }\n\n\n\n mcts_solver_step(\n\n &mut tree,\n\n 0,\n\n root_board,\n\n exploration_weight,\n\n heuristic.clone(),\n\n rng,\n\n );\n\n }\n\n\n\n tree\n\n}\n\n\n\n/// A copy of [`board-game`](board-game)'s [`MCTSBot`](board-game::ai::mcts::MCTSBot)\n\n/// with progressive bias\n\npub struct MCTSHeuristicBot<B: Board, H: Heuristic<B>, R: Rng> {\n\n /// How many iterations (MCTS playouts)\n\n iterations: u64,\n", "file_path": "src/bot/mcts_heuristic_bot.rs", "rank": 51, "score": 6.937938653813104 
}, { "content": " } else {\n\n children\n\n }\n\n }\n\n };\n\n\n\n // check if there are unvisited children\n\n let unvisited = children.iter().filter(|&c| tree[c].is_unvisited());\n\n let picked_unvisited = unvisited.choose(rng);\n\n\n\n // result is from the POV of curr_board.next_player\n\n let (result, proven) = if let Some(picked_child) = picked_unvisited {\n\n let picked_mv = tree[picked_child].last_move.unwrap();\n\n let next_board = curr_board.clone_and_play(picked_mv);\n\n\n\n let outcome = random_playout(next_board, rng).pov(curr_board.next_player().other());\n\n tree[picked_child].increment(outcome);\n\n\n\n (outcome.flip(), false)\n\n } else {\n", "file_path": "src/bot/mcts_heuristic_bot.rs", "rank": 52, "score": 6.644198644672009 }, { "content": " 0x8080808080808080,\n\n];\n\n\n\n// ranks 1-8\n\n/// The ranks of a bitboard, from rank 1 \\[0\\] to rank 8 \\[7\\]\n\npub const RANKS: [u64; 8] = [\n\n 0x00000000000000FF,\n\n 0x000000000000FF00,\n\n 0x0000000000FF0000,\n\n 0x00000000FF000000,\n\n 0x000000FF00000000,\n\n 0x0000FF0000000000,\n\n 0x00FF000000000000,\n\n 0xFF00000000000000,\n\n];\n\n\n\n/// The playing area for hexapawn.\n\n/// [`BOARD_MASKS`](BOARD_MASKS)`\\[0\\]` and [`BOARD_MASKS`](BOARD_MASKS)`[1]` are padding\n\npub const BOARD_MASKS: [u64; 8] = [\n\n 0,\n", "file_path": "src/consts.rs", "rank": 53, "score": 6.59651317725878 }, { "content": " /// The exploration factor, used in UCT\n\n exploration_weight: f32,\n\n /// A Heuristic\n\n heuristic: H,\n\n /// Random number generation for random playouts\n\n rng: R,\n\n /// A marker for the type of board\n\n place_holder: PhantomData<B>,\n\n}\n\n\n\nimpl<B: Board, H: Heuristic<B>, R: Rng> Debug for MCTSHeuristicBot<B, H, R> {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n write!(\n\n f,\n\n \"MCTSHeuristicBot {{ iterations: {}, exploration_weight: {}, heuristic: {:?} }}\",\n\n self.iterations, self.exploration_weight, self.heuristic,\n\n )\n\n }\n\n}\n\n\n", "file_path": 
"src/bot/mcts_heuristic_bot.rs", "rank": 54, "score": 6.521246461860917 }, { "content": " //pick the max-uct child\n\n //TODO we're including lost and drawn nodes here, is there nothing better we can do?\n\n // at least this is what the paper seems to suggest\n\n let parent_visits = tree[curr_node].visits;\n\n\n\n let picked = children\n\n .iter()\n\n .max_by_key(|&c| {\n\n N32::from(uct_heuristic(\n\n &tree[c],\n\n parent_visits,\n\n exploration_weight,\n\n heuristic.value(curr_board, 0) as f32,\n\n ))\n\n })\n\n .unwrap();\n\n\n\n //continue recursing\n\n let picked_mv = tree[picked].last_move.unwrap();\n\n let next_board = curr_board.clone_and_play(picked_mv);\n", "file_path": "src/bot/mcts_heuristic_bot.rs", "rank": 55, "score": 6.3025333452108985 }, { "content": "//! Implemented bots not in [`board-game`](board-game)\n\n\n\npub mod heuristic;\n\npub mod mcts_heuristic_bot;\n", "file_path": "src/bot/mod.rs", "rank": 56, "score": 5.810767774663091 }, { "content": "use std::{\n\n fmt::Debug,\n\n fs::File,\n\n io::{BufWriter, Write},\n\n};\n\n\n\nuse board_game::{\n\n ai::{mcts::MCTSBot, minimax::MiniMaxBot, simple::RandomBot},\n\n util::bot_game::{run, BotGameResult},\n\n wdl::WDL,\n\n};\n\nuse rand::thread_rng;\n\nuse sf21_22::{\n\n board::Board,\n\n bot::{\n\n heuristic::{\n\n AdvancementHeuristic, AlwaysCaptureBot, AlwaysPushBot, MaterialHeuristic,\n\n SolverHeuristicSimplified,\n\n },\n\n mcts_heuristic_bot::MCTSHeuristicBot,\n\n },\n\n output_path, SIZES,\n\n};\n\nuse time::OffsetDateTime;\n\n\n", "file_path": "src/main.rs", "rank": 57, "score": 5.576526699980373 }, { "content": "macro_rules! 
r {\n\n ($buf:expr, $size:expr, $l:expr, $r:expr) => {\n\n $buf.write_fmt(format_args!(\n\n \"{:?}\",\n\n x(run(\n\n || Board::new($size),\n\n $l,\n\n $r,\n\n GAMES_PER_SIDE,\n\n BOTH_SIDES,\n\n |_, _| {}\n\n ))\n\n ))\n\n .unwrap();\n\n };\n\n}\n\n\n\npub struct BotResult {\n\n wdl_l: WDL<u32>,\n\n debug_l: String,\n", "file_path": "src/main.rs", "rank": 58, "score": 5.453884794427269 }, { "content": "];\n\n\n\n/// The starting positions for black.\n\n/// [`START_POS_BLACK`](START_POS_BLACK)`\\[0\\]` and [`START_POS_BLACK`](START_POS_BLACK)`\\[1\\]` are padding\n\npub const START_POS_BLACK: [u64; 8] = [\n\n 0,\n\n 0,\n\n 0x0000000000070000,\n\n 0x000000000F000000,\n\n 0x0000001F00000000,\n\n 0x00003F0000000000,\n\n 0x007F000000000000,\n\n 0xFF00000000000000,\n\n];\n", "file_path": "src/consts.rs", "rank": 59, "score": 5.1932937836246085 }, { "content": " debug_r: String,\n\n time_l: f32,\n\n time_r: f32,\n\n}\n\n\n\nimpl Debug for BotResult {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n // writeln!(f, \"L: {} W{}|D{}|L{} R: {}\", self.debug_l, self.wdl_l.win, self.wdl_l.draw, self.wdl_l.loss, self.debug_r)\n\n writeln!(\n\n f,\n\n \"L: {} (t: {:.4}) | W:{},D:{},L:{} | R: {} (t: {:.4})\",\n\n self.debug_l,\n\n self.time_l,\n\n self.wdl_l.win,\n\n self.wdl_l.draw,\n\n self.wdl_l.loss,\n\n self.debug_r,\n\n self.time_r\n\n )\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 60, "score": 2.6266555858502074 }, { "content": " buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng()),\n\n || RandomBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng()),\n\n || AlwaysPushBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng()),\n\n || AlwaysCaptureBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n", "file_path": "src/main.rs", "rank": 61, "score": 
2.312621207730686 }, { "content": " buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng()),\n\n || RandomBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng()),\n\n || AlwaysPushBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng()),\n\n || AlwaysCaptureBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n", "file_path": "src/main.rs", "rank": 62, "score": 2.312621207730686 }, { "content": " buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng()),\n\n || RandomBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng()),\n\n || AlwaysPushBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng()),\n\n || AlwaysCaptureBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n", "file_path": "src/main.rs", "rank": 63, "score": 2.2698227607488803 }, { "content": " AlwaysPushBot::new(thread_rng())\n\n });\n\n r!(buf, size, || RandomBot::new(thread_rng()), || {\n\n AlwaysCaptureBot::new(thread_rng())\n\n });\n\n r!(buf, size, || RandomBot::new(thread_rng()), || {\n\n MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng())\n\n });\n\n r!(buf, size, || RandomBot::new(thread_rng()), || {\n\n MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng())\n\n });\n\n r!(buf, size, || RandomBot::new(thread_rng()), || {\n\n MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng())\n\n });\n\n r!(buf, size, || RandomBot::new(thread_rng()), || MCTSBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n thread_rng()\n\n ));\n\n r!(buf, size, || RandomBot::new(thread_rng()), || {\n", "file_path": "src/main.rs", "rank": 64, "score": 2.2515529385166015 }, { "content": " )\n\n });\n\n r!(buf, 
size, || AlwaysPushBot::new(thread_rng()), || {\n\n MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n AdvancementHeuristic,\n\n thread_rng(),\n\n )\n\n });\n\n\n\n println!(\"Running (at {}): AlwaysCapture\", OffsetDateTime::now_utc());\n\n r!(buf, size, || AlwaysCaptureBot::new(thread_rng()), || {\n\n RandomBot::new(thread_rng())\n\n });\n\n r!(buf, size, || AlwaysCaptureBot::new(thread_rng()), || {\n\n AlwaysPushBot::new(thread_rng())\n\n });\n\n r!(buf, size, || AlwaysCaptureBot::new(thread_rng()), || {\n\n AlwaysCaptureBot::new(thread_rng())\n", "file_path": "src/main.rs", "rank": 65, "score": 2.1504320637009298 }, { "content": " thread_rng(),\n\n )\n\n });\n\n\n\n println!(\"Running (at {}): AlwaysPush\", OffsetDateTime::now_utc());\n\n r!(buf, size, || AlwaysPushBot::new(thread_rng()), || {\n\n RandomBot::new(thread_rng())\n\n });\n\n r!(buf, size, || AlwaysPushBot::new(thread_rng()), || {\n\n AlwaysPushBot::new(thread_rng())\n\n });\n\n r!(buf, size, || AlwaysPushBot::new(thread_rng()), || {\n\n AlwaysCaptureBot::new(thread_rng())\n\n });\n\n r!(buf, size, || AlwaysPushBot::new(thread_rng()), || {\n\n MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng())\n\n });\n\n r!(buf, size, || AlwaysPushBot::new(thread_rng()), || {\n\n MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng())\n\n });\n", "file_path": "src/main.rs", "rank": 66, "score": 2.133113068871966 }, { "content": " r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n MaterialHeuristic,\n\n thread_rng()\n\n ),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n SolverHeuristicSimplified,\n\n thread_rng()\n\n )\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n", "file_path": "src/main.rs", "rank": 67, "score": 2.112724392119434 }, { "content": " );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n 
AdvancementHeuristic,\n\n thread_rng()\n\n ),\n\n || AlwaysPushBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n AdvancementHeuristic,\n\n thread_rng()\n", "file_path": "src/main.rs", "rank": 68, "score": 2.088737880652947 }, { "content": " });\n\n r!(buf, size, || AlwaysCaptureBot::new(thread_rng()), || {\n\n MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng())\n\n });\n\n r!(buf, size, || AlwaysCaptureBot::new(thread_rng()), || {\n\n MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng())\n\n });\n\n r!(buf, size, || AlwaysCaptureBot::new(thread_rng()), || {\n\n MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng())\n\n });\n\n r!(buf, size, || AlwaysCaptureBot::new(thread_rng()), || {\n\n MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, thread_rng())\n\n });\n\n r!(buf, size, || AlwaysCaptureBot::new(thread_rng()), || {\n\n MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n SolverHeuristicSimplified,\n\n thread_rng(),\n\n )\n", "file_path": "src/main.rs", "rank": 69, "score": 2.0847691182290706 }, { "content": " r!(buf, size, || AlwaysPushBot::new(thread_rng()), || {\n\n MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng())\n\n });\n\n r!(buf, size, || AlwaysPushBot::new(thread_rng()), || {\n\n MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, thread_rng())\n\n });\n\n r!(buf, size, || AlwaysPushBot::new(thread_rng()), || {\n\n MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n SolverHeuristicSimplified,\n\n thread_rng(),\n\n )\n\n });\n\n r!(buf, size, || AlwaysPushBot::new(thread_rng()), || {\n\n MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n MaterialHeuristic,\n\n thread_rng(),\n", "file_path": "src/main.rs", "rank": 70, "score": 2.0659146035813105 }, { "content": " MaterialHeuristic,\n\n thread_rng()\n\n ),\n\n || AlwaysPushBot::new(thread_rng())\n\n );\n\n r!(\n\n 
buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n MaterialHeuristic,\n\n thread_rng()\n\n ),\n\n || AlwaysCaptureBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n", "file_path": "src/main.rs", "rank": 71, "score": 2.0423625438975668 }, { "content": " thread_rng()\n\n ),\n\n || RandomBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n SolverHeuristicSimplified,\n\n thread_rng()\n\n ),\n\n || AlwaysPushBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n", "file_path": "src/main.rs", "rank": 72, "score": 2.0423625438975668 }, { "content": " \"Running (at {}): MCTSAdvancement\",\n\n OffsetDateTime::now_utc()\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n MaterialHeuristic,\n\n thread_rng()\n\n ),\n\n || RandomBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n", "file_path": "src/main.rs", "rank": 73, "score": 2.019938635348021 }, { "content": " || RandomBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, thread_rng()),\n\n || AlwaysPushBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, thread_rng()),\n\n || AlwaysCaptureBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, thread_rng()),\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng())\n\n );\n", "file_path": "src/main.rs", "rank": 74, "score": 1.9769176793429972 }, { "content": " r!(\n\n buf,\n\n size,\n\n || MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, thread_rng()),\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng())\n\n );\n\n 
r!(\n\n buf,\n\n size,\n\n || MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, thread_rng()),\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, thread_rng()),\n\n || MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, thread_rng())\n\n );\n\n r!(\n\n buf,\n", "file_path": "src/main.rs", "rank": 75, "score": 1.9353253853100665 }, { "content": " ),\n\n || AlwaysCaptureBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n AdvancementHeuristic,\n\n thread_rng()\n\n ),\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n", "file_path": "src/main.rs", "rank": 76, "score": 1.9148207522254328 }, { "content": " MCTS_EXPLORATION,\n\n SolverHeuristicSimplified,\n\n thread_rng()\n\n ),\n\n || AlwaysCaptureBot::new(thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n SolverHeuristicSimplified,\n\n thread_rng()\n\n ),\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n", "file_path": "src/main.rs", "rank": 77, "score": 1.9148207522254328 }, { "content": " buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n SolverHeuristicSimplified,\n\n thread_rng()\n\n ),\n\n || MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n SolverHeuristicSimplified,\n\n thread_rng()\n\n ),\n\n || MCTSHeuristicBot::new(\n", "file_path": "src/main.rs", "rank": 78, "score": 1.895096535593455 }, { "content": " r!(\n\n buf,\n\n size,\n\n || MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, 
thread_rng()),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n AdvancementHeuristic,\n\n thread_rng()\n\n )\n\n );\n\n\n\n println!(\"Running (at {}): MCTSSolver\", OffsetDateTime::now_utc());\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n SolverHeuristicSimplified,\n", "file_path": "src/main.rs", "rank": 79, "score": 1.8568425479809996 }, { "content": " AdvancementHeuristic,\n\n thread_rng()\n\n ),\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n AdvancementHeuristic,\n\n thread_rng()\n\n ),\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n", "file_path": "src/main.rs", "rank": 80, "score": 1.8382889071083945 }, { "content": " thread_rng()\n\n )\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng()),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n AdvancementHeuristic,\n\n thread_rng()\n\n )\n\n );\n\n\n\n println!(\"Running (at {}): MCTS\", OffsetDateTime::now_utc());\n\n r!(\n\n buf,\n\n size,\n\n || MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, thread_rng()),\n", "file_path": "src/main.rs", "rank": 81, "score": 1.8382889071083945 }, { "content": " size,\n\n || MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, thread_rng()),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n SolverHeuristicSimplified,\n\n thread_rng()\n\n )\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, thread_rng()),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n MaterialHeuristic,\n\n thread_rng()\n\n )\n\n );\n", "file_path": "src/main.rs", "rank": 82, "score": 1.8382889071083945 }, { "content": " size,\n\n || 
MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n MaterialHeuristic,\n\n thread_rng()\n\n ),\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n MaterialHeuristic,\n\n thread_rng()\n\n ),\n\n || MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, thread_rng())\n\n );\n", "file_path": "src/main.rs", "rank": 83, "score": 1.8201023754276235 }, { "content": " MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n SolverHeuristicSimplified,\n\n thread_rng(),\n\n )\n\n });\n\n r!(buf, size, || RandomBot::new(thread_rng()), || {\n\n MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n MaterialHeuristic,\n\n thread_rng(),\n\n )\n\n });\n\n r!(buf, size, || RandomBot::new(thread_rng()), || {\n\n MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n AdvancementHeuristic,\n", "file_path": "src/main.rs", "rank": 84, "score": 1.7847879027020017 }, { "content": " );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng()),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n SolverHeuristicSimplified,\n\n thread_rng()\n\n )\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng()),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n MaterialHeuristic,\n", "file_path": "src/main.rs", "rank": 85, "score": 1.7847879027020017 }, { "content": " });\n\n r!(buf, size, || AlwaysCaptureBot::new(thread_rng()), || {\n\n MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n MaterialHeuristic,\n\n thread_rng(),\n\n )\n\n });\n\n r!(buf, size, || AlwaysCaptureBot::new(thread_rng()), || {\n\n MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n AdvancementHeuristic,\n\n thread_rng(),\n\n )\n\n });\n\n\n\n println!(\"Running (at 
{}): MiniMax\", OffsetDateTime::now_utc());\n\n r!(\n", "file_path": "src/main.rs", "rank": 86, "score": 1.7847879027020017 }, { "content": " );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng()),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n SolverHeuristicSimplified,\n\n thread_rng()\n\n )\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng()),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n MaterialHeuristic,\n", "file_path": "src/main.rs", "rank": 87, "score": 1.7847879027020017 }, { "content": " );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng()),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n SolverHeuristicSimplified,\n\n thread_rng()\n\n )\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng()),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n MaterialHeuristic,\n", "file_path": "src/main.rs", "rank": 88, "score": 1.7508177230231197 }, { "content": " || MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng()),\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng()),\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng()),\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng()),\n\n || MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, thread_rng())\n", "file_path": "src/main.rs", "rank": 89, "score": 1.61767319440811 }, { "content": " || 
MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng()),\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng()),\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng()),\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng()),\n\n || MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, thread_rng())\n", "file_path": "src/main.rs", "rank": 90, "score": 1.61767319440811 }, { "content": " || MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng()),\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng()),\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng()),\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, AdvancementHeuristic, thread_rng())\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng()),\n\n || MCTSBot::new(MCTS_ITERATIONS, MCTS_EXPLORATION, thread_rng())\n", "file_path": "src/main.rs", "rank": 91, "score": 1.5806115259744922 }, { "content": " )\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n SolverHeuristicSimplified,\n\n thread_rng()\n\n ),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n AdvancementHeuristic,\n\n thread_rng()\n\n )\n\n );\n\n\n\n println!(\n", "file_path": "src/main.rs", "rank": 92, "score": 1.570646188756517 }, { "content": " thread_rng()\n\n )\n\n );\n\n 
r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n AdvancementHeuristic,\n\n thread_rng()\n\n ),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n MaterialHeuristic,\n\n thread_rng()\n\n )\n\n );\n\n r!(\n", "file_path": "src/main.rs", "rank": 93, "score": 1.570646188756517 }, { "content": " buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n AdvancementHeuristic,\n\n thread_rng()\n\n ),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n AdvancementHeuristic,\n\n thread_rng()\n\n )\n\n );\n\n }\n\n buf.flush().unwrap()\n\n}\n\n\n\n#[macro_export]\n", "file_path": "src/main.rs", "rank": 94, "score": 1.4941124046208496 }, { "content": " MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n MaterialHeuristic,\n\n thread_rng()\n\n ),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n MaterialHeuristic,\n\n thread_rng()\n\n )\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n MaterialHeuristic,\n\n thread_rng()\n", "file_path": "src/main.rs", "rank": 95, "score": 1.4246906789250864 }, { "content": " MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n SolverHeuristicSimplified,\n\n thread_rng()\n\n )\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n SolverHeuristicSimplified,\n\n thread_rng()\n\n ),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n MaterialHeuristic,\n\n thread_rng()\n", "file_path": "src/main.rs", "rank": 96, "score": 1.3818858233359967 }, { "content": " thread_rng()\n\n )\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, MaterialHeuristic, thread_rng()),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n AdvancementHeuristic,\n\n thread_rng()\n\n )\n\n );\n\n\n\n println!(\n\n \"Running (at {}): 
MiniMaxMaterial\",\n\n OffsetDateTime::now_utc()\n\n );\n\n r!(\n", "file_path": "src/main.rs", "rank": 97, "score": 1.3614336744274493 }, { "content": " thread_rng()\n\n )\n\n );\n\n r!(\n\n buf,\n\n size,\n\n || MiniMaxBot::new(MIN_MAX_DEPTH, SolverHeuristicSimplified, thread_rng()),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n AdvancementHeuristic,\n\n thread_rng()\n\n )\n\n );\n\n\n\n println!(\n\n \"Running (at {}): MiniMaxAdvancement\",\n\n OffsetDateTime::now_utc()\n\n );\n\n r!(\n", "file_path": "src/main.rs", "rank": 98, "score": 1.3415780869801996 }, { "content": " ),\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n AdvancementHeuristic,\n\n thread_rng()\n\n )\n\n );\n\n\n\n println!(\"Running (at {}): MCTSMaterial\", OffsetDateTime::now_utc());\n\n r!(\n\n buf,\n\n size,\n\n || MCTSHeuristicBot::new(\n\n MCTS_ITERATIONS,\n\n MCTS_EXPLORATION,\n\n AdvancementHeuristic,\n\n thread_rng()\n\n ),\n\n || RandomBot::new(thread_rng())\n", "file_path": "src/main.rs", "rank": 99, "score": 1.303555150184991 } ]
Rust
amethyst_input/src/gilrs_events_system.rs
niconicoj/amethyst
83fa765987fabb3a5f43e1b2f23381c880d52a6c
use std::{ collections::{hash_map::DefaultHasher, HashMap}, fmt, hash::{Hash, Hasher}, marker::PhantomData, }; use derivative::Derivative; use derive_new::new; use gilrs::{Axis, Button, Event, EventType, GamepadId, Gilrs}; use amethyst_core::{ ecs::prelude::{System, SystemData, World, Write}, shrev::EventChannel, SystemDesc, }; use super::{ controller::{ControllerAxis, ControllerButton, ControllerEvent}, BindingTypes, InputEvent, InputHandler, }; #[derive(Debug)] pub enum GilrsSystemError { ContextInit(String), ControllerSubsystemInit(String), } impl fmt::Display for GilrsSystemError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { GilrsSystemError::ContextInit(ref msg) => { write!(f, "Failed to initialize SDL: {}", msg) } GilrsSystemError::ControllerSubsystemInit(ref msg) => { write!(f, "Failed to initialize SDL controller subsystem: {}", msg) } } } } #[derive(Derivative, Debug, new)] #[derivative(Default(bound = ""))] pub struct GilrsEventsSystemDesc<T> where T: BindingTypes, { marker: PhantomData<T>, } impl<'a, 'b, T> SystemDesc<'a, 'b, GilrsEventsSystem<T>> for GilrsEventsSystemDesc<T> where T: BindingTypes, { fn build(self, world: &mut World) -> GilrsEventsSystem<T> { <GilrsEventsSystem<T> as System<'_>>::SystemData::setup(world); GilrsEventsSystem::new(world) .unwrap_or_else(|e| panic!("Failed to build SdlEventsSystem. 
Error: {}", e)) } } #[allow(missing_debug_implementations)] pub struct GilrsEventsSystem<T: BindingTypes> { gilrs_handle: Gilrs, opened_controllers: HashMap<GamepadId, u32>, marker: PhantomData<T>, } type GilrsEventsData<'a, T> = ( Write<'a, InputHandler<T>>, Write<'a, EventChannel<InputEvent<T>>>, ); impl<'a, T: BindingTypes> System<'a> for GilrsEventsSystem<T> { type SystemData = GilrsEventsData<'a, T>; fn run(&mut self, (mut handler, mut output): Self::SystemData) { while let Some(Event { id, event, time: _ }) = self.gilrs_handle.next_event() { self.handle_gilrs_event(&id, &event, &mut handler, &mut output); } } } impl<T: BindingTypes> GilrsEventsSystem<T> { pub fn new(world: &mut World) -> Result<Self, GilrsSystemError> { let gilrs_handle: Gilrs = Gilrs::new().unwrap(); GilrsEventsData::<T>::setup(world); let mut sys = GilrsEventsSystem { gilrs_handle, opened_controllers: HashMap::new(), marker: PhantomData, }; let (mut handler, mut output) = GilrsEventsData::fetch(world); sys.initialize_controllers(&mut handler, &mut output); Ok(sys) } fn handle_gilrs_event( &mut self, gamepad_id: &GamepadId, event_type: &EventType, handler: &mut InputHandler<T>, output: &mut EventChannel<InputEvent<T>>, ) { use self::ControllerEvent::*; if let Some(idx) = self.opened_controllers.get(gamepad_id) { match *event_type { EventType::AxisChanged(axis, value, _code) => { handler.send_controller_event( &ControllerAxisMoved { which: *idx, axis: axis.into(), value: value, }, output, ); } EventType::ButtonReleased(button, _code) => { handler.send_controller_event( &ControllerButtonReleased { which: *idx, button: button.into(), }, output, ); } EventType::ButtonPressed(button, _code) => { handler.send_controller_event( &ControllerButtonPressed { which: *idx, button: button.into(), }, output, ); } EventType::Disconnected => { if let Some(idx) = self.close_controller(*gamepad_id) { handler .send_controller_event(&ControllerDisconnected { which: idx }, output); } } EventType::Connected => { 
if let Some(idx) = self.open_controller(*gamepad_id) { handler.send_controller_event(&ControllerConnected { which: idx }, output); } } _ => {} } } else { match *event_type { EventType::Connected => { if let Some(idx) = self.open_controller(*gamepad_id) { handler.send_controller_event(&ControllerConnected { which: idx }, output); } } _ => {} } } } fn open_controller(&mut self, which: GamepadId) -> Option<u32> { match self.gilrs_handle.connected_gamepad(which) { Some(_) => { let idx = self.my_hash(which) as u32; self.opened_controllers.insert(which, idx); Some(idx) } None => None, } } fn close_controller(&mut self, which: GamepadId) -> Option<u32> { self.opened_controllers.remove(&which) } fn initialize_controllers( &mut self, handler: &mut InputHandler<T>, output: &mut EventChannel<InputEvent<T>>, ) { use crate::controller::ControllerEvent::ControllerConnected; for (_id, gamepad) in self.gilrs_handle.gamepads() { let idx = self.my_hash(gamepad.id()) as u32; self.opened_controllers.insert(gamepad.id(), idx); handler.send_controller_event(&ControllerConnected { which: idx }, output); } } fn my_hash<U>(&self, obj: U) -> u64 where U: Hash, { let mut hasher = DefaultHasher::new(); obj.hash(&mut hasher); hasher.finish() } } impl From<Button> for ControllerButton { fn from(button: Button) -> Self { match button { Button::South => ControllerButton::A, Button::East => ControllerButton::B, Button::West => ControllerButton::X, Button::North => ControllerButton::Y, Button::DPadDown => ControllerButton::DPadDown, Button::DPadLeft => ControllerButton::DPadLeft, Button::DPadRight => ControllerButton::DPadRight, Button::DPadUp => ControllerButton::DPadUp, Button::LeftTrigger => ControllerButton::LeftShoulder, Button::RightTrigger => ControllerButton::RightShoulder, Button::LeftThumb => ControllerButton::LeftStick, Button::RightThumb => ControllerButton::RightStick, Button::Select => ControllerButton::Back, Button::Start => ControllerButton::Start, Button::Mode => 
ControllerButton::Guide, Button::LeftTrigger2 => ControllerButton::LeftTrigger, Button::RightTrigger2 => ControllerButton::RightTrigger, _ => ControllerButton::Unknown, } } } impl From<Axis> for ControllerAxis { fn from(axis: Axis) -> Self { match axis { Axis::LeftStickX => ControllerAxis::LeftX, Axis::LeftStickY => ControllerAxis::LeftY, Axis::RightStickX => ControllerAxis::RightX, Axis::RightStickY => ControllerAxis::RightY, Axis::LeftZ => ControllerAxis::LeftTrigger, Axis::RightZ => ControllerAxis::RightTrigger, _ => ControllerAxis::Unknown, } } }
use std::{ collections::{hash_map::DefaultHasher, HashMap}, fmt, hash::{Hash, Has
er: PhantomData<T>, } type GilrsEventsData<'a, T> = ( Write<'a, InputHandler<T>>, Write<'a, EventChannel<InputEvent<T>>>, ); impl<'a, T: BindingTypes> System<'a> for GilrsEventsSystem<T> { type SystemData = GilrsEventsData<'a, T>; fn run(&mut self, (mut handler, mut output): Self::SystemData) { while let Some(Event { id, event, time: _ }) = self.gilrs_handle.next_event() { self.handle_gilrs_event(&id, &event, &mut handler, &mut output); } } } impl<T: BindingTypes> GilrsEventsSystem<T> { pub fn new(world: &mut World) -> Result<Self, GilrsSystemError> { let gilrs_handle: Gilrs = Gilrs::new().unwrap(); GilrsEventsData::<T>::setup(world); let mut sys = GilrsEventsSystem { gilrs_handle, opened_controllers: HashMap::new(), marker: PhantomData, }; let (mut handler, mut output) = GilrsEventsData::fetch(world); sys.initialize_controllers(&mut handler, &mut output); Ok(sys) } fn handle_gilrs_event( &mut self, gamepad_id: &GamepadId, event_type: &EventType, handler: &mut InputHandler<T>, output: &mut EventChannel<InputEvent<T>>, ) { use self::ControllerEvent::*; if let Some(idx) = self.opened_controllers.get(gamepad_id) { match *event_type { EventType::AxisChanged(axis, value, _code) => { handler.send_controller_event( &ControllerAxisMoved { which: *idx, axis: axis.into(), value: value, }, output, ); } EventType::ButtonReleased(button, _code) => { handler.send_controller_event( &ControllerButtonReleased { which: *idx, button: button.into(), }, output, ); } EventType::ButtonPressed(button, _code) => { handler.send_controller_event( &ControllerButtonPressed { which: *idx, button: button.into(), }, output, ); } EventType::Disconnected => { if let Some(idx) = self.close_controller(*gamepad_id) { handler .send_controller_event(&ControllerDisconnected { which: idx }, output); } } EventType::Connected => { if let Some(idx) = self.open_controller(*gamepad_id) { handler.send_controller_event(&ControllerConnected { which: idx }, output); } } _ => {} } } else { match *event_type { 
EventType::Connected => { if let Some(idx) = self.open_controller(*gamepad_id) { handler.send_controller_event(&ControllerConnected { which: idx }, output); } } _ => {} } } } fn open_controller(&mut self, which: GamepadId) -> Option<u32> { match self.gilrs_handle.connected_gamepad(which) { Some(_) => { let idx = self.my_hash(which) as u32; self.opened_controllers.insert(which, idx); Some(idx) } None => None, } } fn close_controller(&mut self, which: GamepadId) -> Option<u32> { self.opened_controllers.remove(&which) } fn initialize_controllers( &mut self, handler: &mut InputHandler<T>, output: &mut EventChannel<InputEvent<T>>, ) { use crate::controller::ControllerEvent::ControllerConnected; for (_id, gamepad) in self.gilrs_handle.gamepads() { let idx = self.my_hash(gamepad.id()) as u32; self.opened_controllers.insert(gamepad.id(), idx); handler.send_controller_event(&ControllerConnected { which: idx }, output); } } fn my_hash<U>(&self, obj: U) -> u64 where U: Hash, { let mut hasher = DefaultHasher::new(); obj.hash(&mut hasher); hasher.finish() } } impl From<Button> for ControllerButton { fn from(button: Button) -> Self { match button { Button::South => ControllerButton::A, Button::East => ControllerButton::B, Button::West => ControllerButton::X, Button::North => ControllerButton::Y, Button::DPadDown => ControllerButton::DPadDown, Button::DPadLeft => ControllerButton::DPadLeft, Button::DPadRight => ControllerButton::DPadRight, Button::DPadUp => ControllerButton::DPadUp, Button::LeftTrigger => ControllerButton::LeftShoulder, Button::RightTrigger => ControllerButton::RightShoulder, Button::LeftThumb => ControllerButton::LeftStick, Button::RightThumb => ControllerButton::RightStick, Button::Select => ControllerButton::Back, Button::Start => ControllerButton::Start, Button::Mode => ControllerButton::Guide, Button::LeftTrigger2 => ControllerButton::LeftTrigger, Button::RightTrigger2 => ControllerButton::RightTrigger, _ => ControllerButton::Unknown, } } } impl From<Axis> 
for ControllerAxis { fn from(axis: Axis) -> Self { match axis { Axis::LeftStickX => ControllerAxis::LeftX, Axis::LeftStickY => ControllerAxis::LeftY, Axis::RightStickX => ControllerAxis::RightX, Axis::RightStickY => ControllerAxis::RightY, Axis::LeftZ => ControllerAxis::LeftTrigger, Axis::RightZ => ControllerAxis::RightTrigger, _ => ControllerAxis::Unknown, } } }
her}, marker::PhantomData, }; use derivative::Derivative; use derive_new::new; use gilrs::{Axis, Button, Event, EventType, GamepadId, Gilrs}; use amethyst_core::{ ecs::prelude::{System, SystemData, World, Write}, shrev::EventChannel, SystemDesc, }; use super::{ controller::{ControllerAxis, ControllerButton, ControllerEvent}, BindingTypes, InputEvent, InputHandler, }; #[derive(Debug)] pub enum GilrsSystemError { ContextInit(String), ControllerSubsystemInit(String), } impl fmt::Display for GilrsSystemError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { GilrsSystemError::ContextInit(ref msg) => { write!(f, "Failed to initialize SDL: {}", msg) } GilrsSystemError::ControllerSubsystemInit(ref msg) => { write!(f, "Failed to initialize SDL controller subsystem: {}", msg) } } } } #[derive(Derivative, Debug, new)] #[derivative(Default(bound = ""))] pub struct GilrsEventsSystemDesc<T> where T: BindingTypes, { marker: PhantomData<T>, } impl<'a, 'b, T> SystemDesc<'a, 'b, GilrsEventsSystem<T>> for GilrsEventsSystemDesc<T> where T: BindingTypes, { fn build(self, world: &mut World) -> GilrsEventsSystem<T> { <GilrsEventsSystem<T> as System<'_>>::SystemData::setup(world); GilrsEventsSystem::new(world) .unwrap_or_else(|e| panic!("Failed to build SdlEventsSystem. Error: {}", e)) } } #[allow(missing_debug_implementations)] pub struct GilrsEventsSystem<T: BindingTypes> { gilrs_handle: Gilrs, opened_controllers: HashMap<GamepadId, u32>, mark
random
[ { "content": "/// Basic building block of rendering in [RenderingBundle].\n\n///\n\n/// Can be used to register rendering-related systems to the dispatcher,\n\n/// building render graph by registering render targets, adding [RenderableAction]s to them\n\n/// and signalling when the graph has to be rebuild.\n\npub trait RenderPlugin<B: Backend>: std::fmt::Debug {\n\n /// Hook for adding systems and bundles to the dispatcher.\n\n fn on_build<'a, 'b>(\n\n &mut self,\n\n _world: &mut World,\n\n _builder: &mut DispatcherBuilder<'a, 'b>,\n\n ) -> Result<(), Error> {\n\n Ok(())\n\n }\n\n\n\n /// Hook for providing triggers to rebuild the render graph.\n\n fn should_rebuild(&mut self, _world: &World) -> bool {\n\n false\n\n }\n\n\n\n /// Hook for extending the rendering plan.\n\n fn on_plan(\n\n &mut self,\n\n plan: &mut RenderPlan<B>,\n\n factory: &mut Factory<B>,\n", "file_path": "amethyst_rendy/src/bundle.rs", "rank": 0, "score": 145936.8592979741 }, { "content": "/// Trait to describe how rendering tiles may be culled for the tilemap to render\n\npub trait DrawTiles2DBounds: 'static + std::fmt::Debug + Send + Sync {\n\n /// Returns the region to render the tiles\n\n fn bounds<T: Tile, E: CoordinateEncoder>(map: &TileMap<T, E>, world: &World) -> Region;\n\n}\n\n\n\n/// Default bounds that returns the entire tilemap\n\n#[derive(Default, Debug)]\n\npub struct DrawTiles2DBoundsDefault;\n\nimpl DrawTiles2DBounds for DrawTiles2DBoundsDefault {\n\n fn bounds<T: Tile, E: CoordinateEncoder>(map: &TileMap<T, E>, world: &World) -> Region {\n\n Region::new(\n\n Point3::new(0, 0, 0),\n\n Point3::from(*map.dimensions() - Vector3::new(1, 1, 1)),\n\n )\n\n }\n\n}\n\n\n\n/// Draw opaque tilemap without lighting.\n\n#[derive(Clone, PartialEq, Derivative)]\n\n#[derivative(Default(bound = \"\"), Debug(bound = \"\"))]\n", "file_path": "amethyst_tiles/src/pass.rs", "rank": 1, "score": 138091.2397221115 }, { "content": "/// Define drawing opaque 3d meshes with specified shaders and texture 
set\n\npub trait Base3DPassDef: 'static + std::fmt::Debug + Send + Sync {\n\n /// The human readable name of this pass\n\n const NAME: &'static str;\n\n\n\n /// The [mtl::StaticTextureSet] type implementation for this pass\n\n type TextureSet: for<'a> StaticTextureSet<'a>;\n\n\n\n /// Returns the vertex `SpirvShader` which will be used for this pass\n\n fn vertex_shader() -> &'static SpirvShader;\n\n\n\n /// Returns the vertex `SpirvShader` which will be used for this pass on skinned meshes\n\n fn vertex_skinned_shader() -> &'static SpirvShader;\n\n\n\n /// Returns the fragment `SpirvShader` which will be used for this pass\n\n fn fragment_shader() -> &'static SpirvShader;\n\n\n\n /// Returns the `VertexFormat` of this pass\n\n fn base_format() -> Vec<VertexFormat>;\n\n\n\n /// Returns the `VertexFormat` of this pass for skinned meshes\n", "file_path": "amethyst_rendy/src/pass/base_3d.rs", "rank": 2, "score": 136548.97561550487 }, { "content": "# How to Use Assets\n\n\n\nThis guide covers the basic usage of assets into Amethyst for existing supported formats. For a list of supported formats, please [use this search for \"Format\"][doc_search_format] in the API documentation, and filter by the following crates:\n\n\n\n* amethyst_assets\n\n* amethyst_audio\n\n* amethyst_gltf\n\n* amethyst_locale\n\n* amethyst_ui\n\n\n\n## Steps\n\n\n\n1. 
Instantiate the Amethyst application with the assets directory.\n\n\n\n ```rust,edition2018,no_run,noplaypen\n\n # extern crate amethyst;\n\n #\n\n use amethyst::{\n\n prelude::*,\n\n # ecs::{World, WorldExt},\n\n utils::application_root_dir,\n\n };\n\n #\n\n # pub struct LoadingState;\n\n # impl SimpleState for LoadingState {}\n\n\n\n fn main() -> amethyst::Result<()> {\n\n // Sets up the application to read assets in\n\n // `<app_dir>/assets`\n\n let app_root = application_root_dir()?;\n\n let assets_dir = app_root.join(\"assets\");\n\n\n\n //..\n\n # let world = World::new();\n\n # let game_data = GameDataBuilder::default();\n\n\n\n let mut game = Application::new(assets_dir, LoadingState, game_data)?;\n\n #\n\n # game.run();\n\n # Ok(())\n\n }\n\n ```\n\n\n\n2. Ensure that the [`Processor<A>` system][doc_processor_system] for asset type `A` is registered in the dispatcher.\n\n\n\n For asset type `A`, `Processor<A>` is a `System` that will asynchronously load `A` assets. Usually the crate that provides `A` will also register `Processor<A>` through a `SystemBundle`. Examples:\n\n\n\n * `FontAsset` is provided by `amethyst_ui`, `UiBundle` registers `Processor<FontAsset>`.\n\n * `Source` is provided by `amethyst_audio`, `AudioBundle` registers `Processor<Source>`.\n\n * `SpriteSheet` is not added by a bundle, so `Processor<SpriteSheet>` needs to be added\n\n to the builder.\n\n\n", "file_path": "book/src/assets/how_to_use_assets.md", "rank": 3, "score": 42693.825446255854 }, { "content": " When [`loader.load(..)`][doc_load] is used to load an [`Asset`][doc_asset], the method returns immediately with a handle for the asset. 
The asset loading is handled asynchronously in the background, so if the handle is used to retrieve the asset, such as with [`world.read_resource::<AssetStorage<Texture>>()`][doc_read_resource][`.get(texture_handle)`][doc_asset_get], it will return `None` until the `Texture` has finished loading.\n\n\n\n ```rust,edition2018,no_run,noplaypen\n\n # extern crate amethyst;\n\n # use amethyst::{\n\n # assets::{Handle, ProgressCounter},\n\n # prelude::*,\n\n # renderer::Texture,\n\n # };\n\n #\n\n # pub struct GameState {\n\n # /// Handle to the player texture.\n\n # texture_handle: Handle<Texture>,\n\n # }\n\n #\n\n # impl SimpleState for GameState {}\n\n #\n\n # pub struct LoadingState {\n\n # /// Tracks loaded assets.\n\n # progress_counter: ProgressCounter,\n\n # /// Handle to the player texture.\n\n # texture_handle: Option<Handle<Texture>>,\n\n # }\n\n #\n\n impl SimpleState for LoadingState {\n\n fn update(\n\n &mut self,\n\n _data: &mut StateData<'_, GameData<'_, '_>>,\n\n ) -> SimpleTrans {\n\n if self.progress_counter.is_complete() {\n\n Trans::Switch(Box::new(GameState {\n\n texture_handle: self.texture_handle\n\n .take()\n\n .expect(\n\n \"Expected `texture_handle` to exist when \\\n\n `progress_counter` is complete.\"\n\n ),\n\n }))\n\n } else {\n\n Trans::None\n\n }\n\n }\n\n }\n\n ```\n\n\n\n The asset handle can now be used:\n\n\n\n ```rust,edition2018,no_run,noplaypen\n\n # extern crate amethyst;\n\n # use amethyst::{\n\n # assets::Handle,\n\n # prelude::*,\n\n # renderer::Texture,\n\n # };\n\n #\n\n # pub struct GameState {\n\n # /// Handle to the player texture.\n\n # texture_handle: Handle<Texture>,\n\n # }\n\n #\n\n impl SimpleState for GameState {\n\n fn on_start(&mut self, data: StateData<'_, GameData<'_, '_>>) {\n\n // Create the player entity.\n\n data.world\n\n .create_entity()\n\n // Use the texture handle as a component\n\n .with(self.texture_handle.clone())\n\n .build();\n\n }\n\n }\n\n ```\n\n\n\n[doc_asset]: 
https://docs.amethyst.rs/stable/amethyst_assets/trait.Asset.html\n\n[doc_asset_get]: https://docs.amethyst.rs/stable/amethyst_assets/struct.AssetStorage.html#method.get\n\n[doc_loader]: https://docs.amethyst.rs/stable/amethyst_assets/struct.Loader.html\n\n[doc_load]: https://docs.amethyst.rs/stable/amethyst_assets/struct.Loader.html#method.load\n\n[doc_processor_system]: https://docs.amethyst.rs/stable/amethyst_assets/struct.Processor.html\n\n[doc_read_resource]: https://docs.amethyst.rs/stable/specs/world/struct.World.html#method.read_resource\n\n[doc_search_format]: https://docs.amethyst.rs/stable/amethyst/?search=Format\n", "file_path": "book/src/assets/how_to_use_assets.md", "rank": 4, "score": 42693.36472398433 }, { "content": "3. Use the [`Loader`][doc_loader] resource to load the asset.\n\n\n\n ```rust,edition2018,no_run,noplaypen\n\n # extern crate amethyst;\n\n # use amethyst::{\n\n # assets::{AssetStorage, Handle, Loader, ProgressCounter},\n\n # ecs::{World, WorldExt},\n\n # prelude::*,\n\n # renderer::{formats::texture::ImageFormat, Texture},\n\n # utils::application_root_dir,\n\n # };\n\n #\n\n pub struct LoadingState {\n\n /// Tracks loaded assets.\n\n progress_counter: ProgressCounter,\n\n /// Handle to the player texture.\n\n texture_handle: Option<Handle<Texture>>,\n\n }\n\n\n\n impl SimpleState for LoadingState {\n\n fn on_start(&mut self, data: StateData<'_, GameData<'_, '_>>) {\n\n let loader = &data.world.read_resource::<Loader>();\n\n let texture_handle = loader.load(\n\n \"player.png\",\n\n ImageFormat::default(),\n\n &mut self.progress_counter,\n\n &data.world.read_resource::<AssetStorage<Texture>>(),\n\n );\n\n\n\n self.texture_handle = Some(texture_handle);\n\n }\n\n }\n\n #\n\n # fn main() -> amethyst::Result<()> {\n\n # let app_root = application_root_dir()?;\n\n # let assets_dir = app_root.join(\"assets\");\n\n #\n\n # let game_data = GameDataBuilder::default();\n\n # let mut game = Application::new(\n\n # assets_dir,\n\n # LoadingState 
{\n\n # progress_counter: ProgressCounter::new(),\n\n # texture_handle: None,\n\n # },\n\n # game_data,\n\n # )?;\n\n #\n\n # game.run();\n\n # Ok(())\n\n # }\n\n ```\n\n\n\n4. Wait for the asset to be loaded.\n\n\n", "file_path": "book/src/assets/how_to_use_assets.md", "rank": 5, "score": 42693.26522852502 }, { "content": "//! Renderer error types.\n\n\n\nuse std::{error, fmt};\n\n\n\n/// Common renderer error type.\n\n#[derive(Debug)]\n\npub(crate) enum Error {\n\n /// Failed to parse a Spritesheet from RON.\n\n LoadSpritesheetError(ron::de::Error),\n\n}\n\n\n\nimpl error::Error for Error {}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n use self::Error::*;\n\n\n\n match *self {\n\n LoadSpritesheetError(..) => write!(fmt, \"Failed to parse SpriteSheet\"),\n\n }\n\n }\n\n}\n", "file_path": "amethyst_rendy/src/error.rs", "rank": 6, "score": 17.51361655463021 }, { "content": "# Coding Conventions\n\n\n\nThis document outlines coding conventions used in the Amethyst.\n\n\n\nWe follow the [Rust API Guidelines].\n\nThis document only cover topics which aren't already outlined there.\n\n\n\n[Rust API Guidelines]: https://rust-lang-nursery.github.io/api-guidelines/about.html\n\n\n\n## Terminology\n\n\n\nIn this document we use the keywords _must_, _should_, _must not_, and _should not_.\n\n\n\nThese loosely conform to [RFC 2119]. 
Here is a summary of the keywords used:\n\n\n\n* _must_ indicates something that is required.\n\n* _should_ is a recommendation, that can be ignored if there is a good reason.\n\n\n\nAdding _not_ inverts the meaning of the keywords.\n\n\n\n[RFC 2119]: https://www.ietf.org/rfc/rfc2119.txt\n\n\n\n## Error Handling\n\n\n\n#### Defining crate-local errors\n\n\n\nCustom errors _must_ be defined in a module called `error`.\n\n\n\nThe error _must_ implement `std::error::Error`, which in turn requires `std::fmt::Display` and `std::fmt::Debug`).\n\n\n\nThe `std::fmt::Display` implementation _must not_ format the wrapped error since this is already provided through\n\n`source` (see below).\n\n\n\nThe error _should_ implement `std::fmt::Debug` through the `Debug` derive (see below), unless this is not supported.\n\n\n\nThe error _must_ implement `From<T>` conversion traits for any error it wraps (e.g. `From<io::Error>`).\n\nThe error _must not_ implement conversion methods from non-error types like `u32`.\n\n\n\nA lot of this can be implemented using [`err-derive`], as showcased below.\n\n\n\n[`err-derive`]: https://crates.io/crates/err-derive\n\n\n\n###### Example\n\n\n\n```rust\n\n/// crate::error\n\n\n\nuse std::{fmt, error, io};\n\nuse err_derive::Error;\n\n\n\n#[derive(Debug, Error)]\n\npub enum Error {\n\n /// I/O Error.\n\n #[error(display = \"I/O Error\")]\n\n Io(#[cause] io::Error),\n\n /// Permission denied.\n\n #[error(display = \"Permission Denied\")]\n\n PermissionDenied,\n\n #[error(display = \"Non-exhaustive Error\")]\n\n #[doc(hidden)]\n\n __Nonexhaustive,\n\n}\n\n\n\nimpl From<io::Error> for Error {\n\n fn from(e: io::Error) -> Error {\n\n Error::IoError(e)\n\n }\n\n}\n\n```\n\n\n", "file_path": "docs/CODING_CONVENTIONS.md", "rank": 7, "score": 17.40703510149619 }, { "content": "//! 
Defines binding structure used for saving and loading input settings.\n\n\n\nuse std::{\n\n borrow::Borrow,\n\n error::Error,\n\n fmt::{Debug, Display, Formatter, Result as FmtResult},\n\n hash::Hash,\n\n};\n\n\n\nuse derivative::Derivative;\n\nuse fnv::FnvHashMap as HashMap;\n\nuse serde::{Deserialize, Serialize};\n\nuse smallvec::SmallVec;\n\n\n\nuse super::{axis, Axis, Button};\n\n\n\n/// Define a set of types used for bindings configuration.\n\n/// Usually defaulted to `StringBindings`, which uses `String`s.\n\n///\n\n/// By defining your own set of types (usually enums),\n", "file_path": "amethyst_input/src/bindings.rs", "rank": 8, "score": 17.226810305357965 }, { "content": "//! Utilities for game state management.\n\n\n\nuse amethyst_input::is_close_requested;\n\n\n\nuse derivative::Derivative;\n\n\n\nuse crate::{ecs::World, GameData, StateEvent};\n\n\n\nuse std::fmt::{Debug, Display, Formatter, Result as FmtResult};\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n\n/// Error type for errors occurring in `StateMachine`\n\n#[derive(Debug)]\n\npub enum StateError {\n\n NoStatesPresent,\n\n}\n\n\n\nimpl Display for StateError {\n", "file_path": "src/state.rs", "rank": 9, "score": 17.088579589566656 }, { "content": "//! 
Provides structures and functions used to get audio outputs.\n\n\n\n// We have to use types from this to provide an output iterator type.\n\nuse std::{\n\n fmt::{Debug, Formatter, Result as FmtResult},\n\n io::Cursor,\n\n sync::Arc,\n\n};\n\n\n\nuse cpal::traits::DeviceTrait;\n\nuse log::error;\n\nuse rodio::{\n\n default_output_device, output_devices, Decoder, Device, Devices, OutputDevices, Sink,\n\n Source as RSource,\n\n};\n\n\n\nuse amethyst_core::ecs::World;\n\n\n\nuse crate::{sink::AudioSink, source::Source, DecoderError};\n\n\n", "file_path": "amethyst_audio/src/output.rs", "rank": 10, "score": 17.06467496349648 }, { "content": "pub use backtrace::Backtrace;\n\nuse std::{\n\n borrow::Cow,\n\n env, error, fmt, result,\n\n sync::atomic::{self, AtomicUsize},\n\n};\n\n\n\nconst RUST_BACKTRACE: &str = \"RUST_BACKTRACE\";\n\n\n\n/// Internal parts of `Error`.\n\n#[derive(Debug)]\n", "file_path": "amethyst_error/src/lib.rs", "rank": 11, "score": 16.639206411743665 }, { "content": "use std::{\n\n error,\n\n fmt::{Debug, Display, Formatter, Result as FmtResult},\n\n hash::Hash,\n\n marker::PhantomData,\n\n};\n\n\n\nuse derivative::Derivative;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse amethyst_assets::{AssetStorage, Handle, Loader, PrefabData, ProgressCounter};\n\nuse amethyst_core::ecs::prelude::{Entity, Read, ReadExpect, WriteStorage};\n\nuse amethyst_derive::PrefabData;\n\nuse amethyst_error::Error;\n\n\n\nuse crate::{Animation, AnimationHierarchy, AnimationSampling, AnimationSet, RestState, Sampler};\n\n\n\n/// `PrefabData` for loading a single `Animation`\n\n///\n\n/// This should be used primarily from inside other `PrefabData`, because this will not place\n", "file_path": "amethyst_animation/src/prefab.rs", "rank": 12, "score": 16.467329654129934 }, { "content": "use std::{collections::HashMap, fmt::Debug};\n\n\n\nuse amethyst_core::{\n\n ecs::{Entity, ReadExpect, System, SystemData, Write, WriteStorage},\n\n shrev::{EventChannel, ReaderId},\n\n 
ParentHierarchy,\n\n};\n\nuse amethyst_derive::SystemDesc;\n\n\n\nuse crate::{UiButtonAction, UiButtonActionType::*, UiImage, UiText};\n\n\n\n#[derive(Debug)]\n", "file_path": "amethyst_ui/src/button/system.rs", "rank": 13, "score": 15.964621151952233 }, { "content": "//! Provides utilities to remove large amounts of entities with a single command.\n\n\n\nuse std::{fmt::Debug, ops::Deref};\n\n\n\nuse amethyst_assets::PrefabData;\n\nuse amethyst_core::ecs::{\n\n storage::MaskedStorage, world::EntitiesRes, Component, DenseVecStorage, Entity, Join, Storage,\n\n WriteStorage,\n\n};\n\nuse amethyst_derive::PrefabData;\n\nuse amethyst_error::Error;\n\n\n\nuse log::error;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// A marker `Component` used to remove entities and clean up your scene.\n\n/// The generic parameter `I` is the type of id you want to use.\n\n/// Generally an int or an enum.\n\n///\n\n/// # Example\n", "file_path": "amethyst_utils/src/removal.rs", "rank": 14, "score": 15.477887823778833 }, { "content": "//! 
Demonstrates loading a custom prefab using the Amethyst engine.\n\n\n\nuse std::fmt::Debug;\n\n\n\nuse amethyst::{\n\n assets::{\n\n AssetStorage, Handle, Prefab, PrefabData, PrefabLoader, PrefabLoaderSystemDesc,\n\n ProgressCounter, RonFormat,\n\n },\n\n ecs::{\n\n storage::DenseVecStorage, Component, Entities, Entity, Join, ReadStorage, World, WorldExt,\n\n WriteStorage,\n\n },\n\n prelude::*,\n\n utils::application_root_dir,\n\n Error,\n\n};\n\nuse derive_new::new;\n\nuse serde::{Deserialize, Serialize};\n\n\n", "file_path": "examples/prefab_adapter/main.rs", "rank": 16, "score": 15.455930909776438 }, { "content": "use std::{fmt, marker::PhantomData, path::PathBuf};\n\n\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\nuse sdl2::{\n\n self,\n\n controller::{AddMappingError, Axis, Button, GameController},\n\n event::Event,\n\n EventPump, GameControllerSubsystem, Sdl,\n\n};\n\n\n\nuse amethyst_core::{\n\n ecs::prelude::{System, SystemData, World, Write},\n\n shrev::EventChannel,\n\n SystemDesc,\n\n};\n\n\n\nuse super::{\n\n controller::{ControllerAxis, ControllerButton, ControllerEvent},\n\n BindingTypes, InputEvent, InputHandler,\n", "file_path": "amethyst_input/src/sdl_events_system.rs", "rank": 17, "score": 15.415439064169755 }, { "content": "//! Loads RON files into a structure for easy / statically typed usage.\n\n\n\n#![crate_name = \"amethyst_config\"]\n\n#![warn(\n\n missing_debug_implementations,\n\n missing_docs,\n\n rust_2018_idioms,\n\n rust_2018_compatibility\n\n)]\n\n#![warn(clippy::all)]\n\n#![allow(clippy::new_without_default)]\n\n\n\nuse std::{\n\n error::Error,\n\n fmt, io,\n\n path::{Path, PathBuf},\n\n};\n\n\n\nuse ron::{self, de::Error as DeError, ser::Error as SerError};\n\nuse serde::{Deserialize, Serialize};\n", "file_path": "amethyst_config/src/lib.rs", "rank": 18, "score": 15.350803034594529 }, { "content": "//! 
Demonstrates loading a simple prefab using the Amethyst engine.\n\n\n\nuse std::fmt::Debug;\n\n\n\nuse amethyst::{\n\n assets::{\n\n AssetStorage, Handle, Prefab, PrefabData, PrefabLoader, PrefabLoaderSystemDesc,\n\n ProgressCounter, RonFormat,\n\n },\n\n core::Parent,\n\n derive::PrefabData,\n\n ecs::{\n\n storage::DenseVecStorage, Component, Entities, Entity, Join, ReadStorage, World,\n\n WriteStorage,\n\n },\n\n prelude::*,\n\n utils::application_root_dir,\n\n Error,\n\n};\n\nuse derive_new::new;\n", "file_path": "examples/prefab_basic/main.rs", "rank": 19, "score": 15.285792973768576 }, { "content": "//! Demonstrates loading a custom prefab using the Amethyst engine.\n\n\n\nuse std::fmt::Debug;\n\n\n\nuse amethyst::{\n\n assets::{\n\n AssetStorage, Handle, Prefab, PrefabData, PrefabLoader, PrefabLoaderSystemDesc,\n\n ProgressCounter, RonFormat,\n\n },\n\n core::{Named, Parent},\n\n derive::PrefabData,\n\n ecs::{\n\n storage::DenseVecStorage, Component, Entities, Entity, Join, ReadStorage, World,\n\n WriteStorage,\n\n },\n\n prelude::*,\n\n utils::application_root_dir,\n\n Error,\n\n};\n\nuse derive_new::new;\n", "file_path": "examples/prefab_multi/main.rs", "rank": 20, "score": 15.23225833316344 }, { "content": "//! 
ECS input bundle\n\n\n\nuse crate::{BindingError, BindingTypes, Bindings, InputSystemDesc};\n\nuse amethyst_config::{Config, ConfigError};\n\nuse amethyst_core::{\n\n ecs::prelude::{DispatcherBuilder, World},\n\n SystemBundle, SystemDesc,\n\n};\n\nuse amethyst_error::Error;\n\nuse derivative::Derivative;\n\nuse std::{error, fmt, path::Path};\n\n\n\n#[cfg(feature = \"sdl_controller\")]\n\nuse crate::sdl_events_system::ControllerMappings;\n\n\n\n/// Bundle for adding the `InputHandler`.\n\n///\n\n/// This also adds the Winit EventHandler and the `InputEvent<T>` EventHandler\n\n/// where `T::Action` is the type for Actions you have assigned here.\n\n///\n", "file_path": "amethyst_input/src/bundle.rs", "rank": 21, "score": 15.12114741524593 }, { "content": "use derivative::Derivative;\n\nuse serde::de::DeserializeOwned;\n\nuse std::{\n\n fmt::{Debug, Formatter},\n\n marker::PhantomData,\n\n};\n\n\n\nuse amethyst_assets::{\n\n AssetPrefab, AssetStorage, Format, Handle, Loader, Prefab, PrefabData, PrefabLoaderSystem,\n\n PrefabLoaderSystemDesc, Progress, ProgressCounter,\n\n};\n\nuse amethyst_audio::Source as Audio;\n\nuse amethyst_core::{\n\n ecs::{\n\n prelude::{Entities, Entity, Read, ReadExpect, World, Write, WriteStorage},\n\n shred::{ResourceId, SystemData},\n\n },\n\n HiddenPropagate,\n\n};\n\nuse amethyst_error::{format_err, Error, ResultExt};\n", "file_path": "amethyst_ui/src/prefab.rs", "rank": 22, "score": 15.12114741524593 }, { "content": "//! 
Provides utilities for building and describing scenes in your game.\n\n\n\nuse amethyst_assets::{PrefabData, ProgressCounter};\n\nuse amethyst_controls::ControlTagPrefab;\n\nuse amethyst_core::{ecs::prelude::Entity, Transform};\n\nuse amethyst_derive::PrefabData;\n\nuse amethyst_error::Error;\n\nuse amethyst_rendy::{\n\n camera::CameraPrefab, formats::GraphicsPrefab, light::LightPrefab, rendy::mesh::MeshBuilder,\n\n shape::FromShape,\n\n};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::fmt::Debug;\n\n\n\nuse crate::removal::Removal;\n\n\n\n/// Basic `Prefab` scene node, meant to be used for fast prototyping, and most likely replaced\n\n/// for more complex scenarios.\n\n///\n\n/// ### Type parameters:\n", "file_path": "amethyst_utils/src/scene.rs", "rank": 23, "score": 15.117378996883552 }, { "content": "//! Demonstrates loading a custom prefab using the Amethyst engine.\n\n\n\nuse std::fmt::Debug;\n\n\n\nuse amethyst::{\n\n assets::{\n\n AssetStorage, Handle, Prefab, PrefabData, PrefabLoader, PrefabLoaderSystemDesc,\n\n ProgressCounter, RonFormat,\n\n },\n\n core::{Named, Parent},\n\n derive::PrefabData,\n\n ecs::{\n\n storage::{DenseVecStorage, VecStorage},\n\n Component, Entities, Entity, Join, ReadStorage, World, WorldExt, WriteStorage,\n\n },\n\n prelude::*,\n\n utils::application_root_dir,\n\n Error,\n\n};\n\nuse derivative::Derivative;\n", "file_path": "examples/prefab_custom/main.rs", "rank": 24, "score": 15.074257720340979 }, { "content": "pub use log::LevelFilter;\n\n\n\nuse log::debug;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse std::{borrow::Cow, env, fmt, io, path::PathBuf, str::FromStr};\n\n\n\n/// An enum that contains options for logging to the terminal.\n\n#[derive(Clone, Copy, PartialEq, Eq, Debug, Serialize, Deserialize)]\n\npub enum StdoutLog {\n\n /// Disables logging to the terminal.\n\n Off,\n\n /// Enables logging to the terminal without colored output.\n\n Plain,\n\n /// Enables logging to the terminal with colored output on 
supported platforms.\n\n Colored,\n\n}\n\n\n\n/// Logger configuration object.\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n", "file_path": "src/logger.rs", "rank": 25, "score": 14.97103666488541 }, { "content": "# How to Define Custom Control Bindings\n\n\n\nInstead of using `StringBindings` for an `InputBundle` you probably want to use a custom type in production, as `StringBindings` are mainly meant to be used for prototyping and not very efficient.\n\n\n\nUsing a custom type to handle input instead of using `String` has many advantages:\n\n\n\n* A `String` uses quite a lot of memory compared to something like an enum.\n\n* Inputting a `String` when retrieving input data is error-prone if you mistype it or change the name.\n\n* A custom type can hold additional information.\n\n\n\n## Defining Custom Input `BindingTypes`\n\n\n\nDefining a custom type for the `InputBundle` is done by implementing the `BindingTypes` trait. This trait contains two types, an `Axis` type and an `Action` type. 
These types are usually defined as enums.\n\n\n\n```rust,edition2018,no_run,noplaypen\n\n# extern crate amethyst;\n\n# extern crate serde;\n\nuse std::fmt::{self, Display};\n\n\n\nuse amethyst::input::{BindingTypes, Bindings};\n\nuse serde::{Serialize, Deserialize};\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq, Serialize, Deserialize)]\n\nenum AxisBinding {\n\n Horizontal,\n\n Vertical,\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, PartialEq, Eq, Serialize, Deserialize)]\n\nenum ActionBinding {\n\n Shoot,\n\n}\n\n\n\nimpl Display for AxisBinding {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{:?}\", self)\n\n }\n\n}\n\n\n\nimpl Display for ActionBinding {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{:?}\", self)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\nstruct MovementBindingTypes;\n\n\n\nimpl BindingTypes for MovementBindingTypes {\n\n type Axis = AxisBinding;\n\n type Action = ActionBinding;\n\n}\n\n```\n\n\n\nThe `Axis` and `Action` type both need to derive all the traits listed above, the first five are used by Amethyst and the last two are for reading and writing to files correctly. They also need to implement `Display` if you want to add a bindings config file.\n\n\n", "file_path": "book/src/input/how_to_define_custom_control_bindings.md", "rank": 26, "score": 14.901402060853872 }, { "content": "use derivative::Derivative;\n\nuse rand::{self, distributions::Alphanumeric, Rng};\n\nuse std::{\n\n collections::{\n\n hash_map::{Keys, Values, ValuesMut},\n\n HashMap,\n\n },\n\n fmt::Display,\n\n hash::Hash,\n\n ops::Index,\n\n};\n\n\n\n/// A widget is an object that keeps track of all components and entities\n\n/// that make up an element of the user interface. 
Using the widget_components!\n\n/// macro, it's possible to generate methods that let you easily retrieve\n\n/// all components for a widget, and basically annotate which components the\n\n/// widget will definitely or maybe contain.\n\n/// Widgets are stored in their respective `Widgets` resource and referred to\n\n/// by their associated Id type. A widget will generally only contain fields\n\n/// for the entity Ids it consist of.\n", "file_path": "amethyst_ui/src/widgets.rs", "rank": 27, "score": 14.869454699690571 }, { "content": "use crate::{storage::ProcessingState, FormatRegisteredData, Handle, Reload, SingleFile, Source};\n\nuse amethyst_core::ecs::storage::UnprotectedStorage;\n\nuse amethyst_error::{Error, ResultExt};\n\nuse std::{fmt::Debug, ops::Deref, sync::Arc};\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n\n/// One of the three core traits of this crate.\n\n///\n\n/// You want to implement this for every type of asset like\n\n///\n\n/// * `Mesh`\n\n/// * `Texture`\n\n/// * `Terrain`\n\n///\n\n/// and so on. Now, an asset may be available in different formats.\n\n/// That's why we have the `Data` associated type here. 
You can specify\n\n/// an intermediate format here, like the vertex data for a mesh or the samples\n\n/// for audio data.\n\n///\n\n/// This data is then generated by the `Format` trait.\n", "file_path": "amethyst_assets/src/asset.rs", "rank": 28, "score": 14.589379944586167 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use std::fmt::Debug;\n\n\n\n use super::*;\n\n use winit::{\n\n DeviceId, ElementState, Event, KeyboardInput, ModifiersState, ScanCode, WindowEvent,\n\n WindowId,\n\n };\n\n\n\n const HIDPI: f32 = 1.0;\n\n\n\n #[test]\n\n fn key_action_response() {\n\n // Register an action triggered by a key\n\n // Press the key and check for a press event of both the key and the action.\n\n // Release the key and check for a release event of both the key and the action.\n\n\n\n let mut handler = InputHandler::<StringBindings>::new();\n\n let mut events = EventChannel::<InputEvent<StringBindings>>::new();\n", "file_path": "amethyst_input/src/input_handler.rs", "rank": 29, "score": 14.513534622922798 }, { "content": "use std::{cmp::Ordering, fmt::Debug, hash::Hash, marker, time::Duration};\n\n\n\nuse derivative::Derivative;\n\nuse fnv::FnvHashMap;\n\nuse log::error;\n\nuse minterpolate::{get_input_index, InterpolationFunction, InterpolationPrimitive};\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse amethyst_assets::{Asset, AssetStorage, Handle, PrefabData};\n\nuse amethyst_core::{\n\n ecs::prelude::{Component, DenseVecStorage, Entity, VecStorage, WriteStorage},\n\n shred::SystemData,\n\n timing::{duration_to_secs, secs_to_duration},\n\n};\n\nuse amethyst_derive::PrefabData;\n\nuse amethyst_error::Error;\n\n\n\n/// Blend method for sampler blending\n\n#[derive(Clone, Copy, Debug, PartialOrd, PartialEq, Eq, Hash)]\n\npub enum BlendMethod {\n\n /// Simple linear blending\n\n Linear,\n\n}\n\n\n\n/// Extra data to extract from `World`, for use when applying or fetching a sample\n", "file_path": "amethyst_animation/src/resources.rs", "rank": 30, "score": 
14.288104399535552 }, { "content": " #[must_use]\n\n fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n\nimpl Ord for MortonRegion {\n\n #[must_use]\n\n fn cmp(&self, other: &Self) -> std::cmp::Ordering {\n\n if self.min == other.min && self.max == other.max {\n\n std::cmp::Ordering::Equal\n\n } else if morton::min(self.min, other.min) == self.min {\n\n std::cmp::Ordering::Less\n\n } else {\n\n std::cmp::Ordering::Greater\n\n }\n\n }\n\n}\n\nimpl From<Region> for MortonRegion {\n\n #[must_use]\n\n fn from(region: Region) -> Self {\n", "file_path": "amethyst_tiles/src/iters.rs", "rank": 31, "score": 13.142713409881086 }, { "content": "//! Provides the directory of the executable.\n\n\n\nuse std::{env, io, path};\n\n\n\n/// Returns the cargo manifest directory when running the executable with cargo or the directory in\n\n/// which the executable resides otherwise, traversing symlinks if necessary.\n\n///\n\n/// The algorithm used is:\n\n///\n\n/// * If the `CARGO_MANIFEST_DIR` environment variable is defined it is used as application root.\n\n/// This simplifies running development projects through `cargo run`.\n\n/// See the [cargo reference documentation][cargo-ref] for more details.\n\n/// * If the executable name can be found using [`std::env::current_exe`], resolve all symlinks and\n\n/// use the directory it resides in as application root.\n\n///\n\n/// If none of the above works, an error is returned.\n\n///\n\n/// [cargo-ref]: https://doc.rust-lang.org/cargo/reference/environment-variables.html\n\n/// [`std::env::current_exe`]: https://doc.rust-lang.org/std/env/fn.current_exe.html\n", "file_path": "amethyst_utils/src/app_root_dir.rs", "rank": 32, "score": 12.637357480117638 }, { "content": " }\n\n}\n\n\n\n// Unit tests\n\n#[cfg(test)]\n\nmod tests {\n\n use std::{thread, time::Duration};\n\n\n\n use super::Stopwatch;\n\n\n\n // Timing varies more on macOS CI\n\n fn get_uncertainty() -> u32 {\n\n let 
is_macos = !std::env::var(\"MACOS\").unwrap_or_default().is_empty();\n\n let is_ci = std::env::var(\"CI\").is_ok();\n\n if is_macos && is_ci {\n\n 20\n\n } else {\n\n 10\n\n }\n\n }\n", "file_path": "amethyst_core/src/timing.rs", "rank": 33, "score": 12.544099582725973 }, { "content": "//! Contains the `Error` type and company as used by Amethyst.\n\n//!\n\n//! **Note:** This type is not intended to be used outside of Amethyst.\n\n//! If you are integrating a crate from amethyst to use this, it is recommended that you treat this\n\n//! type as an opaque [`std::error::Error`].\n\n//!\n\n//! [`std::error::Error`]: https://doc.rust-lang.org/std/error/trait.Error.html\n\n\n\n// Parts copied from failure:\n\n// https://github.com/rust-lang-nursery/failure\n\n\n\n#![warn(\n\n missing_debug_implementations,\n\n missing_docs,\n\n rust_2018_idioms,\n\n rust_2018_compatibility\n\n)]\n\n#![warn(clippy::all)]\n\n#![allow(clippy::new_without_default)]\n\n\n", "file_path": "amethyst_error/src/lib.rs", "rank": 34, "score": 12.36358851833157 }, { "content": "use dirs::config_dir;\n\nuse std::{\n\n fs::{create_dir_all, read_to_string, remove_file},\n\n path::Path,\n\n};\n\nuse vergen::{self, ConstantsFlags};\n\n\n", "file_path": "build.rs", "rank": 35, "score": 12.126295441378833 }, { "content": "// SERVER\n\nuse std::time::Duration;\n\n\n\nuse amethyst::{\n\n core::{bundle::SystemBundle, frame_limiter::FrameRateLimitStrategy, SystemDesc},\n\n ecs::{DispatcherBuilder, Read, System, SystemData, World, Write},\n\n network::simulation::{tcp::TcpNetworkBundle, NetworkSimulationEvent, TransportResource},\n\n prelude::*,\n\n shrev::{EventChannel, ReaderId},\n\n utils::application_root_dir,\n\n Result,\n\n};\n\nuse log::{error, info};\n\nuse std::net::TcpListener;\n\n\n", "file_path": "examples/net_server/main.rs", "rank": 36, "score": 12.097076142079764 }, { "content": "#### Avoid overloading the default `Result`\n\n\n\nWe _should not_ import a `Result` that overloads the default 
import.\n\nInstead, if we are using a `Result` alias, use it through a module (e.g. `io::Result`).\n\n\n\nThis is a future-proofing pattern that prevents having to deal with conflicting `Result` types during refactoring or\n\ncode re-use.\n\nOverloading `Result` also makes it harder to use the default `Result` when it's needed.\n\nThis is especially useful when multiple modules export their own `Result` types.\n\n\n\nCrates _should not_ define their own `Result`.\n\nInstead prefer using `Result` directly with the crate-local error type, like this:\n\n\n\n```rust\n\nuse crate::error::Error;\n\n\n\nfn foo() -> Result<u32, Error> {\n\n Ok(42)\n\n}\n\n```\n\n\n\n###### Do\n\n\n\n```rust\n\nuse std::io;\n\n\n\nfn foo() -> io::Result<u32> {\n\n Ok(42)\n\n}\n\n```\n\n\n\n###### Don't\n\n\n\n```rust\n\nuse std::io::Result;\n\n\n\nfn foo() -> Result<u32> {\n\n Ok(42)\n\n}\n\n```\n", "file_path": "docs/CODING_CONVENTIONS.md", "rank": 37, "score": 12.073033401004412 }, { "content": "/// Returns the cargo manifest directory when running the executable with cargo or the directory in\n\n/// which the executable resides otherwise, traversing symlinks if necessary.\n\n///\n\n/// The algorithm used is:\n\n///\n\n/// * If the `CARGO_MANIFEST_DIR` environment variable is defined it is used as application root.\n\n/// This simplifies running development projects through `cargo run`.\n\n/// See the [cargo reference documentation][cargo-ref] for more details.\n\n/// * If the executable name can be found using [`std::env::current_exe`], resolve all symlinks and\n\n/// use the directory it resides in as application root.\n\n///\n\n/// If none of the above works, an error is returned.\n\n///\n\n/// [cargo-ref]: https://doc.rust-lang.org/cargo/reference/environment-variables.html\n\n/// [`std::env::current_exe`]: https://doc.rust-lang.org/std/env/fn.current_exe.html\n\npub fn application_root_dir() -> Result<path::PathBuf, io::Error> {\n\n if let Some(manifest_dir) = 
env::var_os(\"CARGO_MANIFEST_DIR\") {\n\n return Ok(path::PathBuf::from(manifest_dir));\n\n }\n\n\n\n let mut exe = dunce::canonicalize(env::current_exe()?)?;\n\n\n\n // Modify in-place to avoid an extra copy.\n\n if exe.pop() {\n\n return Ok(exe);\n\n }\n\n\n\n Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n \"Failed to find an application root\",\n\n ))\n\n}\n\n\n", "file_path": "amethyst_utils/src/app_root_dir.rs", "rank": 38, "score": 12.06820450391666 }, { "content": " inner: HotReloadStrategyInner,\n\n}\n\n\n\nimpl HotReloadStrategy {\n\n /// Causes hot reloads every `n` seconds.\n\n pub fn every(n: u8) -> Self {\n\n use std::u64::MAX;\n\n\n\n HotReloadStrategy {\n\n inner: HotReloadStrategyInner::Every {\n\n interval: n,\n\n last: Instant::now(),\n\n frame_number: MAX,\n\n },\n\n }\n\n }\n\n\n\n /// This allows to use `trigger` for hot reloading.\n\n pub fn when_triggered() -> Self {\n\n use std::u64::MAX;\n", "file_path": "amethyst_assets/src/reload.rs", "rank": 39, "score": 12.04573747048935 }, { "content": " use super::{Error, ResultExt};\n\n\n\n #[test]\n\n fn test_error_from_string() {\n\n assert_eq!(\"foo\", Error::from_string(\"foo\").to_string());\n\n }\n\n\n\n #[test]\n\n fn test_error_from_error() {\n\n use std::io;\n\n let e = io::Error::new(io::ErrorKind::Other, \"i/o other\");\n\n assert_eq!(\"i/o other\", Error::new(e).to_string());\n\n }\n\n\n\n #[test]\n\n fn test_result_ext_source() {\n\n use std::io;\n\n\n\n let e = io::Error::new(io::ErrorKind::Other, \"wrapped\");\n\n let a = Error::new(e);\n", "file_path": "amethyst_error/src/lib.rs", "rank": 40, "score": 12.013102635630666 }, { "content": "//! 
Skinned mesh and bone implementation for renderer.\n\nuse amethyst_assets::PrefabData;\n\nuse amethyst_core::{\n\n ecs::prelude::{Component, DenseVecStorage, Entity, FlaggedStorage, WriteStorage},\n\n math::Matrix4,\n\n};\n\nuse amethyst_error::Error;\n\nuse rendy::{\n\n hal::format::Format,\n\n mesh::{AsAttribute, AsVertex, VertexFormat},\n\n};\n\nuse std::result::Result as StdResult;\n\n\n\n/// Type for joint weights attribute of vertex\n\n#[repr(C)]\n\n#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]\n\npub struct JointWeights(pub [f32; 4]);\n\n\n\nimpl From<[f32; 4]> for JointWeights {\n\n fn from(from: [f32; 4]) -> Self {\n", "file_path": "amethyst_rendy/src/skinning.rs", "rank": 41, "score": 11.97568675713475 }, { "content": "use std::collections::HashMap;\n\n\n\nuse amethyst_animation::{JointPrefab, SkinPrefab, SkinnablePrefab};\n\nuse amethyst_assets::Prefab;\n\nuse amethyst_core::math::{convert, Matrix4};\n\nuse amethyst_error::Error;\n\nuse amethyst_rendy::skinning::JointTransformsPrefab;\n\n\n\nuse super::Buffers;\n\nuse crate::GltfPrefab;\n\n\n", "file_path": "amethyst_gltf/src/format/skin.rs", "rank": 42, "score": 11.915624923196168 }, { "content": "//! `SleepAndYield` can potentially be as accurate as `Yield` while using less CPU time, but you\n\n//! will have to test different grace period timings to determine how much time needs to be left\n\n//! to ensure that the main thread doesn't sleep too long and miss the start of the next frame.\n\n//!\n\n//! [`Application`]: ../../amethyst/type.Application.html\n\n//! [`FrameRateLimitStrategy`]: ./enum.FrameRateLimitStrategy.html\n\n//! [`thread::yield_now`]: https://doc.rust-lang.org/std/thread/fn.yield_now.html\n\n//! 
[`thread::sleep`]: https://doc.rust-lang.org/stable/std/thread/fn.sleep.html\n\n\n\nuse std::{\n\n thread::{sleep, yield_now},\n\n time::{Duration, Instant},\n\n};\n\n\n\nuse derive_new::new;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nconst ZERO: Duration = Duration::from_millis(0);\n\n\n\n/// Frame rate limiting strategy.\n", "file_path": "amethyst_core/src/frame_limiter.rs", "rank": 43, "score": 11.908294297340177 }, { "content": "//! Defining a custom asset and format.\n\n\n\nuse std::{str::from_utf8, sync::Arc, thread::sleep, time::Duration};\n\n\n\nuse rayon::ThreadPoolBuilder;\n\n\n\nuse amethyst_assets::*;\n\nuse amethyst_core::ecs::prelude::VecStorage;\n\nuse amethyst_error::Error;\n\n\n\n#[derive(Clone, Debug)]\n", "file_path": "amethyst_assets/examples/ll.rs", "rank": 44, "score": 11.822001464124327 }, { "content": "use std::collections::HashMap;\n\n\n\nuse amethyst_error::Error;\n\n\n\nuse amethyst_animation::{\n\n AnimationPrefab, AnimationSetPrefab, InterpolationFunction, InterpolationPrimitive, Sampler,\n\n SamplerPrimitive, TransformChannel,\n\n};\n\nuse amethyst_core::{\n\n math::{convert, Vector3, Vector4},\n\n Transform,\n\n};\n\n\n\nuse super::Buffers;\n\nuse crate::error;\n\n\n", "file_path": "amethyst_gltf/src/format/animation.rs", "rank": 45, "score": 11.808003110351665 }, { "content": "use std::marker::PhantomData;\n\n\n\nuse amethyst_core::{\n\n ecs::{System, SystemData, World, WorldExt},\n\n shrev::{EventChannel, ReaderId},\n\n SystemDesc,\n\n};\n\nuse amethyst_error::Error;\n\n\n\nuse amethyst_derive::SystemDesc;\n\n\n\n#[test]\n", "file_path": "amethyst_derive/tests/system_desc.rs", "rank": 46, "score": 11.799784028637465 }, { "content": " {\n\n self.loader.load_from_data(data, progress, &self.storage)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::sync::Arc;\n\n\n\n use rayon::ThreadPoolBuilder;\n\n\n\n use amethyst_core::{\n\n ecs::{Builder, RunNow, World, WorldExt},\n\n SystemDesc, Time, Transform,\n\n };\n\n\n\n use 
crate::Loader;\n\n\n\n use super::*;\n\n\n", "file_path": "amethyst_assets/src/prefab/mod.rs", "rank": 47, "score": 11.74845945485274 }, { "content": "use super::Buffers;\n\nuse crate::{error, GltfSceneOptions};\n\nuse amethyst_core::math::{zero, Vector3};\n\nuse amethyst_error::Error;\n\nuse amethyst_rendy::{\n\n rendy::mesh::{Color, MeshBuilder, Normal, Position, Tangent, TexCoord},\n\n skinning::JointCombined,\n\n};\n\nuse log::{trace, warn};\n\nuse mikktspace::{generate_tangents, Geometry};\n\nuse std::{iter::repeat, ops::Range};\n\n\n", "file_path": "amethyst_gltf/src/format/mesh.rs", "rank": 48, "score": 11.667945286254437 }, { "content": "use crate::CoordinateEncoder;\n\nuse amethyst_core::math::Vector3;\n\nuse luts::{\n\n MORTON256_X, MORTON256_Y, MORTON256_Z, MORTON512_DECODE_X, MORTON512_DECODE_Y,\n\n MORTON512_DECODE_Z,\n\n};\n\nuse std::cmp::Ordering;\n\n\n\nmod luts;\n\n\n\n#[inline]\n\n#[cfg(target_feature = \"bmi2\")]\n", "file_path": "amethyst_tiles/src/morton/mod.rs", "rank": 49, "score": 11.602139923733827 }, { "content": "use std::{\n\n hash::Hash,\n\n marker::{self, PhantomData},\n\n time::Duration,\n\n};\n\n\n\nuse derivative::Derivative;\n\nuse fnv::FnvHashMap;\n\nuse log::error;\n\nuse minterpolate::InterpolationPrimitive;\n\n\n\nuse amethyst_assets::{AssetStorage, Handle};\n\nuse amethyst_core::{\n\n ecs::prelude::{\n\n Component, Entities, Entity, Join, Read, ReadStorage, System, SystemData, World,\n\n WriteStorage,\n\n },\n\n timing::secs_to_duration,\n\n SystemDesc,\n\n};\n", "file_path": "amethyst_animation/src/systems/control.rs", "rank": 50, "score": 11.523867563106315 }, { "content": "use std::marker::PhantomData;\n\n\n\nuse derive_new::new;\n\nuse log::error;\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n\nuse amethyst_assets::AssetStorage;\n\nuse amethyst_core::{\n\n ecs::prelude::{Read, System, SystemData, World, WriteExpect},\n\n shred::Resource,\n\n SystemDesc,\n\n};\n\n\n\nuse crate::{\n\n 
output::init_output,\n\n sink::AudioSink,\n\n source::{Source, SourceHandle},\n\n};\n\n\n", "file_path": "amethyst_audio/src/systems/dj.rs", "rank": 51, "score": 11.493716121753128 }, { "content": "use std::marker::PhantomData;\n\n\n\nuse amethyst::{\n\n core::{\n\n deferred_dispatcher_operation::{AddSystem, AddSystemDesc, DispatcherOperation},\n\n SystemDesc,\n\n },\n\n ecs::prelude::*,\n\n prelude::*,\n\n};\n\n\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\n\n\nuse crate::GameUpdate;\n\n\n\n/// State with a custom dispatcher.\n\n///\n\n/// This allows you to specify which systems you want to run within the state. This should be\n\n/// constructed using the `CustomDispatcherStateBuilder`.\n", "file_path": "amethyst_test/src/state/custom_dispatcher_state.rs", "rank": 52, "score": 11.477155985470706 }, { "content": "use std::fs;\n\n\n\nuse font_kit::handle::Handle as FontKitHandle;\n\nuse log::{error, warn};\n\n\n\nuse amethyst_assets::{AssetStorage, Format, Loader};\n\n\n\nuse crate::{\n\n font::systemfont::default_system_font,\n\n format::{FontAsset, FontHandle, TtfFormat},\n\n};\n\n\n\n/// Get the system default fonts.\n\n/// If unable to, gets the local square.ttf font.\n", "file_path": "amethyst_ui/src/font/default.rs", "rank": 53, "score": 11.466229541194807 }, { "content": "/// [`String`]: https://doc.rust-lang.org/std/string/struct.String.html\n\n/// [str]: https://doc.rust-lang.org/std/primitive.str.html\n\n/// [`Named::new`]: #method.new\n\n///\n\n/// # Examples\n\n///\n\n/// Creating a name from string constant:\n\n///\n\n/// ```\n\n/// use amethyst::core::{Named, WithNamed};\n\n/// use amethyst::ecs::prelude::*;\n\n///\n\n/// let mut world = World::new();\n\n/// world.register::<Named>();\n\n///\n\n/// world\n\n/// .create_entity()\n\n/// .named(\"Super Cool Entity\")\n\n/// .build();\n\n/// ```\n", "file_path": "amethyst_core/src/named.rs", "rank": 54, "score": 11.45922443633983 }, { "content": "//! 
High level example\n\n\n\n#![allow(unused)]\n\n\n\nuse std::sync::Arc;\n\n\n\nuse rayon::{ThreadPool, ThreadPoolBuilder};\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse amethyst_assets::*;\n\nuse amethyst_core::{\n\n ecs::prelude::{\n\n Builder, Dispatcher, DispatcherBuilder, Read, ReadExpect, System, VecStorage, World,\n\n WorldExt, Write,\n\n },\n\n Time,\n\n};\n\nuse amethyst_error::{format_err, Error, ResultExt};\n\n\n", "file_path": "amethyst_assets/examples/hl.rs", "rank": 55, "score": 11.4455135911257 }, { "content": "//!\n\n//! # Usage\n\n//!\n\n//! The following shows a simple example of testing a `State`. More examples are in the\n\n//! [Examples](#Examples) section.\n\n//!\n\n//! ```rust\n\n//! # use std::marker::PhantomData;\n\n//! #\n\n//! # use amethyst_test::prelude::*;\n\n//! # use amethyst::{\n\n//! # ecs::prelude::*,\n\n//! # prelude::*,\n\n//! # };\n\n//! #\n\n//! # #[derive(Debug)]\n\n//! # struct LoadResource;\n\n//! #\n\n//! # #[derive(Debug)]\n\n//! # struct LoadingState;\n", "file_path": "amethyst_test/src/lib.rs", "rank": 56, "score": 11.444014020097354 }, { "content": "use std::{\n\n iter::Iterator,\n\n mem::replace,\n\n sync::{\n\n atomic::{AtomicBool, Ordering},\n\n Arc,\n\n },\n\n};\n\n\n\nuse derive_new::new;\n\nuse rodio::SpatialSink;\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n\nuse amethyst_core::{\n\n ecs::prelude::{\n\n Entities, Entity, Join, Read, ReadStorage, System, SystemData, World, WriteStorage,\n\n },\n\n math::convert,\n", "file_path": "amethyst_audio/src/systems/audio.rs", "rank": 57, "score": 11.432224829640415 }, { "content": " TextureSub,\n\n },\n\n types::{Backend, Texture},\n\n util,\n\n};\n\nuse amethyst_window::ScreenDimensions;\n\nuse derivative::Derivative;\n\nuse glsl_layout::AsStd140;\n\nuse std::marker::PhantomData;\n\n\n\nuse crate::{\n\n iters::Region,\n\n map::{Map, MapStorage, Tile, TileMap},\n\n pod::{TileArgs, TileMapArgs},\n\n CoordinateEncoder, 
MortonEncoder2D,\n\n};\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n", "file_path": "amethyst_tiles/src/pass.rs", "rank": 58, "score": 11.414118938545613 }, { "content": "pub struct Registry<T: ?Sized> {\n\n pub map: BTreeMap<&'static str, Option<DeserializeFn<T>>>,\n\n pub names: Vec<&'static str>,\n\n}\n\n\n\npub struct SeqLookupVisitor<'a, T: ?Sized + 'static> {\n\n pub expected: &'a dyn Expected,\n\n pub registry: &'static Registry<T>,\n\n}\n\n\n\nimpl<'de, 'a, T: ?Sized + 'static> Visitor<'de> for SeqLookupVisitor<'a, T> {\n\n type Value = DeserializeFn<T>;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n Expected::fmt(self.expected, formatter)\n\n }\n\n\n\n fn visit_str<E: de::Error>(self, key: &str) -> Result<Self::Value, E> {\n\n match self.registry.map.get(key) {\n\n Some(Some(value)) => Ok(*value),\n", "file_path": "amethyst_assets/src/dyn_format.rs", "rank": 59, "score": 11.347077871651098 }, { "content": "use std::time::Duration;\n\n\n\nuse amethyst::{\n\n assets::ProgressCounter,\n\n core::Stopwatch,\n\n ecs::{World, WorldExt},\n\n State, StateData, Trans,\n\n};\n\n\n\nuse derivative::Derivative;\n\nuse log::warn;\n\n\n\nuse crate::GameUpdate;\n\n\n\n/// Time limit before outputting a warning message.\n\nconst LOADING_TIME_LIMIT: Duration = Duration::from_secs(10);\n\n\n\n/// Reads a `ProgressCounter` resource and waits for it to be `complete()`.\n\n#[derive(Derivative)]\n\n#[derivative(Debug)]\n", "file_path": "amethyst_test/src/wait_for_load.rs", "rank": 60, "score": 11.3318158371124 }, { "content": "use std::{\n\n marker::PhantomData,\n\n sync::{\n\n atomic::{AtomicUsize, Ordering},\n\n Arc, Weak,\n\n },\n\n};\n\n\n\nuse crossbeam_queue::SegQueue;\n\nuse derivative::Derivative;\n\nuse log::{debug, error, trace, warn};\n\nuse rayon::ThreadPool;\n\n\n\nuse amethyst_core::{\n\n ecs::{\n\n hibitset::BitSet,\n\n prelude::{Component, Read, ReadExpect, System, SystemData, 
VecStorage, World, Write},\n\n storage::UnprotectedStorage,\n\n },\n\n SystemDesc, Time,\n", "file_path": "amethyst_assets/src/storage.rs", "rank": 61, "score": 11.321387776986384 }, { "content": "pub use nalgebra as math;\n\npub use num_traits as num;\n\npub use specs as ecs;\n\npub use specs::{shred, shrev};\n\n\n\nuse std::sync::Arc;\n\n\n\npub use crate::{\n\n bundle::SystemBundle,\n\n event::EventReader,\n\n system_ext::{Pausable, SystemExt},\n\n timing::*,\n\n transform::*,\n\n};\n\n\n\npub use self::{\n\n axis::{Axis2, Axis3},\n\n hidden::{Hidden, HiddenPropagate},\n\n hide_system::{HideHierarchySystem, HideHierarchySystemDesc},\n\n named::{Named, WithNamed},\n", "file_path": "amethyst_core/src/lib.rs", "rank": 62, "score": 11.31765116124437 }, { "content": "use std::{path::Path, sync::Arc};\n\n\n\nuse amethyst_assets::Source as AssetSource;\n\nuse amethyst_error::Error;\n\nuse gltf::{self, json, Gltf};\n\n\n\nuse crate::error;\n\n\n\n#[derive(Debug)]\n\npub enum ImageFormat {\n\n Png,\n\n Jpeg,\n\n}\n\n\n\nimpl ImageFormat {\n\n fn from_mime_type(mime: &str) -> Self {\n\n match mime {\n\n \"image/jpeg\" => ImageFormat::Jpeg,\n\n \"image/png\" => ImageFormat::Png,\n\n _ => unreachable!(),\n", "file_path": "amethyst_gltf/src/format/importer.rs", "rank": 63, "score": 11.298869040295953 }, { "content": "//! Provides the ability to store `Systems`, `Bundles`, `Barriers`, in a normal vector for deferred dispatcher construction.\n\n\n\nuse std::marker::PhantomData;\n\n\n\nuse derivative::Derivative;\n\n\n\nuse amethyst_error::Error;\n\n\n\nuse crate::{\n\n ecs::prelude::{DispatcherBuilder, RunNow, System, World},\n\n RunNowDesc, SystemBundle, SystemDesc,\n\n};\n\n\n\n/// Trait to capture deferred dispatcher builder operations.\n", "file_path": "amethyst_core/src/deferred_dispatcher_operation.rs", "rank": 64, "score": 11.266535968874848 }, { "content": "//! 
An example showing how to create a dispatcher inside of a State.\n\n\n\nuse amethyst::{\n\n ecs::{Dispatcher, DispatcherBuilder, WorldExt},\n\n prelude::*,\n\n shrev::EventChannel,\n\n utils::application_root_dir,\n\n Error,\n\n};\n\n\n\nuse std::marker::PhantomData;\n\n\n", "file_path": "examples/state_dispatcher/main.rs", "rank": 65, "score": 11.244388444483743 }, { "content": "use std::{marker, time::Duration};\n\n\n\nuse itertools::Itertools;\n\nuse minterpolate::InterpolationPrimitive;\n\n\n\nuse amethyst_assets::AssetStorage;\n\nuse amethyst_core::{\n\n duration_to_nanos, duration_to_secs,\n\n ecs::prelude::{Component, Join, Read, System, WriteStorage},\n\n nanos_to_duration, secs_to_duration, Time,\n\n};\n\n\n\nuse crate::resources::{\n\n AnimationSampling, ApplyData, BlendMethod, ControlState, EndControl, Sampler, SamplerControl,\n\n SamplerControlSet,\n\n};\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n", "file_path": "amethyst_animation/src/systems/sampling.rs", "rank": 66, "score": 11.176939430597631 }, { "content": "use std::{\n\n fs::File,\n\n path::{Path, PathBuf},\n\n time::UNIX_EPOCH,\n\n};\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n\nuse amethyst_error::{format_err, Error, ResultExt};\n\n\n\nuse crate::{error, source::Source};\n\n\n\n/// Directory source.\n\n///\n\n/// Please note that there is a default directory source\n\n/// inside the `Loader`, which is automatically used when you call\n\n/// `load`. In case you want another, second, directory for assets,\n\n/// you can instantiate one yourself, too. 
Please use `Loader::load_from` then.\n\n#[derive(Debug)]\n", "file_path": "amethyst_assets/src/source/dir.rs", "rank": 67, "score": 11.171485953217317 }, { "content": "};\n\nuse amethyst_window::ScreenDimensions;\n\nuse derivative::Derivative;\n\nuse glsl_layout::{vec2, vec4, AsStd140};\n\nuse std::cmp::Ordering;\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n\n/// A [RenderPlugin] for rendering UI elements.\n\n#[derive(Debug, Default)]\n\npub struct RenderUi {\n\n target: Target,\n\n}\n\n\n\nimpl RenderUi {\n\n /// Select render target on which UI should be rendered.\n\n pub fn with_target(mut self, target: Target) -> Self {\n\n self.target = target;\n\n self\n", "file_path": "amethyst_ui/src/pass.rs", "rank": 68, "score": 11.169602892306164 }, { "content": " let pixel_top = 20;\n\n let offsets = [-5.0, -10.0];\n\n\n\n assert_eq!(\n\n Sprite::from((\n\n (10., 20.), // Sprite w and h\n\n [-5., -10.], // Offsets\n\n [0., 10. / 30., 1., 20. / 40.], // Texture coordinates\n\n )),\n\n Sprite::from_pixel_values(\n\n image_w, image_h, sprite_w, sprite_h, pixel_left, pixel_top, offsets, false, false\n\n )\n\n );\n\n }\n\n fn create_texture() -> Handle<Texture> {\n\n use crate::formats::texture::TextureGenerator;\n\n use amethyst_assets::{AssetStorage, Loader};\n\n use rayon::ThreadPoolBuilder;\n\n use std::sync::Arc;\n\n\n", "file_path": "amethyst_rendy/src/sprite/mod.rs", "rank": 69, "score": 11.159982114143354 }, { "content": "//! Module containing structures useful for batching draw calls\n\n//! in scenarios with various known assumptions, e.g. 
order independence.\n\nuse crate::util::TapCountIter;\n\nuse derivative::Derivative;\n\nuse smallvec::{smallvec, SmallVec};\n\nuse std::{\n\n collections::hash_map::Entry,\n\n iter::{Extend, FromIterator},\n\n ops::Range,\n\n};\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n\n/// Iterator trait for grouping iterated 2-tuples `(K, V)` by contiguous ranges with equal `K`,\n\n/// providing access in a group-by-group manner.\n", "file_path": "amethyst_rendy/src/batch.rs", "rank": 70, "score": 11.113081675325986 }, { "content": "//! CircularBuffer\n\n\n\nuse std::collections::VecDeque;\n\n\n\n/// A CircularBuffer that drops the oldest element inserted when full.\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// # use amethyst_utils::circular_buffer::CircularBuffer;\n\n/// # use std::collections::VecDeque;\n\n/// let mut buf = CircularBuffer::<u32>::new(2);\n\n/// assert_eq!(*buf.queue(), VecDeque::<u32>::from(vec![]));\n\n/// assert!(buf.push(1).is_none());\n\n/// assert_eq!(*buf.queue(), VecDeque::<u32>::from(vec![1]));\n\n/// assert!(buf.push(2).is_none());\n\n/// assert_eq!(*buf.queue(), VecDeque::<u32>::from(vec![1, 2]));\n\n/// assert!(buf.push(3).is_some());\n\n/// assert_eq!(*buf.queue(), VecDeque::<u32>::from(vec![2, 3]));\n\n/// assert_eq!(buf.capacity(), 2);\n\n/// ```\n", "file_path": "amethyst_utils/src/circular_buffer.rs", "rank": 71, "score": 11.111664318501772 }, { "content": "//! 
GLTF format\n\n\n\nuse std::{cmp::Ordering, collections::HashMap, sync::Arc};\n\n\n\nuse gltf::{self, Gltf};\n\nuse log::debug;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse amethyst_animation::AnimationHierarchyPrefab;\n\nuse amethyst_assets::{Format, FormatValue, Prefab, Source};\n\nuse amethyst_core::{\n\n math::{convert, Quaternion, Unit, Vector3, Vector4},\n\n transform::Transform,\n\n};\n\nuse amethyst_error::{format_err, Error, ResultExt};\n\nuse amethyst_rendy::{camera::CameraPrefab, light::LightPrefab};\n\n\n\nuse crate::{error, GltfMaterialSet, GltfNodeExtent, GltfPrefab, GltfSceneOptions, Named};\n\n\n\nuse self::{\n", "file_path": "amethyst_gltf/src/format/mod.rs", "rank": 72, "score": 11.098750358332463 }, { "content": "use std::{borrow::Borrow, hash::Hash, path::PathBuf, sync::Arc};\n\n\n\nuse fnv::FnvHashMap;\n\nuse log::debug;\n\nuse rayon::ThreadPool;\n\n\n\nuse amethyst_error::ResultExt;\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n\nuse crate::{\n\n error::Error,\n\n storage::{AssetStorage, Handle, Processed},\n\n Asset, Directory, Format, FormatValue, Progress, Source,\n\n};\n\n\n\n/// The asset loader, holding the sources and a reference to the `ThreadPool`.\n\npub struct Loader {\n\n hot_reload: bool,\n\n pool: Arc<ThreadPool>,\n", "file_path": "amethyst_assets/src/loader.rs", "rank": 73, "score": 11.094976090707046 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse std::{\n\n collections::{HashMap, HashSet},\n\n marker::PhantomData,\n\n};\n\n\n\nuse amethyst_core::{\n\n ecs::{\n\n Component, DenseVecStorage, Entities, Entity, Join, Read, ReadExpect, ReadStorage,\n\n ReaderId, System, SystemData, Write, WriteStorage,\n\n },\n\n math::Vector2,\n\n shrev::EventChannel,\n\n Hidden, HiddenPropagate, ParentHierarchy,\n\n};\n\nuse amethyst_derive::SystemDesc;\n\nuse amethyst_input::{BindingTypes, InputHandler};\n\nuse amethyst_window::ScreenDimensions;\n\n\n\nuse crate::{\n", "file_path": 
"amethyst_ui/src/drag.rs", "rank": 74, "score": 11.094976090707046 }, { "content": "use std::sync::{\n\n atomic::{AtomicUsize, Ordering},\n\n Arc,\n\n};\n\n\n\nuse amethyst_error::Error;\n\nuse log::error;\n\nuse parking_lot::Mutex;\n\n\n\n/// Completion status, returned by `ProgressCounter::complete`.\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\npub enum Completion {\n\n /// Loading is complete\n\n Complete,\n\n /// Some asset loads have failed\n\n Failed,\n\n /// Still loading assets\n\n Loading,\n\n}\n\n\n\n/// The `Progress` trait, allowing to track which assets are\n\n/// imported already.\n", "file_path": "amethyst_assets/src/progress.rs", "rank": 75, "score": 11.055907798050677 }, { "content": "use std::{borrow::Borrow, hash::Hash};\n\n\n\nuse derivative::Derivative;\n\nuse fnv::FnvHashMap;\n\n\n\nuse crate::{Handle, WeakHandle};\n\n\n\n/// A simple cache for asset handles of type `A`.\n\n/// This stores `WeakHandle`, so it doesn't keep the assets alive.\n\n#[derive(Derivative)]\n\n#[derivative(Default(bound = \"\"))]\n\npub struct Cache<A> {\n\n map: FnvHashMap<String, WeakHandle<A>>,\n\n}\n\n\n\nimpl<A> Cache<A>\n\nwhere\n\n A: Clone,\n\n{\n\n /// Creates a new `Cache` and initializes it with the default values.\n", "file_path": "amethyst_assets/src/cache.rs", "rank": 76, "score": 11.055907798050677 }, { "content": "use std::marker::PhantomData;\n\n\n\nuse amethyst_core::{\n\n bundle::SystemBundle,\n\n ecs::{DispatcherBuilder, World},\n\n};\n\nuse amethyst_error::Error;\n\nuse derive_new::new;\n\n\n\nuse crate::{types::Backend, RenderingBundle};\n\n\n\n/// Adds basic rendering system to the dispatcher.\n\n///\n\n/// This test bundle requires the user to also add the `TransformBundle`.\n\n///\n\n/// This is only meant for testing and only provides very basic sprite rendering. 
You need to enable\n\n/// the `test-support` flag to use this.\n\n#[derive(Debug, new)]\n\npub struct RenderTestBundle<B>(PhantomData<B>);\n\n\n", "file_path": "amethyst_rendy/src/render_test_bundle.rs", "rank": 77, "score": 11.055532237400719 }, { "content": "//! Defines the `Reload` trait.\n\n\n\nuse std::{sync::Arc, time::Instant};\n\n\n\nuse derive_new::new;\n\n\n\nuse amethyst_core::{\n\n ecs::prelude::{DispatcherBuilder, Read, System, SystemData, World, Write},\n\n SystemBundle, SystemDesc, Time,\n\n};\n\nuse amethyst_error::Error;\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n\nuse crate::{Format, FormatValue, Loader, Source};\n\n\n\n/// This bundle activates hot reload for the `Loader`,\n\n/// adds a `HotReloadStrategy` and the `HotReloadSystem`.\n\n#[derive(Default)]\n", "file_path": "amethyst_assets/src/reload.rs", "rank": 78, "score": 11.055532237400719 }, { "content": "use palette::{LinSrgba, Srgba};\n\nuse rendy::{\n\n command::{Families, QueueId},\n\n factory::{Factory, ImageState},\n\n graph::{Graph, GraphBuilder},\n\n texture::palette::{load_from_linear_rgba, load_from_srgba},\n\n};\n\nuse std::{marker::PhantomData, sync::Arc};\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n\n/// Graph trait implementation required by consumers. 
Builds a graph and manages signaling when\n\n/// the graph needs to be rebuilt.\n", "file_path": "amethyst_rendy/src/system.rs", "rank": 79, "score": 11.021750344779498 }, { "content": "use bytes::Bytes;\n\nuse log::error;\n\nuse std::time::Instant;\n\n\n\n/// Use this network bundle to add the laminar transport layer to your game.\n\npub struct LaminarNetworkBundle {\n\n socket: Option<LaminarSocket>,\n\n}\n\n\n\nimpl LaminarNetworkBundle {\n\n pub fn new(socket: Option<LaminarSocket>) -> Self {\n\n Self { socket }\n\n }\n\n}\n\n\n\nimpl<'a, 'b> SystemBundle<'a, 'b> for LaminarNetworkBundle {\n\n fn build(\n\n self,\n\n world: &mut World,\n\n builder: &mut DispatcherBuilder<'_, '_>,\n", "file_path": "amethyst_network/src/simulation/transport/laminar.rs", "rank": 80, "score": 11.021750344779498 }, { "content": "use crate::transform::UiTransform;\n\nuse amethyst_core::{\n\n ecs::{\n\n prelude::{\n\n Component, Entities, Entity, Join, Read, ReadExpect, ReadStorage, System, Write,\n\n },\n\n storage::NullStorage,\n\n },\n\n math::Vector2,\n\n shrev::EventChannel,\n\n Hidden, HiddenPropagate,\n\n};\n\nuse amethyst_input::{BindingTypes, InputHandler};\n\nuse amethyst_window::ScreenDimensions;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{collections::HashSet, marker::PhantomData};\n\nuse winit::MouseButton;\n\n\n\n/// An event that pertains to a specific `Entity`, for example a `UiEvent` for clicking on a widget\n\n/// entity.\n", "file_path": "amethyst_ui/src/event.rs", "rank": 81, "score": 11.014774228125363 }, { "content": "use std::marker::PhantomData;\n\n\n\nuse amethyst_core::{\n\n ecs::{\n\n prelude::{\n\n Component, DenseVecStorage, Entities, Entity, FlaggedStorage, Join, ReadStorage, World,\n\n },\n\n shred::{ResourceId, SystemData},\n\n storage::GenericReadStorage,\n\n },\n\n ParentHierarchy,\n\n};\n\nuse amethyst_window::ScreenDimensions;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse super::{Anchor, ScaleMode, Stretch};\n\n\n\n/// Utility 
`SystemData` for finding UI entities based on `UiTransform` id\n\n#[derive(SystemData)]\n", "file_path": "amethyst_ui/src/transform.rs", "rank": 82, "score": 10.982906653890137 }, { "content": "//! Transparency, visibility sorting and camera centroid culling for 2D Sprites.\n\nuse crate::{\n\n camera::{ActiveCamera, Camera},\n\n transparent::Transparent,\n\n};\n\nuse amethyst_core::{\n\n ecs::{\n\n hibitset::BitSet,\n\n prelude::{Entities, Entity, Join, Read, ReadStorage, System, Write},\n\n },\n\n math::{Point3, Vector3},\n\n Hidden, HiddenPropagate, Transform,\n\n};\n\nuse derivative::Derivative;\n\nuse std::cmp::Ordering;\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n\n/// Resource for controlling what entities should be rendered, and whether to draw them ordered or\n", "file_path": "amethyst_rendy/src/sprite_visibility.rs", "rank": 83, "score": 10.982906653890137 }, { "content": "use std::marker::PhantomData;\n\n\n\nuse derivative::Derivative;\n\nuse derive_new::new;\n\nuse serde::{Deserialize, Serialize};\n\nuse winit::{ElementState, Event, KeyboardInput, VirtualKeyCode, WindowEvent};\n\n\n\nuse amethyst_core::{\n\n ecs::{\n\n Component, DenseVecStorage, Entities, FlaggedStorage, Join, Read, ReadStorage, ReaderId,\n\n System, SystemData, World, Write, WriteStorage,\n\n },\n\n shrev::EventChannel,\n\n SystemDesc,\n\n};\n\nuse amethyst_derive::SystemDesc;\n\nuse amethyst_input::{BindingTypes, InputHandler};\n\n\n\nuse crate::{CachedSelectionOrder, UiEvent, UiEventType};\n\n\n", "file_path": "amethyst_ui/src/selection.rs", "rank": 84, "score": 10.972952279358203 }, { "content": "//! 
Provides a small simple tag component for identifying entities.\n\n\n\nuse std::marker::PhantomData;\n\n\n\nuse amethyst_assets::PrefabData;\n\nuse amethyst_core::ecs::{\n\n shred::{ResourceId, SystemData},\n\n Component, Entities, Entity, Join, NullStorage, ReadStorage, World, WriteStorage,\n\n};\n\nuse amethyst_derive::PrefabData;\n\nuse amethyst_error::Error;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// Tag component that can be used with a custom type to tag entities for processing\n\n#[derive(Clone, Debug, Serialize, Deserialize, PrefabData)]\n\n#[serde(default)]\n\n#[prefab(Component)]\n\npub struct Tag<T>\n\nwhere\n", "file_path": "amethyst_utils/src/tag.rs", "rank": 85, "score": 10.962252050217984 }, { "content": "use std::marker::PhantomData;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse amethyst_core::ecs::prelude::{\n\n Component, DenseVecStorage, Entity, FlaggedStorage, Read, ReadExpect, ResourceId, SystemData,\n\n World, WriteStorage,\n\n};\n\nuse amethyst_error::Error;\n\n\n\nuse crate::{\n\n Asset, AssetStorage, Format, Handle, Loader, Progress, ProgressCounter, SerializableFormat,\n\n};\n\n\n\npub use self::system::{PrefabLoaderSystem, PrefabLoaderSystemDesc};\n\n\n\nmod impls;\n\nmod system;\n\n\n\n/// Trait for loading a prefabs data for a single entity\n", "file_path": "amethyst_assets/src/prefab/mod.rs", "rank": 86, "score": 10.942911072918609 }, { "content": "use std::marker::PhantomData;\n\n\n\nuse amethyst_core::{\n\n bundle::SystemBundle,\n\n ecs::prelude::{DispatcherBuilder, World},\n\n math::one,\n\n SystemDesc,\n\n};\n\nuse amethyst_error::Error;\n\nuse amethyst_input::BindingTypes;\n\n\n\nuse super::*;\n\n\n\n/// The bundle that creates a flying movement system.\n\n///\n\n/// Note: Will not actually create a moving entity. 
It will only register the needed resources and\n\n/// systems.\n\n///\n\n/// You might want to add `\"fly_movement\"` and `\"free_rotation\"` as dependencies of the\n\n/// `TransformSystem` in order to apply changes made by these systems in the same frame.\n", "file_path": "amethyst_controls/src/bundles.rs", "rank": 87, "score": 10.922496786250235 }, { "content": " }\n\n\n\n /// Access the internal `std::error::Error` as a trait.\n\n ///\n\n /// This can be useful for integrating with systems that operate on `std::error::Error`.\n\n ///\n\n /// **Warning:** This erases most diagnostics in favor of returning only the top error.\n\n /// `std::error::Error` is expanded further.\n\n pub fn as_error(&self) -> &(dyn error::Error + 'static) {\n\n &self.inner.error\n\n }\n\n}\n\n\n\n/// Blanket implementation.\n\n///\n\n/// Encapsulate errors which are Send + Sync.\n\nimpl<T> From<T> for Error\n\nwhere\n\n T: 'static + error::Error + Send + Sync,\n\n{\n", "file_path": "amethyst_error/src/lib.rs", "rank": 88, "score": 10.919973318370541 }, { "content": "//! The core engine framework.\n\n\n\nuse std::{env, marker::PhantomData, path::Path, sync::Arc, time::Duration};\n\n\n\nuse crate::shred::Resource;\n\nuse derivative::Derivative;\n\nuse log::{debug, info, log_enabled, trace, Level};\n\nuse rayon::ThreadPoolBuilder;\n\n#[cfg(feature = \"sentry\")]\n\nuse sentry::integrations::panic::register_panic_handler;\n\nuse winit::Event;\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::{profile_scope, register_thread_with_profiler, write_profile};\n\n\n\nuse crate::{\n\n assets::{Loader, Source},\n\n callback_queue::CallbackQueue,\n\n core::{\n\n frame_limiter::{FrameLimiter, FrameRateLimitConfig, FrameRateLimitStrategy},\n", "file_path": "src/app.rs", "rank": 89, "score": 10.88002494149026 }, { "content": "//! 
World resource that handles all user input.\n\n\n\nuse super::{\n\n controller::{ControllerButton, ControllerEvent},\n\n event::InputEvent::{self, *},\n\n scroll_direction::ScrollDirection,\n\n *,\n\n};\n\nuse amethyst_core::shrev::EventChannel;\n\nuse derivative::Derivative;\n\nuse smallvec::SmallVec;\n\nuse std::{borrow::Borrow, hash::Hash};\n\nuse winit::{\n\n dpi::LogicalPosition, DeviceEvent, ElementState, Event, KeyboardInput, MouseButton,\n\n MouseScrollDelta, VirtualKeyCode, WindowEvent,\n\n};\n\n\n\n/// This struct holds state information about input devices.\n\n///\n\n/// For example, if a key is pressed on the keyboard, this struct will record\n", "file_path": "amethyst_input/src/input_handler.rs", "rank": 90, "score": 10.860531885098172 }, { "content": "//! Transparency, visibility sorting and camera centroid culling for 3D Meshes.\n\nuse crate::{\n\n camera::{ActiveCamera, Camera},\n\n transparent::Transparent,\n\n};\n\nuse amethyst_core::{\n\n ecs::{\n\n hibitset::BitSet,\n\n prelude::{\n\n Component, DenseVecStorage, Entities, Entity, Join, Read, ReadStorage, System, Write,\n\n },\n\n },\n\n math::{convert, distance_squared, Matrix4, Point3, Vector4},\n\n Hidden, HiddenPropagate, Transform,\n\n};\n\n\n\nuse serde::{Deserialize, Serialize};\n\nuse std::cmp::Ordering;\n\n\n\n#[cfg(feature = \"profiler\")]\n", "file_path": "amethyst_rendy/src/visibility.rs", "rank": 91, "score": 10.854812855938992 }, { "content": "use std::{collections::HashMap, marker::PhantomData};\n\n\n\nuse derivative::Derivative;\n\nuse log::error;\n\n\n\nuse amethyst_core::{\n\n ecs::{\n\n storage::ComponentEvent, BitSet, Entities, Entity, Join, Read, ReadExpect, ReadStorage,\n\n ReaderId, System, SystemData, World, Write, WriteStorage,\n\n },\n\n ArcThreadPool, Parent, SystemDesc, Time,\n\n};\n\nuse amethyst_error::{format_err, Error, ResultExt};\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n\nuse crate::{AssetStorage, Completion, Handle, 
HotReloadStrategy, ProcessingState};\n\n\n\nuse super::{Prefab, PrefabData, PrefabTag};\n", "file_path": "amethyst_assets/src/prefab/system.rs", "rank": 92, "score": 10.841092838993157 }, { "content": " use crate::{\n\n bundle::{ImageOptions, OutputColor},\n\n Format, Kind,\n\n };\n\n use amethyst_config::{Config, ConfigError};\n\n use amethyst_core::{\n\n ecs::{ReadExpect, SystemData},\n\n SystemBundle,\n\n };\n\n use amethyst_window::{DisplayConfig, ScreenDimensions, Window, WindowBundle};\n\n use rendy::hal::command::{ClearColor, ClearDepthStencil, ClearValue};\n\n use std::path::Path;\n\n\n\n /// A [RenderPlugin] for opening a window and displaying a render target to it.\n\n ///\n\n /// When you provide [`DisplayConfig`], it opens a window for you using [`WindowBundle`].\n\n #[derive(Default, Debug)]\n\n pub struct RenderToWindow {\n\n target: Target,\n\n config: Option<DisplayConfig>,\n", "file_path": "amethyst_rendy/src/plugins.rs", "rank": 93, "score": 10.834120924012396 }, { "content": "use std::marker::PhantomData;\n\n\n\nuse derivative::Derivative;\n\n\n\nuse amethyst_core::{\n\n ecs::prelude::{Component, Read, ReadStorage, System, SystemData, World, Write},\n\n shrev::{Event, EventChannel, ReaderId},\n\n SystemDesc,\n\n};\n\n\n\nuse crate::event::TargetedEvent;\n\n\n\n#[cfg(feature = \"profiler\")]\n\nuse thread_profiler::profile_scope;\n\n\n\n/// Describes anything that can receive events one by one or in batches. 
This\n\n/// lets whoever wants to receive triggered events decide on how they\n\n/// want to receive them, instead of forcing them to construct certain\n\n/// data structures such as a `Vec`.\n", "file_path": "amethyst_ui/src/event_retrigger.rs", "rank": 94, "score": 10.833511270881008 }, { "content": " Named,\n\n};\n\nuse amethyst_error::Error;\n\nuse amethyst_rendy::{\n\n camera::CameraPrefab, formats::mtl::MaterialPrefab, light::LightPrefab,\n\n rendy::mesh::MeshBuilder, types::Mesh, visibility::BoundingSphere,\n\n};\n\nuse derivative::Derivative;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{collections::HashMap, ops::Range};\n\n\n\npub use crate::format::GltfSceneFormat;\n\n\n\nmod error;\n\nmod format;\n\n\n\n/// Builds a `GltfSceneLoaderSystem`.\n\npub type GltfSceneLoaderSystemDesc = PrefabLoaderSystemDesc<GltfPrefab>;\n\n\n\n/// Load `GltfSceneAsset`s\n", "file_path": "amethyst_gltf/src/lib.rs", "rank": 95, "score": 10.83345645585146 }, { "content": "use super::{get_image_data, Buffers, ImageFormat as ImportDataFormat};\n\nuse amethyst_assets::Source;\n\nuse amethyst_error::Error;\n\nuse amethyst_rendy::{\n\n formats::{mtl::MaterialPrefab, texture::TexturePrefab},\n\n palette::{LinSrgba, Srgba},\n\n rendy::{\n\n hal,\n\n texture::{\n\n image::{load_from_image, ImageFormat as DataFormat, ImageTextureConfig, Repr},\n\n palette::{load_from_linear_rgba, load_from_srgba},\n\n MipLevels, TextureBuilder,\n\n },\n\n },\n\n};\n\n\n\nuse gltf::{self, material::AlphaMode};\n\nuse std::sync::Arc;\n\n\n\n// Load a single material, and transform into a format usable by the engine\n", "file_path": "amethyst_gltf/src/format/material.rs", "rank": 96, "score": 10.806569224284413 }, { "content": "mod test {\n\n use std::path::Path;\n\n\n\n use crate::source::Source;\n\n\n\n use super::Directory;\n\n\n\n #[test]\n\n fn loads_asset_from_assets_directory() {\n\n let test_assets_dir = Path::new(env!(\"CARGO_MANIFEST_DIR\")).join(\"tests/assets\");\n\n let directory = 
Directory::new(test_assets_dir);\n\n\n\n assert_eq!(\n\n b\"data\".to_vec(),\n\n directory\n\n .load(\"subdir/asset\")\n\n .expect(\"Failed to load tests/assets/subdir/asset\")\n\n );\n\n }\n\n\n", "file_path": "amethyst_assets/src/source/dir.rs", "rank": 97, "score": 10.770228379373691 }, { "content": " SystemBundle,\n\n};\n\nuse amethyst_error::{format_err, Error};\n\nuse std::collections::HashMap;\n\n\n\n/// A bundle of systems used for rendering using `Rendy` render graph.\n\n///\n\n/// Provides a mechanism for registering rendering plugins.\n\n/// By itself doesn't render anything, you must use `with_plugin` method\n\n/// to define a set of functionalities you want to use.\n\n///\n\n/// If you need much more control, or you need to deal directly with the render pipeline,\n\n/// it's possible to define a `RenderGraphCreator` as show by the\n\n/// `renderable_custom` example.\n\n#[derive(Debug)]\n\npub struct RenderingBundle<B: Backend> {\n\n plugins: Vec<Box<dyn RenderPlugin<B>>>,\n\n}\n\n\n\nimpl<B: Backend> RenderingBundle<B> {\n", "file_path": "amethyst_rendy/src/bundle.rs", "rank": 98, "score": 10.753350155276964 }, { "content": "use crate::{\n\n resources::AnimationSampling,\n\n skinning::VertexSkinningSystemDesc,\n\n systems::{\n\n AnimationControlSystemDesc, AnimationProcessor, SamplerInterpolationSystem,\n\n SamplerProcessor,\n\n },\n\n};\n\nuse amethyst_core::{\n\n ecs::prelude::{Component, DispatcherBuilder, World},\n\n SystemBundle, SystemDesc,\n\n};\n\nuse amethyst_error::Error;\n\nuse std::{hash::Hash, marker};\n\n\n\n/// Bundle for vertex skinning\n\n///\n\n/// This registers `VertexSkinningSystem`.\n\n/// Note that the user must make sure this system runs after `TransformSystem`\n\n#[derive(Default, Debug)]\n", "file_path": "amethyst_animation/src/bundle.rs", "rank": 99, "score": 10.725687871618065 } ]
Rust
src/float/conv.rs
mattico/compiler-builtins
35dec6bd8ab1214078e6284d58e9ba62271e2fb9
use float::Float; use int::{Int, CastInto}; fn int_to_float<I: Int, F: Float>(i: I) -> F where F::Int: CastInto<u32>, F::Int: CastInto<I>, I::UnsignedInt: CastInto<F::Int>, u32: CastInto<F::Int>, { if i == I::ZERO { return F::ZERO; } let two = I::UnsignedInt::ONE + I::UnsignedInt::ONE; let four = two + two; let mant_dig = F::SIGNIFICAND_BITS + 1; let exponent_bias = F::EXPONENT_BIAS; let n = I::BITS; let (s, a) = i.extract_sign(); let mut a = a; let sd = n - a.leading_zeros(); let mut e = sd - 1; if I::BITS < mant_dig { return F::from_parts(s, (e + exponent_bias).cast(), a.cast() << (mant_dig - e - 1)); } a = if sd > mant_dig { /* start: 0000000000000000000001xxxxxxxxxxxxxxxxxxxxxxPQxxxxxxxxxxxxxxxxxx * finish: 000000000000000000000000000000000000001xxxxxxxxxxxxxxxxxxxxxxPQR * 12345678901234567890123456 * 1 = msb 1 bit * P = bit MANT_DIG-1 bits to the right of 1 * Q = bit MANT_DIG bits to the right of 1 * R = "or" of all bits to the right of Q */ let mant_dig_plus_one = mant_dig + 1; let mant_dig_plus_two = mant_dig + 2; a = if sd == mant_dig_plus_one { a << 1 } else if sd == mant_dig_plus_two { a } else { (a >> (sd - mant_dig_plus_two)) | Int::from_bool((a & I::UnsignedInt::max_value()).wrapping_shl((n + mant_dig_plus_two) - sd) != Int::ZERO) }; /* finish: */ a |= Int::from_bool((a & four) != I::UnsignedInt::ZERO); /* Or P into R */ a += Int::ONE; /* round - this step may add a significant bit */ a >>= 2; /* dump Q and R */ /* a is now rounded to mant_dig or mant_dig+1 bits */ if (a & (I::UnsignedInt::ONE << mant_dig)) != Int::ZERO { a >>= 1; e += 1; } a /* a is now rounded to mant_dig bits */ } else { a.wrapping_shl(mant_dig - sd) /* a is now rounded to mant_dig bits */ }; F::from_parts(s, (e + exponent_bias).cast(), a.cast()) } intrinsics! 
{ #[arm_aeabi_alias = __aeabi_i2f] pub extern "C" fn __floatsisf(i: i32) -> f32 { int_to_float(i) } #[arm_aeabi_alias = __aeabi_i2d] pub extern "C" fn __floatsidf(i: i32) -> f64 { int_to_float(i) } #[use_c_shim_if(all(target_arch = "x86", not(target_env = "msvc")))] #[arm_aeabi_alias = __aeabi_l2d] pub extern "C" fn __floatdidf(i: i64) -> f64 { if cfg!(target_arch = "x86_64") { i as f64 } else { int_to_float(i) } } #[unadjusted_on_win64] pub extern "C" fn __floattisf(i: i128) -> f32 { int_to_float(i) } #[unadjusted_on_win64] pub extern "C" fn __floattidf(i: i128) -> f64 { int_to_float(i) } #[arm_aeabi_alias = __aeabi_ui2f] pub extern "C" fn __floatunsisf(i: u32) -> f32 { int_to_float(i) } #[arm_aeabi_alias = __aeabi_ui2d] pub extern "C" fn __floatunsidf(i: u32) -> f64 { int_to_float(i) } #[use_c_shim_if(all(not(target_env = "msvc"), any(target_arch = "x86", all(not(windows), target_arch = "x86_64"))))] #[arm_aeabi_alias = __aeabi_ul2d] pub extern "C" fn __floatundidf(i: u64) -> f64 { int_to_float(i) } #[unadjusted_on_win64] pub extern "C" fn __floatuntisf(i: u128) -> f32 { int_to_float(i) } #[unadjusted_on_win64] pub extern "C" fn __floatuntidf(i: u128) -> f64 { int_to_float(i) } } #[derive(PartialEq)] enum Sign { Positive, Negative } fn float_to_int<F: Float, I: Int>(f: F) -> I where F::Int: CastInto<u32>, F::Int: CastInto<I>, { let f = f; let fixint_min = I::min_value(); let fixint_max = I::max_value(); let fixint_bits = I::BITS; let fixint_unsigned = fixint_min == I::ZERO; let sign_bit = F::SIGN_MASK; let significand_bits = F::SIGNIFICAND_BITS; let exponent_bias = F::EXPONENT_BIAS; let a_rep = F::repr(f); let a_abs = a_rep & !sign_bit; let sign = if (a_rep & sign_bit) == F::Int::ZERO { Sign::Positive } else { Sign::Negative }; let mut exponent: u32 = (a_abs >> significand_bits).cast(); let significand = (a_abs & F::SIGNIFICAND_MASK) | F::IMPLICIT_BIT; if exponent < exponent_bias || fixint_unsigned && sign == Sign::Negative { return I::ZERO; } exponent -= 
exponent_bias; if exponent >= (if fixint_unsigned {fixint_bits} else {fixint_bits -1}) { return if sign == Sign::Positive {fixint_max} else {fixint_min} } let r: I = if exponent < significand_bits { (significand >> (significand_bits - exponent)).cast() } else { (significand << (exponent - significand_bits)).cast() }; if sign == Sign::Negative { (!r).wrapping_add(I::ONE) } else { r } } intrinsics! { #[arm_aeabi_alias = __aeabi_f2iz] pub extern "C" fn __fixsfsi(f: f32) -> i32 { float_to_int(f) } #[arm_aeabi_alias = __aeabi_f2lz] pub extern "C" fn __fixsfdi(f: f32) -> i64 { float_to_int(f) } #[unadjusted_on_win64] pub extern "C" fn __fixsfti(f: f32) -> i128 { float_to_int(f) } #[arm_aeabi_alias = __aeabi_d2iz] pub extern "C" fn __fixdfsi(f: f64) -> i32 { float_to_int(f) } #[arm_aeabi_alias = __aeabi_d2lz] pub extern "C" fn __fixdfdi(f: f64) -> i64 { float_to_int(f) } #[unadjusted_on_win64] pub extern "C" fn __fixdfti(f: f64) -> i128 { float_to_int(f) } #[arm_aeabi_alias = __aeabi_f2uiz] pub extern "C" fn __fixunssfsi(f: f32) -> u32 { float_to_int(f) } #[arm_aeabi_alias = __aeabi_f2ulz] pub extern "C" fn __fixunssfdi(f: f32) -> u64 { float_to_int(f) } #[unadjusted_on_win64] pub extern "C" fn __fixunssfti(f: f32) -> u128 { float_to_int(f) } #[arm_aeabi_alias = __aeabi_d2uiz] pub extern "C" fn __fixunsdfsi(f: f64) -> u32 { float_to_int(f) } #[arm_aeabi_alias = __aeabi_d2ulz] pub extern "C" fn __fixunsdfdi(f: f64) -> u64 { float_to_int(f) } #[unadjusted_on_win64] pub extern "C" fn __fixunsdfti(f: f64) -> u128 { float_to_int(f) } }
use float::Float; use int::{Int, CastInto}; fn int_to_float<I: Int, F: Float>(i: I) -> F where F::Int: CastInto<u32>, F::Int: CastInto<I>, I::UnsignedInt: CastInto<F::Int>, u32: CastInto<F::Int>, { if i == I::ZERO { return F::ZERO; } let two = I::UnsignedInt::ONE + I::UnsignedInt::ONE; let four = two + two; let mant_dig = F::SIGNIFICAND_BITS + 1; let exponent_bias = F::EXPONENT_BIAS; let n = I::BITS; let (s, a) = i.extract_sign(); let mut a = a; let sd = n - a.leading_zeros(); let mut e = sd - 1; if I::BITS < mant_dig { return F::from_parts(s, (e + exponent_bias).cast(), a.cast() << (mant_dig - e - 1)); } a = if sd > mant_dig { /* start: 0000000000000000000001xxxxxxxxxxxxxxxxxxxxxxPQxxxxxxxxxxxxxxxxxx * finish: 000000000000000000000000000000000000001xxxxxxxxxxxxxxxxxxxxxxPQR * 12345678901234567890123456 * 1 = msb 1 bit * P = bit MANT_DIG-1 bits to the right of 1 * Q = bit MANT_DIG bits to the right of 1 * R = "or" of all bits to the right of Q */ let mant_dig_plus_one = mant_dig + 1; let mant_dig_plus_two = mant_dig + 2; a = if sd == mant_dig_plus_one { a << 1 } else if sd == mant_dig
pub extern "C" fn __floatuntidf(i: u128) -> f64 { int_to_float(i) } } #[derive(PartialEq)] enum Sign { Positive, Negative } fn float_to_int<F: Float, I: Int>(f: F) -> I where F::Int: CastInto<u32>, F::Int: CastInto<I>, { let f = f; let fixint_min = I::min_value(); let fixint_max = I::max_value(); let fixint_bits = I::BITS; let fixint_unsigned = fixint_min == I::ZERO; let sign_bit = F::SIGN_MASK; let significand_bits = F::SIGNIFICAND_BITS; let exponent_bias = F::EXPONENT_BIAS; let a_rep = F::repr(f); let a_abs = a_rep & !sign_bit; let sign = if (a_rep & sign_bit) == F::Int::ZERO { Sign::Positive } else { Sign::Negative }; let mut exponent: u32 = (a_abs >> significand_bits).cast(); let significand = (a_abs & F::SIGNIFICAND_MASK) | F::IMPLICIT_BIT; if exponent < exponent_bias || fixint_unsigned && sign == Sign::Negative { return I::ZERO; } exponent -= exponent_bias; if exponent >= (if fixint_unsigned {fixint_bits} else {fixint_bits -1}) { return if sign == Sign::Positive {fixint_max} else {fixint_min} } let r: I = if exponent < significand_bits { (significand >> (significand_bits - exponent)).cast() } else { (significand << (exponent - significand_bits)).cast() }; if sign == Sign::Negative { (!r).wrapping_add(I::ONE) } else { r } } intrinsics! 
{ #[arm_aeabi_alias = __aeabi_f2iz] pub extern "C" fn __fixsfsi(f: f32) -> i32 { float_to_int(f) } #[arm_aeabi_alias = __aeabi_f2lz] pub extern "C" fn __fixsfdi(f: f32) -> i64 { float_to_int(f) } #[unadjusted_on_win64] pub extern "C" fn __fixsfti(f: f32) -> i128 { float_to_int(f) } #[arm_aeabi_alias = __aeabi_d2iz] pub extern "C" fn __fixdfsi(f: f64) -> i32 { float_to_int(f) } #[arm_aeabi_alias = __aeabi_d2lz] pub extern "C" fn __fixdfdi(f: f64) -> i64 { float_to_int(f) } #[unadjusted_on_win64] pub extern "C" fn __fixdfti(f: f64) -> i128 { float_to_int(f) } #[arm_aeabi_alias = __aeabi_f2uiz] pub extern "C" fn __fixunssfsi(f: f32) -> u32 { float_to_int(f) } #[arm_aeabi_alias = __aeabi_f2ulz] pub extern "C" fn __fixunssfdi(f: f32) -> u64 { float_to_int(f) } #[unadjusted_on_win64] pub extern "C" fn __fixunssfti(f: f32) -> u128 { float_to_int(f) } #[arm_aeabi_alias = __aeabi_d2uiz] pub extern "C" fn __fixunsdfsi(f: f64) -> u32 { float_to_int(f) } #[arm_aeabi_alias = __aeabi_d2ulz] pub extern "C" fn __fixunsdfdi(f: f64) -> u64 { float_to_int(f) } #[unadjusted_on_win64] pub extern "C" fn __fixunsdfti(f: f64) -> u128 { float_to_int(f) } }
_plus_two { a } else { (a >> (sd - mant_dig_plus_two)) | Int::from_bool((a & I::UnsignedInt::max_value()).wrapping_shl((n + mant_dig_plus_two) - sd) != Int::ZERO) }; /* finish: */ a |= Int::from_bool((a & four) != I::UnsignedInt::ZERO); /* Or P into R */ a += Int::ONE; /* round - this step may add a significant bit */ a >>= 2; /* dump Q and R */ /* a is now rounded to mant_dig or mant_dig+1 bits */ if (a & (I::UnsignedInt::ONE << mant_dig)) != Int::ZERO { a >>= 1; e += 1; } a /* a is now rounded to mant_dig bits */ } else { a.wrapping_shl(mant_dig - sd) /* a is now rounded to mant_dig bits */ }; F::from_parts(s, (e + exponent_bias).cast(), a.cast()) } intrinsics! { #[arm_aeabi_alias = __aeabi_i2f] pub extern "C" fn __floatsisf(i: i32) -> f32 { int_to_float(i) } #[arm_aeabi_alias = __aeabi_i2d] pub extern "C" fn __floatsidf(i: i32) -> f64 { int_to_float(i) } #[use_c_shim_if(all(target_arch = "x86", not(target_env = "msvc")))] #[arm_aeabi_alias = __aeabi_l2d] pub extern "C" fn __floatdidf(i: i64) -> f64 { if cfg!(target_arch = "x86_64") { i as f64 } else { int_to_float(i) } } #[unadjusted_on_win64] pub extern "C" fn __floattisf(i: i128) -> f32 { int_to_float(i) } #[unadjusted_on_win64] pub extern "C" fn __floattidf(i: i128) -> f64 { int_to_float(i) } #[arm_aeabi_alias = __aeabi_ui2f] pub extern "C" fn __floatunsisf(i: u32) -> f32 { int_to_float(i) } #[arm_aeabi_alias = __aeabi_ui2d] pub extern "C" fn __floatunsidf(i: u32) -> f64 { int_to_float(i) } #[use_c_shim_if(all(not(target_env = "msvc"), any(target_arch = "x86", all(not(windows), target_arch = "x86_64"))))] #[arm_aeabi_alias = __aeabi_ul2d] pub extern "C" fn __floatundidf(i: u64) -> f64 { int_to_float(i) } #[unadjusted_on_win64] pub extern "C" fn __floatuntisf(i: u128) -> f32 { int_to_float(i) } #[unadjusted_on_win64]
random
[ { "content": "#[test]\n\nfn two() {\n\n let mut aligned = Aligned::new([0u8; 8]);;\n\n assert_eq!(mem::align_of_val(&aligned), 4);\n\n let xs = &mut aligned.array;\n\n let n = 2;\n\n let c = 0xdeadbeef;\n\n\n\n unsafe {\n\n __aeabi_memset4(xs.as_mut_ptr(), n, c)\n\n }\n\n\n\n assert_eq!(*xs, [0xef, 0xef, 0, 0, 0, 0, 0, 0]);\n\n\n\n let mut aligned = Aligned::new([1u8; 8]);;\n\n assert_eq!(mem::align_of_val(&aligned), 4);\n\n let xs = &mut aligned.array;\n\n let c = 0xdeadbeef;\n\n\n\n unsafe {\n\n __aeabi_memset4(xs.as_mut_ptr(), n, c)\n\n }\n\n\n\n assert_eq!(*xs, [0xef, 0xef, 1, 1, 1, 1, 1, 1]);\n\n}\n\n\n", "file_path": "tests/aeabi_memset.rs", "rank": 2, "score": 101751.1608657096 }, { "content": "#[test]\n\nfn four() {\n\n let mut aligned = Aligned::new([0u8; 8]);;\n\n assert_eq!(mem::align_of_val(&aligned), 4);\n\n let xs = &mut aligned.array;\n\n let n = 4;\n\n let c = 0xdeadbeef;\n\n\n\n unsafe {\n\n __aeabi_memset4(xs.as_mut_ptr(), n, c)\n\n }\n\n\n\n assert_eq!(*xs, [0xef, 0xef, 0xef, 0xef, 0, 0, 0, 0]);\n\n\n\n let mut aligned = Aligned::new([1u8; 8]);;\n\n assert_eq!(mem::align_of_val(&aligned), 4);\n\n let xs = &mut aligned.array;\n\n let c = 0xdeadbeef;\n\n\n\n unsafe {\n\n __aeabi_memset4(xs.as_mut_ptr(), n, c)\n\n }\n\n\n\n assert_eq!(*xs, [0xef, 0xef, 0xef, 0xef, 1, 1, 1, 1]);\n\n}\n\n\n", "file_path": "tests/aeabi_memset.rs", "rank": 3, "score": 101751.1608657096 }, { "content": "/// Returns `a + b`\n\nfn add<F: Float>(a: F, b: F) -> F where\n\n u32: CastInto<F::Int>,\n\n F::Int: CastInto<u32>,\n\n i32: CastInto<F::Int>,\n\n F::Int: CastInto<i32>,\n\n{\n\n let one = F::Int::ONE;\n\n let zero = F::Int::ZERO;\n\n\n\n let bits = F::BITS.cast();\n\n let significand_bits = F::SIGNIFICAND_BITS;\n\n let max_exponent = F::EXPONENT_MAX;\n\n\n\n let implicit_bit = F::IMPLICIT_BIT;\n\n let significand_mask = F::SIGNIFICAND_MASK;\n\n let sign_bit = F::SIGN_MASK as F::Int;\n\n let abs_mask = sign_bit - one;\n\n let exponent_mask = F::EXPONENT_MASK;\n\n let 
inf_rep = exponent_mask;\n\n let quiet_bit = implicit_bit >> 1;\n", "file_path": "src/float/add.rs", "rank": 4, "score": 100975.16462086907 }, { "content": "fn to_u32(x: f32) -> u32 {\n\n unsafe { mem::transmute(x) }\n\n}\n\n\n\nstatic TEST_CASES: &[((u32, u32), u32)] = &[\n\n\"#\n\n }\n\n\n\n fn epilogue() -> &'static str {\n\n \"\n\n];\n\n\n", "file_path": "build.rs", "rank": 5, "score": 98544.87547493997 }, { "content": "fn mk_f32(x: u32) -> f32 {\n\n unsafe { mem::transmute(x) }\n\n}\n\n\n", "file_path": "build.rs", "rank": 6, "score": 84786.56318605528 }, { "content": "fn something_with_a_dtor(f: &Fn()) {\n\n struct A<'a>(&'a (Fn() + 'a));\n\n\n\n impl<'a> Drop for A<'a> {\n\n fn drop(&mut self) {\n\n (self.0)();\n\n }\n\n }\n\n let _a = A(f);\n\n f();\n\n}\n\n\n", "file_path": "examples/intrinsics.rs", "rank": 7, "score": 76788.08339658246 }, { "content": "#[cfg(thumb)]\n\n#[no_mangle]\n\npub fn _start() -> ! {\n\n run();\n\n loop {}\n\n}\n\n\n\n#[cfg(windows)]\n\n#[link(name = \"kernel32\")]\n\n#[link(name = \"msvcrt\")]\n\nextern {}\n\n\n\n// ARM targets need these symbols\n", "file_path": "examples/intrinsics.rs", "rank": 8, "score": 71984.25866712532 }, { "content": "trait Ashl: Int + LargeInt {\n\n /// Returns `a << b`, requires `b < Self::BITS`\n\n fn ashl(self, offset: u32) -> Self\n\n where Self: LargeInt<HighHalf = <Self as LargeInt>::LowHalf>,\n\n {\n\n let half_bits = Self::BITS / 2;\n\n if offset & half_bits != 0 {\n\n Self::from_parts(Int::ZERO, self.low() << (offset - half_bits))\n\n } else if offset == 0 {\n\n self\n\n } else {\n\n Self::from_parts(self.low() << offset,\n\n (self.high() << offset) |\n\n (self.low() >> (half_bits - offset)))\n\n }\n\n }\n\n}\n\n\n\nimpl Ashl for u64 {}\n\nimpl Ashl for u128 {}\n\n\n", "file_path": "src/int/shift.rs", "rank": 9, "score": 61499.97952727582 }, { "content": "/// Trait to convert an integer to/from smaller parts\n\npub trait LargeInt: Int {\n\n type LowHalf: Int;\n\n type HighHalf: Int;\n\n\n\n fn 
low(self) -> Self::LowHalf;\n\n fn low_as_high(low: Self::LowHalf) -> Self::HighHalf;\n\n fn high(self) -> Self::HighHalf;\n\n fn high_as_low(low: Self::HighHalf) -> Self::LowHalf;\n\n fn from_parts(low: Self::LowHalf, high: Self::HighHalf) -> Self;\n\n}\n\n\n\nmacro_rules! large_int {\n\n ($ty:ty, $tylow:ty, $tyhigh:ty, $halfbits:expr) => {\n\n impl LargeInt for $ty {\n\n type LowHalf = $tylow;\n\n type HighHalf = $tyhigh;\n\n\n\n fn low(self) -> $tylow {\n\n self as $tylow\n\n }\n", "file_path": "src/int/mod.rs", "rank": 10, "score": 61499.97952727582 }, { "content": "trait Ashr: Int + LargeInt {\n\n /// Returns arithmetic `a >> b`, requires `b < Self::BITS`\n\n fn ashr(self, offset: u32) -> Self\n\n where Self: LargeInt<LowHalf = <<Self as LargeInt>::HighHalf as Int>::UnsignedInt>,\n\n {\n\n let half_bits = Self::BITS / 2;\n\n if offset & half_bits != 0 {\n\n Self::from_parts((self.high() >> (offset - half_bits)).unsigned(),\n\n self.high() >> (half_bits - 1))\n\n } else if offset == 0 {\n\n self\n\n } else {\n\n let high_unsigned = self.high().unsigned();\n\n Self::from_parts((high_unsigned << (half_bits - offset)) | (self.low() >> offset),\n\n self.high() >> offset)\n\n }\n\n }\n\n}\n\n\n\nimpl Ashr for i64 {}\n\nimpl Ashr for i128 {}\n\n\n", "file_path": "src/int/shift.rs", "rank": 11, "score": 61499.97952727582 }, { "content": "trait Lshr: Int + LargeInt {\n\n /// Returns logical `a >> b`, requires `b < Self::BITS`\n\n fn lshr(self, offset: u32) -> Self\n\n where Self: LargeInt<HighHalf = <Self as LargeInt>::LowHalf>,\n\n {\n\n let half_bits = Self::BITS / 2;\n\n if offset & half_bits != 0 {\n\n Self::from_parts(self.high() >> (offset - half_bits), Int::ZERO)\n\n } else if offset == 0 {\n\n self\n\n } else {\n\n Self::from_parts((self.high() << (half_bits - offset)) |\n\n (self.low() >> offset),\n\n self.high() >> offset)\n\n }\n\n }\n\n}\n\n\n\nimpl Lshr for u64 {}\n\nimpl Lshr for u128 {}\n", "file_path": "src/int/shift.rs", "rank": 12, "score": 
61499.97952727582 }, { "content": "/// Trait for some basic operations on integers\n\npub trait Int:\n\n Copy +\n\n PartialEq +\n\n PartialOrd +\n\n ops::AddAssign +\n\n ops::BitAndAssign +\n\n ops::BitOrAssign +\n\n ops::ShlAssign<i32> +\n\n ops::ShrAssign<u32> +\n\n ops::Add<Output = Self> +\n\n ops::Sub<Output = Self> +\n\n ops::Div<Output = Self> +\n\n ops::Shl<u32, Output = Self> +\n\n ops::Shr<u32, Output = Self> +\n\n ops::BitOr<Output = Self> +\n\n ops::BitXor<Output = Self> +\n\n ops::BitAnd<Output = Self> +\n\n ops::Not<Output = Self> +\n\n{\n\n /// Type with the same width but other signedness\n", "file_path": "src/int/mod.rs", "rank": 13, "score": 61130.31021027596 }, { "content": "trait Div: Int {\n\n /// Returns `a / b`\n\n fn div(self, other: Self) -> Self {\n\n let s_a = self >> (Self::BITS - 1);\n\n let s_b = other >> (Self::BITS - 1);\n\n // NOTE it's OK to overflow here because of the `.unsigned()` below.\n\n // This whole operation is computing the absolute value of the inputs\n\n // So some overflow will happen when dealing with e.g. 
`i64::MIN`\n\n // where the absolute value is `(-i64::MIN) as u64`\n\n let a = (self ^ s_a).wrapping_sub(s_a);\n\n let b = (other ^ s_b).wrapping_sub(s_b);\n\n let s = s_a ^ s_b;\n\n\n\n let r = a.unsigned().aborting_div(b.unsigned());\n\n (Self::from_unsigned(r) ^ s) - s\n\n }\n\n}\n\n\n\nimpl Div for i32 {}\n\nimpl Div for i64 {}\n\nimpl Div for i128 {}\n\n\n", "file_path": "src/int/sdiv.rs", "rank": 14, "score": 61130.31021027596 }, { "content": "trait Mod: Int {\n\n /// Returns `a % b`\n\n fn mod_(self, other: Self) -> Self {\n\n let s = other >> (Self::BITS - 1);\n\n // NOTE(wrapping_sub) see comment in the `div`\n\n let b = (other ^ s).wrapping_sub(s);\n\n let s = self >> (Self::BITS - 1);\n\n let a = (self ^ s).wrapping_sub(s);\n\n\n\n let r = a.unsigned().aborting_rem(b.unsigned());\n\n (Self::from_unsigned(r) ^ s) - s\n\n }\n\n}\n\n\n\nimpl Mod for i32 {}\n\nimpl Mod for i64 {}\n\nimpl Mod for i128 {}\n\n\n", "file_path": "src/int/sdiv.rs", "rank": 15, "score": 61130.31021027596 }, { "content": "trait Divmod: Int {\n\n /// Returns `a / b` and sets `*rem = n % d`\n\n fn divmod<F>(self, other: Self, rem: &mut Self, div: F) -> Self\n\n where F: Fn(Self, Self) -> Self,\n\n {\n\n let r = div(self, other);\n\n // NOTE won't overflow because it's using the result from the\n\n // previous division\n\n *rem = self - r.wrapping_mul(other);\n\n r\n\n }\n\n}\n\n\n\nimpl Divmod for i32 {}\n\nimpl Divmod for i64 {}\n\n\n\nintrinsics! {\n\n #[arm_aeabi_alias = __aeabi_idiv]\n\n pub extern \"C\" fn __divsi3(a: i32, b: i32) -> i32 {\n\n a.div(b)\n", "file_path": "src/int/sdiv.rs", "rank": 16, "score": 61130.31021027596 }, { "content": "fn unwrap<T>(t: Option<T>) -> T {\n\n match t {\n\n Some(t) => t,\n\n None => ::abort(),\n\n }\n\n}\n\n\n\nmacro_rules! 
int_impl_common {\n\n ($ty:ty, $bits:expr) => {\n\n const BITS: u32 = $bits;\n\n\n\n const ZERO: Self = 0;\n\n const ONE: Self = 1;\n\n\n\n fn from_bool(b: bool) -> Self {\n\n b as $ty\n\n }\n\n\n\n fn max_value() -> Self {\n\n <Self>::max_value()\n", "file_path": "src/int/mod.rs", "rank": 17, "score": 55063.918256291116 }, { "content": "#[test]\n\nfn mulosi4() {\n\n let mut overflow_ = 2;\n\n for &((a, b), (c, overflow)) in TEST_CASES {\n\n let c_ = __mulosi4(a, b, &mut overflow_);\n\n assert_eq!(((a, b), (c, overflow)), ((a, b), (c_, overflow_)));\n\n }\n\n}\n\n\"\n\n }\n\n\n\n fn to_string(&self, buffer: &mut String) {\n\n writeln!(\n\n buffer,\n\n \"(({a}, {b}), ({c}, {overflow})),\",\n\n a = self.a,\n\n b = self.b,\n\n c = self.c,\n\n overflow = self.overflow\n\n )\n\n .unwrap();\n", "file_path": "build.rs", "rank": 18, "score": 49156.150170875 }, { "content": "#[test]\n\nfn floatuntisf() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __floatuntisf(a);\n\n assert_eq!(((a,), b), ((a,), to_u32(b_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Floatuntidf {\n\n a: u128,\n\n b: u64, // f64\n\n }\n\n\n\n impl TestCase for Floatuntidf {\n\n fn name() -> &'static str {\n\n \"floatuntidf\"\n\n }\n", "file_path": "build.rs", "rank": 19, "score": 49156.150170875 }, { "content": "#[test]\n\nfn fixunssfsi() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __fixunssfsi(mk_f32(a));\n\n assert_eq!(((a,), b), ((a,), b_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Fixunssfti {\n\n a: u32, // f32\n\n b: u128,\n\n }\n\n\n\n impl TestCase for Fixunssfti {\n\n fn name() -> &'static str {\n\n \"fixunssfti\"\n\n }\n", "file_path": "build.rs", "rank": 20, "score": 49156.150170875 }, { "content": "#[test]\n\nfn umoddi3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __umoddi3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n 
pub struct Umodsi3 {\n\n a: u32,\n\n b: u32,\n\n c: u32,\n\n }\n\n\n\n impl TestCase for Umodsi3 {\n\n fn name() -> &'static str {\n\n \"umodsi3\"\n", "file_path": "build.rs", "rank": 21, "score": 49156.150170875 }, { "content": "#[test]\n\nfn modti3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __modti3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n struct Muldi3 {\n\n a: u64,\n\n b: u64,\n\n c: u64,\n\n }\n\n\n\n impl TestCase for Muldi3 {\n\n fn name() -> &'static str {\n\n \"muldi3\"\n", "file_path": "build.rs", "rank": 22, "score": 49156.150170875 }, { "content": "#[test]\n\nfn addsf3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __addsf3(mk_f32(a), mk_f32(b));\n\n assert_eq!(((a, b), c), ((a, b), to_u32(c_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Ashldi3 {\n\n a: u64,\n\n b: u32,\n\n c: u64,\n\n }\n\n\n\n impl TestCase for Ashldi3 {\n\n fn name() -> &'static str {\n\n \"ashldi3\"\n", "file_path": "build.rs", "rank": 23, "score": 49156.150170875 }, { "content": "#[test]\n\nfn muloti4() {\n\n let mut overflow_ = 2;\n\n for &((a, b), (c, overflow)) in TEST_CASES {\n\n let c_ = __muloti4(a, b, &mut overflow_);\n\n assert_eq!(((a, b), (c, overflow)), ((a, b), (c_, overflow_)));\n\n }\n\n}\n\n\"\n\n }\n\n\n\n fn to_string(&self, buffer: &mut String) {\n\n writeln!(\n\n buffer,\n\n \"(({a}, {b}), ({c}, {overflow})),\",\n\n a = self.a,\n\n b = self.b,\n\n c = self.c,\n\n overflow = self.overflow\n\n )\n\n .unwrap();\n", "file_path": "build.rs", "rank": 24, "score": 49156.150170875 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n\n\n let target = env::var(\"TARGET\").unwrap();\n\n\n\n // Emscripten's runtime includes all the builtins\n\n if target.contains(\"emscripten\") {\n\n return;\n\n }\n\n\n\n // NOTE we are going to assume that llvm-target, what determines our codegen option, matches the\n\n 
// target triple. This is usually correct for our built-in targets but can break in presence of\n\n // custom targets, which can have arbitrary names.\n\n let llvm_target = target.split('-').collect::<Vec<_>>();\n\n\n\n // Build test files\n\n #[cfg(feature = \"gen-tests\")]\n\n tests::generate();\n\n\n\n // Build missing intrinsics from compiler-rt C source code. If we're\n", "file_path": "build.rs", "rank": 25, "score": 49156.150170875 }, { "content": "#[test]\n\nfn floatuntidf() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __floatuntidf(a);\n\n let g_b = to_u64(b_);\n\n let diff = if g_b > b { g_b - b } else { b - g_b };\n\n assert_eq!(((a,), b, g_b, true), ((a,), b, g_b, diff <= 1));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Moddi3 {\n\n a: i64,\n\n b: i64,\n\n c: i64,\n\n }\n\n\n\n impl TestCase for Moddi3 {\n", "file_path": "build.rs", "rank": 26, "score": 49156.150170875 }, { "content": "#[test]\n\nfn fixdfdi() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __fixdfsi(mk_f64(a));\n\n assert_eq!(((a,), b), ((a,), b_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Fixsfdi {\n\n a: u32, // f32\n\n b: i64,\n\n }\n\n\n\n impl TestCase for Fixsfdi {\n\n fn name() -> &'static str {\n\n \"fixsfdi\"\n\n }\n", "file_path": "build.rs", "rank": 27, "score": 49156.150170875 }, { "content": "#[test]\n\nfn fixsfdi() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __fixsfdi(mk_f32(a));\n\n assert_eq!(((a,), b), ((a,), b_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Fixsfsi {\n\n a: u32, // f32\n\n b: i32,\n\n }\n\n\n\n impl TestCase for Fixsfsi {\n\n fn name() -> &'static str {\n\n \"fixsfsi\"\n\n }\n", "file_path": "build.rs", "rank": 28, "score": 49156.150170875 }, { "content": "#[test]\n\nfn floatsisf() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __floatsisf(a);\n\n assert_eq!(((a,), b), ((a,), to_u32(b_)));\n\n }\n\n}\n\n\"\n\n }\n\n 
}\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Floattisf {\n\n a: i128,\n\n b: u32, // f32\n\n }\n\n\n\n impl TestCase for Floattisf {\n\n fn name() -> &'static str {\n\n \"floattisf\"\n\n }\n", "file_path": "build.rs", "rank": 29, "score": 49156.150170875 }, { "content": "#[test]\n\nfn fixunssfti() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __fixunssfti(mk_f32(a));\n\n assert_eq!(((a,), b), ((a,), b_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Fixunsdfti {\n\n a: u64, // f64\n\n b: u128,\n\n }\n\n\n\n impl TestCase for Fixunsdfti {\n\n fn name() -> &'static str {\n\n \"fixunsdfti\"\n\n }\n", "file_path": "build.rs", "rank": 30, "score": 49156.150170875 }, { "content": "#[test]\n\nfn divmoddi4() {\n\n for &((a, b), (c, rem)) in TEST_CASES {\n\n let mut rem_ = 0;\n\n let c_ = __divmoddi4(a, b, &mut rem_);\n\n assert_eq!(((a, b), (c, rem)), ((a, b), (c_, rem_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Divdi3 {\n\n a: i64,\n\n b: i64,\n\n c: i64,\n\n }\n\n\n\n impl TestCase for Divdi3 {\n\n fn name() -> &'static str {\n", "file_path": "build.rs", "rank": 31, "score": 49156.150170875 }, { "content": "#[test]\n\nfn adddf3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __adddf3(mk_f64(a), mk_f64(b));\n\n assert_eq!(((a, b), c), ((a, b), to_u64(c_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Addsf3 {\n\n a: u32, // f32\n\n b: u32, // f32\n\n c: u32, // f32\n\n }\n\n\n\n impl TestCase for Addsf3 {\n\n fn name() -> &'static str {\n\n \"addsf3\"\n", "file_path": "build.rs", "rank": 32, "score": 49156.150170875 }, { "content": "#[test]\n\nfn mulodi4() {\n\n let mut overflow_ = 2;\n\n for &((a, b), (c, overflow)) in TEST_CASES {\n\n let c_ = __mulodi4(a, b, &mut overflow_);\n\n assert_eq!(((a, b), (c, overflow)), ((a, b), (c_, overflow_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct 
Mulosi4 {\n\n a: i32,\n\n b: i32,\n\n c: i32,\n\n overflow: u32,\n\n }\n\n\n\n impl TestCase for Mulosi4 {\n", "file_path": "build.rs", "rank": 33, "score": 49156.150170875 }, { "content": "#[test]\n\nfn floatundidf() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __floatundidf(a);\n\n assert_eq!(((a,), b), ((a,), to_u64(b_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Floatunsidf {\n\n a: u32,\n\n b: u64, // f64\n\n }\n\n\n\n impl TestCase for Floatunsidf {\n\n fn name() -> &'static str {\n\n \"floatunsidf\"\n\n }\n", "file_path": "build.rs", "rank": 34, "score": 49156.150170875 }, { "content": "#[test]\n\nfn divmodsi4() {\n\n for &((a, b), (c, rem)) in TEST_CASES {\n\n let mut rem_ = 0;\n\n let c_ = __divmodsi4(a, b, &mut rem_);\n\n assert_eq!(((a, b), (c, rem)), ((a, b), (c_, rem_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Divsi3 {\n\n a: i32,\n\n b: i32,\n\n c: i32,\n\n }\n\n\n\n impl TestCase for Divsi3 {\n\n fn name() -> &'static str {\n", "file_path": "build.rs", "rank": 35, "score": 49156.150170875 }, { "content": "#[test]\n\nfn ashrdi3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __ashrdi3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Ashrti3 {\n\n a: i128,\n\n b: u32,\n\n c: i128,\n\n }\n\n\n\n impl TestCase for Ashrti3 {\n\n fn name() -> &'static str {\n\n \"ashrti3\"\n", "file_path": "build.rs", "rank": 36, "score": 49156.150170875 }, { "content": "#[test]\n\nfn ashrti3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __ashrti3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Divmoddi4 {\n\n a: i64,\n\n b: i64,\n\n c: i64,\n\n rem: i64,\n\n }\n\n\n\n impl TestCase for Divmoddi4 {\n\n fn name() -> &'static str {\n", "file_path": "build.rs", "rank": 37, "score": 49156.150170875 }, { 
"content": "#[test]\n\nfn udivmodti4() {\n\n for &((a, b), (c, rem)) in TEST_CASES {\n\n let mut rem_ = 0;\n\n let c_ = __udivmodti4(a, b, Some(&mut rem_));\n\n assert_eq!(((a, b), (c, rem)), ((a, b), (c_, rem_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Udivsi3 {\n\n a: u32,\n\n b: u32,\n\n c: u32,\n\n }\n\n\n\n impl TestCase for Udivsi3 {\n\n fn name() -> &'static str {\n", "file_path": "build.rs", "rank": 38, "score": 49156.150170875 }, { "content": "#[test]\n\nfn divsi3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __divsi3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Divti3 {\n\n a: i128,\n\n b: i128,\n\n c: i128,\n\n }\n\n\n\n impl TestCase for Divti3 {\n\n fn name() -> &'static str {\n\n \"divti3\"\n", "file_path": "build.rs", "rank": 39, "score": 49156.150170875 }, { "content": "#[test]\n\nfn fixsfti() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __fixsfti(mk_f32(a));\n\n assert_eq!(((a,), b), ((a,), b_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Fixdfti {\n\n a: u64, // f64\n\n b: i128,\n\n }\n\n\n\n impl TestCase for Fixdfti {\n\n fn name() -> &'static str {\n\n \"fixdfti\"\n\n }\n", "file_path": "build.rs", "rank": 40, "score": 49156.150170875 }, { "content": "#[test]\n\nfn floatsidf() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __floatsidf(a);\n\n assert_eq!(((a,), b), ((a,), to_u64(b_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Floatsisf {\n\n a: i32,\n\n b: u32, // f32\n\n }\n\n\n\n impl TestCase for Floatsisf {\n\n fn name() -> &'static str {\n\n \"floatsisf\"\n\n }\n", "file_path": "build.rs", "rank": 41, "score": 49156.150170875 }, { "content": "#[test]\n\nfn muldi3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __muldi3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n 
#[derive(Eq, Hash, PartialEq)]\n\n pub struct Mulodi4 {\n\n a: i64,\n\n b: i64,\n\n c: i64,\n\n overflow: u32,\n\n }\n\n\n\n impl TestCase for Mulodi4 {\n\n fn name() -> &'static str {\n", "file_path": "build.rs", "rank": 42, "score": 49156.150170875 }, { "content": "#[test]\n\nfn lshrdi3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __lshrdi3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Lshrti3 {\n\n a: u128,\n\n b: u32,\n\n c: u128,\n\n }\n\n\n\n impl TestCase for Lshrti3 {\n\n fn name() -> &'static str {\n\n \"lshrti3\"\n", "file_path": "build.rs", "rank": 43, "score": 49156.150170875 }, { "content": "#[test]\n\nfn udivsi3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __udivsi3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Udivti3 {\n\n a: u128,\n\n b: u128,\n\n c: u128,\n\n }\n\n\n\n impl TestCase for Udivti3 {\n\n fn name() -> &'static str {\n\n \"udivti3\"\n", "file_path": "build.rs", "rank": 44, "score": 49156.150170875 }, { "content": "#[test]\n\nfn fixunsdfti() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __fixunsdfti(mk_f64(a));\n\n assert_eq!(((a,), b), ((a,), b_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Floatdidf {\n\n a: i64,\n\n b: u64, // f64\n\n }\n\n\n\n impl TestCase for Floatdidf {\n\n fn name() -> &'static str {\n\n \"floatdidf\"\n\n }\n", "file_path": "build.rs", "rank": 45, "score": 49156.150170875 }, { "content": "#[test]\n\nfn ashlti3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __ashlti3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Ashrdi3 {\n\n a: i64,\n\n b: u32,\n\n c: i64,\n\n }\n\n\n\n impl TestCase for Ashrdi3 {\n\n fn name() -> &'static str {\n\n \"ashrdi3\"\n", "file_path": "build.rs", "rank": 46, 
"score": 49156.150170875 }, { "content": "#[test]\n\nfn floattisf() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __floattisf(a);\n\n assert_eq!(((a,), b), ((a,), to_u32(b_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Floattidf {\n\n a: i128,\n\n b: u64, // f64\n\n }\n\n\n\n impl TestCase for Floattidf {\n\n fn name() -> &'static str {\n\n \"floattidf\"\n\n }\n", "file_path": "build.rs", "rank": 47, "score": 49156.150170875 }, { "content": "#[test]\n\nfn multi3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __multi3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Powidf2 {\n\n a: u64, // f64\n\n b: i32,\n\n c: u64, // f64\n\n }\n\n\n\n impl TestCase for Powidf2 {\n\n fn name() -> &'static str {\n\n \"powidf2\"\n", "file_path": "build.rs", "rank": 48, "score": 49156.150170875 }, { "content": "#[test]\n\nfn moddi3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __moddi3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Modsi3 {\n\n a: i32,\n\n b: i32,\n\n c: i32,\n\n }\n\n\n\n impl TestCase for Modsi3 {\n\n fn name() -> &'static str {\n\n \"modsi3\"\n", "file_path": "build.rs", "rank": 49, "score": 49156.150170875 }, { "content": "#[test]\n\nfn floattidf() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __floattidf(a);\n\n let g_b = to_u64(b_);\n\n let diff = if g_b > b { g_b - b } else { b - g_b };\n\n assert_eq!(((a,), b, g_b, true), ((a,), b, g_b, diff <= 1));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Floatundidf {\n\n a: u64,\n\n b: u64, // f64\n\n }\n\n\n\n impl TestCase for Floatundidf {\n\n fn name() -> &'static str {\n", "file_path": "build.rs", "rank": 50, "score": 49156.150170875 }, { "content": "#[test]\n\nfn umodti3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __umodti3(a, b);\n\n 
assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n", "file_path": "build.rs", "rank": 51, "score": 49156.150170875 }, { "content": "#[test]\n\nfn lshrti3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __lshrti3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Subdf3 {\n\n a: u64, // f64\n\n b: u64, // f64\n\n c: u64, // f64\n\n }\n\n\n\n impl TestCase for Subdf3 {\n\n fn name() -> &'static str {\n\n \"subdf3\"\n", "file_path": "build.rs", "rank": 52, "score": 49156.150170875 }, { "content": "#[test]\n\nfn divdi3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __divdi3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Divmodsi4 {\n\n a: i32,\n\n b: i32,\n\n c: i32,\n\n rem: i32,\n\n }\n\n\n\n impl TestCase for Divmodsi4 {\n\n fn name() -> &'static str {\n", "file_path": "build.rs", "rank": 53, "score": 49156.150170875 }, { "content": "#[test]\n\nfn powidf2() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __powidf2(mk_f64(a), b);\n\n assert_eq!(((a, b), c), ((a, b), to_u64(c_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Powisf2 {\n\n a: u32, // f32\n\n b: i32,\n\n c: u32, // f32\n\n }\n\n\n\n impl TestCase for Powisf2 {\n\n fn name() -> &'static str {\n\n \"powisf2\"\n", "file_path": "build.rs", "rank": 54, "score": 49156.150170875 }, { "content": "#[test]\n\nfn fixsfsi() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __fixsfsi(mk_f32(a));\n\n assert_eq!(((a,), b), ((a,), b_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Fixsfti {\n\n a: u32, // f32\n\n b: i128,\n\n }\n\n\n\n impl TestCase for Fixsfti {\n\n fn name() -> &'static str {\n\n \"fixsfti\"\n\n }\n", "file_path": "build.rs", "rank": 55, "score": 49156.150170875 }, { "content": "#[test]\n\nfn floatdidf() {\n\n for &((a,), 
b) in TEST_CASES {\n\n let b_ = __floatdidf(a);\n\n assert_eq!(((a,), b), ((a,), to_u64(b_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Floatsidf {\n\n a: i32,\n\n b: u64, // f64\n\n }\n\n\n\n impl TestCase for Floatsidf {\n\n fn name() -> &'static str {\n\n \"floatsidf\"\n\n }\n", "file_path": "build.rs", "rank": 56, "score": 49156.150170875 }, { "content": "#[test]\n\nfn udivmodsi4() {\n\n for &((a, b), (c, rem)) in TEST_CASES {\n\n let mut rem_ = 0;\n\n let c_ = __udivmodsi4(a, b, Some(&mut rem_));\n\n assert_eq!(((a, b), (c, rem)), ((a, b), (c_, rem_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Udivmodti4 {\n\n a: u128,\n\n b: u128,\n\n c: u128,\n\n rem: u128,\n\n }\n\n\n\n impl TestCase for Udivmodti4 {\n", "file_path": "build.rs", "rank": 57, "score": 49156.150170875 }, { "content": "#[test]\n\nfn udivdi3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __udivdi3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Udivmoddi4 {\n\n a: u64,\n\n b: u64,\n\n c: u64,\n\n rem: u64,\n\n }\n\n\n\n impl TestCase for Udivmoddi4 {\n\n fn name() -> &'static str {\n", "file_path": "build.rs", "rank": 58, "score": 49156.150170875 }, { "content": "#[test]\n\nfn powisf2() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __powisf2(mk_f32(a), b);\n\n assert_eq!(((a, b), c), ((a, b), to_u32(c_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Lshrdi3 {\n\n a: u64,\n\n b: u32,\n\n c: u64,\n\n }\n\n\n\n impl TestCase for Lshrdi3 {\n\n fn name() -> &'static str {\n\n \"lshrdi3\"\n", "file_path": "build.rs", "rank": 59, "score": 49156.150170875 }, { "content": "#[test]\n\nfn ashldi3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __ashldi3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct 
Ashlti3 {\n\n a: u128,\n\n b: u32,\n\n c: u128,\n\n }\n\n\n\n impl TestCase for Ashlti3 {\n\n fn name() -> &'static str {\n\n \"ashlti3\"\n", "file_path": "build.rs", "rank": 60, "score": 49156.150170875 }, { "content": "#[test]\n\nfn floatunsisf() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __floatunsisf(a);\n\n assert_eq!(((a,), b), ((a,), to_u32(b_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Floatuntisf {\n\n a: u128,\n\n b: u32, // f32\n\n }\n\n\n\n impl TestCase for Floatuntisf {\n\n fn name() -> &'static str {\n\n \"floatuntisf\"\n\n }\n", "file_path": "build.rs", "rank": 61, "score": 49156.150170875 }, { "content": "#[test]\n\nfn subsf3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __subsf3(mk_f32(a), mk_f32(b));\n\n assert_eq!(((a, b), c), ((a, b), to_u32(c_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Udivdi3 {\n\n a: u64,\n\n b: u64,\n\n c: u64,\n\n }\n\n\n\n impl TestCase for Udivdi3 {\n\n fn name() -> &'static str {\n\n \"udivdi3\"\n", "file_path": "build.rs", "rank": 62, "score": 49156.150170875 }, { "content": "#[test]\n\nfn divti3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __divti3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Fixdfdi {\n\n a: u64, // f64\n\n b: i64,\n\n }\n\n\n\n impl TestCase for Fixdfdi {\n\n fn name() -> &'static str {\n\n \"fixdfdi\"\n\n }\n", "file_path": "build.rs", "rank": 63, "score": 49156.150170875 }, { "content": "#[test]\n\nfn subdf3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __subdf3(mk_f64(a), mk_f64(b));\n\n assert_eq!(((a, b), c), ((a, b), to_u64(c_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Subsf3 {\n\n a: u32, // f32\n\n b: u32, // f32\n\n c: u32, // f32\n\n }\n\n\n\n impl TestCase for Subsf3 {\n\n fn name() -> &'static str {\n\n \"subsf3\"\n", "file_path": 
"build.rs", "rank": 64, "score": 49156.150170875 }, { "content": "#[test]\n\nfn fixdfti() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __fixdfti(mk_f64(a));\n\n assert_eq!(((a,), b), ((a,), b_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Fixunsdfdi {\n\n a: u64, // f64\n\n b: u64,\n\n }\n\n\n\n impl TestCase for Fixunsdfdi {\n\n fn name() -> &'static str {\n\n \"fixunsdfdi\"\n\n }\n", "file_path": "build.rs", "rank": 65, "score": 49156.150170875 }, { "content": "#[test]\n\nfn fixunssfdi() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __fixunssfdi(mk_f32(a));\n\n assert_eq!(((a,), b), ((a,), b_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Fixunssfsi {\n\n a: u32, // f32\n\n b: u32,\n\n }\n\n\n\n impl TestCase for Fixunssfsi {\n\n fn name() -> &'static str {\n\n \"fixunssfsi\"\n\n }\n", "file_path": "build.rs", "rank": 66, "score": 49156.150170875 }, { "content": "#[test]\n\nfn modsi3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __modsi3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Modti3 {\n\n a: i128,\n\n b: i128,\n\n c: i128,\n\n }\n\n\n\n impl TestCase for Modti3 {\n\n fn name() -> &'static str {\n\n \"modti3\"\n", "file_path": "build.rs", "rank": 67, "score": 49156.150170875 }, { "content": "#[test]\n\nfn udivti3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __udivti3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Umoddi3 {\n\n a: u64,\n\n b: u64,\n\n c: u64,\n\n }\n\n\n\n impl TestCase for Umoddi3 {\n\n fn name() -> &'static str {\n\n \"umoddi3\"\n", "file_path": "build.rs", "rank": 68, "score": 49156.150170875 }, { "content": "#[test]\n\nfn udivmoddi4() {\n\n for &((a, b), (c, rem)) in TEST_CASES {\n\n let mut rem_ = 0;\n\n let c_ = __udivmoddi4(a, b, Some(&mut rem_));\n\n 
assert_eq!(((a, b), (c, rem)), ((a, b), (c_, rem_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Udivmodsi4 {\n\n a: u32,\n\n b: u32,\n\n c: u32,\n\n rem: u32,\n\n }\n\n\n\n impl TestCase for Udivmodsi4 {\n", "file_path": "build.rs", "rank": 69, "score": 49156.150170875 }, { "content": "#[test]\n\nfn floatunsidf() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __floatunsidf(a);\n\n assert_eq!(((a,), b), ((a,), to_u64(b_)));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Floatunsisf {\n\n a: u32,\n\n b: u32, // f32\n\n }\n\n\n\n impl TestCase for Floatunsisf {\n\n fn name() -> &'static str {\n\n \"floatunsisf\"\n\n }\n", "file_path": "build.rs", "rank": 70, "score": 49156.150170875 }, { "content": "#[test]\n\nfn fixunsdfdi() {\n\n for &((a,), b) in TEST_CASES {\n\n let b_ = __fixunsdfsi(mk_f64(a));\n\n assert_eq!(((a,), b), ((a,), b_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Fixunssfdi {\n\n a: u32, // f32\n\n b: u64,\n\n }\n\n\n\n impl TestCase for Fixunssfdi {\n\n fn name() -> &'static str {\n\n \"fixunssfdi\"\n\n }\n", "file_path": "build.rs", "rank": 71, "score": 49156.150170875 }, { "content": "#[test]\n\nfn umodsi3() {\n\n for &((a, b), c) in TEST_CASES {\n\n let c_ = __umodsi3(a, b);\n\n assert_eq!(((a, b), c), ((a, b), c_));\n\n }\n\n}\n\n\"\n\n }\n\n }\n\n\n\n #[derive(Eq, Hash, PartialEq)]\n\n pub struct Umodti3 {\n\n a: u128,\n\n b: u128,\n\n c: u128,\n\n }\n\n\n\n impl TestCase for Umodti3 {\n\n fn name() -> &'static str {\n\n \"umodti3\"\n", "file_path": "build.rs", "rank": 72, "score": 49156.150170875 }, { "content": "trait Mulo: Int + ops::Neg<Output = Self> {\n\n fn mulo(self, other: Self, overflow: &mut i32) -> Self {\n\n *overflow = 0;\n\n let result = self.wrapping_mul(other);\n\n if self == Self::min_value() {\n\n if other != Self::ZERO && other != Self::ONE {\n\n *overflow = 1;\n\n }\n\n return result;\n\n }\n\n if other == 
Self::min_value() {\n\n if self != Self::ZERO && self != Self::ONE {\n\n *overflow = 1;\n\n }\n\n return result;\n\n }\n\n\n\n let sa = self >> (Self::BITS - 1);\n\n let abs_a = (self ^ sa) - sa;\n\n let sb = other >> (Self::BITS - 1);\n", "file_path": "src/int/mul.rs", "rank": 73, "score": 48098.8219962988 }, { "content": "fn run() {\n\n use intrinsics::*;\n\n\n\n // A copy of \"test::black_box\". Used to prevent LLVM from optimizing away the intrinsics during LTO\n\n fn bb<T>(dummy: T) -> T {\n\n unsafe { asm!(\"\" : : \"r\"(&dummy)) }\n\n dummy\n\n }\n\n\n\n bb(aeabi_d2f(bb(2.)));\n\n bb(aeabi_d2i(bb(2.)));\n\n bb(aeabi_d2l(bb(2.)));\n\n bb(aeabi_d2uiz(bb(2.)));\n\n bb(aeabi_d2ulz(bb(2.)));\n\n bb(aeabi_dadd(bb(2.), bb(3.)));\n\n bb(aeabi_dcmpeq(bb(2.), bb(3.)));\n\n bb(aeabi_dcmpgt(bb(2.), bb(3.)));\n\n bb(aeabi_dcmplt(bb(2.), bb(3.)));\n\n bb(aeabi_ddiv(bb(2.), bb(3.)));\n\n bb(aeabi_dmul(bb(2.), bb(3.)));\n", "file_path": "examples/intrinsics.rs", "rank": 74, "score": 46970.64074585679 }, { "content": "fn abort() -> ! 
{\n\n unsafe { core::intrinsics::abort() }\n\n}\n\n\n\n#[macro_use]\n\nmod macros;\n\n\n\npub mod int;\n\npub mod float;\n\n\n\npub mod mem;\n\n\n\n#[cfg(target_arch = \"arm\")]\n\npub mod arm;\n\n\n\n#[cfg(target_arch = \"x86_64\")]\n\npub mod x86_64;\n\n\n\npub mod probestack;\n", "file_path": "src/lib.rs", "rank": 75, "score": 46970.64074585679 }, { "content": "#[test]\n\nfn five() {\n\n let mut aligned = Aligned::new([0u8; 8]);;\n\n assert_eq!(mem::align_of_val(&aligned), 4);\n\n let xs = &mut aligned.array;\n\n let n = 5;\n\n let c = 0xdeadbeef;\n\n\n\n unsafe {\n\n __aeabi_memset4(xs.as_mut_ptr(), n, c)\n\n }\n\n\n\n assert_eq!(*xs, [0xef, 0xef, 0xef, 0xef, 0xef, 0, 0, 0]);\n\n\n\n let mut aligned = Aligned::new([1u8; 8]);;\n\n assert_eq!(mem::align_of_val(&aligned), 4);\n\n let xs = &mut aligned.array;\n\n let c = 0xdeadbeef;\n\n\n\n unsafe {\n\n __aeabi_memset4(xs.as_mut_ptr(), n, c)\n\n }\n\n\n\n assert_eq!(*xs, [0xef, 0xef, 0xef, 0xef, 0xef, 1, 1, 1]);\n\n}\n\n\n", "file_path": "tests/aeabi_memset.rs", "rank": 76, "score": 45066.83224399477 }, { "content": "#[test]\n\nfn memcpy4() {\n\n let mut aligned = Aligned::new([0; 8]);\n\n let dest = &mut aligned.array;\n\n let src = [0xde, 0xad, 0xbe, 0xef, 0xba, 0xad, 0xf0, 0x0d];\n\n\n\n for n in 0..dest.len() {\n\n dest.copy_from_slice(&[0; 8]);\n\n\n\n unsafe { __aeabi_memcpy4(dest.as_mut_ptr(), src.as_ptr(), n) }\n\n\n\n assert_eq!(&dest[0..n], &src[0..n])\n\n }\n\n}\n", "file_path": "tests/aeabi_memcpy.rs", "rank": 77, "score": 45066.83224399477 }, { "content": "#[test]\n\nfn memclr4() {\n\n let mut aligned = Aligned::new();;\n\n assert_eq!(mem::align_of_val(&aligned), 4);\n\n let xs = &mut aligned.array;\n\n\n\n for n in 0..9 {\n\n unsafe {\n\n __aeabi_memset4(xs.as_mut_ptr(), n, 0xff);\n\n __aeabi_memclr4(xs.as_mut_ptr(), n);\n\n }\n\n\n\n assert!(xs[0..n].iter().all(|x| *x == 0));\n\n }\n\n}\n", "file_path": "tests/aeabi_memclr.rs", "rank": 78, "score": 45066.83224399477 }, { "content": "#[test]\n\nfn 
six() {\n\n let mut aligned = Aligned::new([0u8; 8]);;\n\n assert_eq!(mem::align_of_val(&aligned), 4);\n\n let xs = &mut aligned.array;\n\n let n = 6;\n\n let c = 0xdeadbeef;\n\n\n\n unsafe {\n\n __aeabi_memset4(xs.as_mut_ptr(), n, c)\n\n }\n\n\n\n assert_eq!(*xs, [0xef, 0xef, 0xef, 0xef, 0xef, 0xef, 0, 0]);\n\n\n\n let mut aligned = Aligned::new([1u8; 8]);;\n\n assert_eq!(mem::align_of_val(&aligned), 4);\n\n let xs = &mut aligned.array;\n\n let c = 0xdeadbeef;\n\n\n\n unsafe {\n\n __aeabi_memset4(xs.as_mut_ptr(), n, c)\n\n }\n\n\n\n assert_eq!(*xs, [0xef, 0xef, 0xef, 0xef, 0xef, 0xef, 1, 1]);\n\n}\n\n\n", "file_path": "tests/aeabi_memset.rs", "rank": 79, "score": 45066.83224399477 }, { "content": "#[test]\n\nfn zero() {\n\n let mut aligned = Aligned::new([0u8; 8]);;\n\n assert_eq!(mem::align_of_val(&aligned), 4);\n\n let xs = &mut aligned.array;\n\n let c = 0xdeadbeef;\n\n\n\n unsafe {\n\n __aeabi_memset4(xs.as_mut_ptr(), 0, c)\n\n }\n\n\n\n assert_eq!(*xs, [0; 8]);\n\n\n\n let mut aligned = Aligned::new([1u8; 8]);;\n\n assert_eq!(mem::align_of_val(&aligned), 4);\n\n let xs = &mut aligned.array;\n\n let c = 0xdeadbeef;\n\n\n\n unsafe {\n\n __aeabi_memset4(xs.as_mut_ptr(), 0, c)\n\n }\n\n\n\n assert_eq!(*xs, [1; 8]);\n\n}\n\n\n", "file_path": "tests/aeabi_memset.rs", "rank": 80, "score": 45066.83224399477 }, { "content": "#[test]\n\nfn three() {\n\n let mut aligned = Aligned::new([0u8; 8]);;\n\n assert_eq!(mem::align_of_val(&aligned), 4);\n\n let xs = &mut aligned.array;\n\n let n = 3;\n\n let c = 0xdeadbeef;\n\n\n\n unsafe {\n\n __aeabi_memset4(xs.as_mut_ptr(), n, c)\n\n }\n\n\n\n assert_eq!(*xs, [0xef, 0xef, 0xef, 0, 0, 0, 0, 0]);\n\n\n\n let mut aligned = Aligned::new([1u8; 8]);;\n\n assert_eq!(mem::align_of_val(&aligned), 4);\n\n let xs = &mut aligned.array;\n\n let c = 0xdeadbeef;\n\n\n\n unsafe {\n\n __aeabi_memset4(xs.as_mut_ptr(), n, c)\n\n }\n\n\n\n assert_eq!(*xs, [0xef, 0xef, 0xef, 1, 1, 1, 1, 1]);\n\n}\n\n\n", "file_path": "tests/aeabi_memset.rs", 
"rank": 81, "score": 45066.83224399477 }, { "content": "#[test]\n\nfn one() {\n\n let mut aligned = Aligned::new([0u8; 8]);;\n\n assert_eq!(mem::align_of_val(&aligned), 4);\n\n let xs = &mut aligned.array;\n\n let n = 1;\n\n let c = 0xdeadbeef;\n\n\n\n unsafe {\n\n __aeabi_memset4(xs.as_mut_ptr(), n, c)\n\n }\n\n\n\n assert_eq!(*xs, [0xef, 0, 0, 0, 0, 0, 0, 0]);\n\n\n\n let mut aligned = Aligned::new([1u8; 8]);;\n\n assert_eq!(mem::align_of_val(&aligned), 4);\n\n let xs = &mut aligned.array;\n\n let c = 0xdeadbeef;\n\n\n\n unsafe {\n\n __aeabi_memset4(xs.as_mut_ptr(), n, c)\n\n }\n\n\n\n assert_eq!(*xs, [0xef, 1, 1, 1, 1, 1, 1, 1]);\n\n}\n\n\n", "file_path": "tests/aeabi_memset.rs", "rank": 82, "score": 45066.83224399477 }, { "content": "#[test]\n\nfn memcpy() {\n\n let mut dest = [0; 4];\n\n let src = [0xde, 0xad, 0xbe, 0xef];\n\n\n\n for n in 0..dest.len() {\n\n dest.copy_from_slice(&[0; 4]);\n\n\n\n unsafe { __aeabi_memcpy(dest.as_mut_ptr(), src.as_ptr(), n) }\n\n\n\n assert_eq!(&dest[0..n], &src[0..n])\n\n }\n\n}\n\n\n", "file_path": "tests/aeabi_memcpy.rs", "rank": 83, "score": 45066.83224399477 }, { "content": " sr += 1;\n\n\n\n // 1 <= sr <= u32::BITS - 1\n\n let mut q = n << (u32::BITS - sr);\n\n let mut r = n >> sr;\n\n\n\n let mut carry = 0;\n\n\n\n // Don't use a range because they may generate references to memcpy in unoptimized code\n\n let mut i = 0;\n\n while i < sr {\n\n i += 1;\n\n\n\n // r:q = ((r:q) << 1) | carry\n\n r = (r << 1) | (q >> (u32::BITS - 1));\n\n q = (q << 1) | carry;\n\n\n\n // carry = 0;\n\n // if r > d {\n\n // r -= d;\n", "file_path": "src/int/udiv.rs", "rank": 86, "score": 15.911465318057136 }, { "content": "\n\n /// A mask for the exponent\n\n const EXPONENT_MASK: Self::Int;\n\n\n\n /// Returns `self` transmuted to `Self::Int`\n\n fn repr(self) -> Self::Int;\n\n\n\n #[cfg(test)]\n\n /// Checks if two floats have the same bit representation. *Except* for NaNs! NaN can be\n\n /// represented in multiple different ways. 
This method returns `true` if two NaNs are\n\n /// compared.\n\n fn eq_repr(self, rhs: Self) -> bool;\n\n\n\n /// Returns a `Self::Int` transmuted back to `Self`\n\n fn from_repr(a: Self::Int) -> Self;\n\n\n\n /// Constructs a `Self` from its parts. Inputs are treated as bits and shifted into position.\n\n fn from_parts(sign: bool, exponent: Self::Int, significand: Self::Int) -> Self;\n\n\n\n /// Returns (normalized exponent, normalized significand)\n", "file_path": "src/float/mod.rs", "rank": 87, "score": 14.979785744316274 }, { "content": " }\n\n\n\n fn generate<R>(rng: &mut R) -> Option<Self>\n\n where\n\n R: Rng,\n\n Self: Sized,\n\n {\n\n let a = gen_f32(rng);\n\n let b = gen_f32(rng);\n\n let c = a - b;\n\n // TODO accept NaNs. We don't do that right now because we can't check\n\n // for NaN-ness on the thumb targets (due to missing intrinsics)\n\n if a.is_nan() || b.is_nan() || c.is_nan() {\n\n return None;\n\n }\n\n\n\n Some(\n\n Subsf3 {\n\n a: to_u32(a),\n\n b: to_u32(b),\n", "file_path": "build.rs", "rank": 88, "score": 14.75331711219562 }, { "content": " }\n\n\n\n fn generate<R>(rng: &mut R) -> Option<Self>\n\n where\n\n R: Rng,\n\n Self: Sized,\n\n {\n\n let a = gen_f32(rng);\n\n let b = gen_f32(rng);\n\n let c = a + b;\n\n // TODO accept NaNs. We don't do that right now because we can't check\n\n // for NaN-ness on the thumb targets (due to missing intrinsics)\n\n if a.is_nan() || b.is_nan() || c.is_nan() {\n\n return None;\n\n }\n\n\n\n Some(\n\n Addsf3 {\n\n a: to_u32(a),\n\n b: to_u32(b),\n", "file_path": "build.rs", "rank": 89, "score": 14.75331711219562 }, { "content": " }\n\n\n\n fn generate<R>(rng: &mut R) -> Option<Self>\n\n where\n\n R: Rng,\n\n Self: Sized,\n\n {\n\n let a = gen_f32(rng);\n\n let b = gen_i32(rng);\n\n let c = a.powi(b);\n\n // TODO accept NaNs. 
We don't do that right now because we can't check\n\n // for NaN-ness on the thumb targets\n\n if a.is_nan() || c.is_nan() {\n\n return None;\n\n }\n\n\n\n Some(\n\n Powisf2 {\n\n a: to_u32(a),\n\n b,\n", "file_path": "build.rs", "rank": 90, "score": 14.326401087829268 }, { "content": " // +/-infinity + -/+infinity = qNaN\n\n if (a.repr() ^ b.repr()) == sign_bit {\n\n return F::from_repr(qnan_rep);\n\n } else {\n\n // +/-infinity + anything remaining = +/- infinity\n\n return a;\n\n }\n\n }\n\n\n\n // anything remaining + +/-infinity = +/-infinity\n\n if b_abs == inf_rep {\n\n return b;\n\n }\n\n\n\n // zero + anything = anything\n\n if a_abs == Int::ZERO {\n\n // but we need to get the sign right for zero + zero\n\n if b_abs == Int::ZERO {\n\n return F::from_repr(a.repr() & b.repr());\n\n } else {\n", "file_path": "src/float/add.rs", "rank": 92, "score": 14.075096676569302 }, { "content": " }\n\n\n\n gen_int!(gen_i32, i32, i16);\n\n gen_int!(gen_i64, i64, i32);\n\n gen_int!(gen_i128, i128, i64);\n\n\n\n macro_rules! 
gen_float {\n\n ($name:ident,\n\n $fty:ident,\n\n $uty:ident,\n\n $bits:expr,\n\n $significand_bits:expr) => {\n\n pub fn $name<R>(rng: &mut R) -> $fty\n\n where\n\n R: Rng,\n\n {\n\n const BITS: u8 = $bits;\n\n const SIGNIFICAND_BITS: u8 = $significand_bits;\n\n\n\n const SIGNIFICAND_MASK: $uty = (1 << SIGNIFICAND_BITS) - 1;\n", "file_path": "build.rs", "rank": 93, "score": 13.972275641861609 }, { "content": " not(thumbv6m)))]\n\n /// Returns `n / d` and sets `*rem = n % d`\n\n pub extern \"C\" fn __udivmodsi4(n: u32, d: u32, rem: Option<&mut u32>) -> u32 {\n\n let q = __udivsi3(n, d);\n\n if let Some(rem) = rem {\n\n *rem = n - (q * d);\n\n }\n\n q\n\n }\n\n\n\n #[use_c_shim_if(all(target_arch = \"x86\", not(target_env = \"msvc\")))]\n\n /// Returns `n / d`\n\n pub extern \"C\" fn __udivdi3(n: u64, d: u64) -> u64 {\n\n __udivmoddi4(n, d, None)\n\n }\n\n\n\n #[use_c_shim_if(all(target_arch = \"x86\", not(target_env = \"msvc\")))]\n\n /// Returns `n % d`\n\n pub extern \"C\" fn __umoddi3(n: u64, d: u64) -> u64 {\n\n let mut rem = 0;\n", "file_path": "src/int/udiv.rs", "rank": 94, "score": 13.723676676311852 }, { "content": " // carry = 1;\n\n // }\n\n\n\n let s = (d.wrapping_sub(r).wrapping_sub(1)) as i32 >> (u32::BITS - 1);\n\n carry = (s & 1) as u32;\n\n r -= d & s as u32;\n\n }\n\n\n\n (q << 1) | carry\n\n }\n\n\n\n #[use_c_shim_if(all(target_arch = \"arm\", not(target_os = \"ios\")))]\n\n /// Returns `n % d`\n\n pub extern \"C\" fn __umodsi3(n: u32, d: u32) -> u32 {\n\n let q = __udivsi3(n, d);\n\n n - q * d\n\n }\n\n\n\n #[use_c_shim_if(all(target_arch = \"arm\",\n\n not(target_os = \"ios\"),\n", "file_path": "src/int/udiv.rs", "rank": 95, "score": 13.333213329943876 }, { "content": " let qnan_rep = exponent_mask | quiet_bit;\n\n\n\n let mut a_rep = a.repr();\n\n let mut b_rep = b.repr();\n\n let a_abs = a_rep & abs_mask;\n\n let b_abs = b_rep & abs_mask;\n\n\n\n // Detect if a or b is zero, infinity, or NaN.\n\n if a_abs.wrapping_sub(one) >= inf_rep - one 
||\n\n b_abs.wrapping_sub(one) >= inf_rep - one {\n\n // NaN + anything = qNaN\n\n if a_abs > inf_rep {\n\n return F::from_repr(a_abs | quiet_bit);\n\n }\n\n // anything + NaN = qNaN\n\n if b_abs > inf_rep {\n\n return F::from_repr(b_abs | quiet_bit);\n\n }\n\n\n\n if a_abs == inf_rep {\n", "file_path": "src/float/add.rs", "rank": 96, "score": 13.181950375270032 }, { "content": "use int::{Int, CastInto};\n\nuse float::Float;\n\n\n\n/// Returns `a + b`\n", "file_path": "src/float/add.rs", "rank": 97, "score": 13.102194136095955 }, { "content": " {\n\n gen_i128(rng) as u128\n\n }\n\n\n\n pub fn gen_u32<R>(rng: &mut R) -> u32\n\n where\n\n R: Rng,\n\n {\n\n gen_i32(rng) as u32\n\n }\n\n\n\n fn gen_u64<R>(rng: &mut R) -> u64\n\n where\n\n R: Rng,\n\n {\n\n gen_i64(rng) as u64\n\n }\n\n\n\n pub fn to_u32(x: f32) -> u32 {\n\n unsafe { mem::transmute(x) }\n", "file_path": "build.rs", "rank": 98, "score": 12.787032673753526 }, { "content": " fn to_string(&self, buffer: &mut String) {\n\n writeln!(\n\n buffer,\n\n \"(({a}, {b}), ({c}, {rem})),\",\n\n a = self.a,\n\n b = self.b,\n\n c = self.c,\n\n rem = self.rem\n\n )\n\n .unwrap();\n\n }\n\n\n\n fn prologue() -> &'static str {\n\n \"\n\nuse compiler_builtins::int::udiv::__udivmodsi4;\n\n\n\nstatic TEST_CASES: &[((u32, u32), (u32, u32))] = &[\n\n\"\n\n }\n\n\n\n fn epilogue() -> &'static str {\n\n \"\n\n];\n\n\n", "file_path": "build.rs", "rank": 99, "score": 12.787020568209043 } ]
Rust
kube/src/client/tls.rs
Ka1wa/kube-rs
50acf98ce7513809634d297392663539c06478c2
#[cfg(feature = "native-tls")] pub mod native_tls { use tokio_native_tls::native_tls::{Certificate, Identity, TlsConnector}; use crate::{Error, Result}; const IDENTITY_PASSWORD: &str = " "; pub fn native_tls_connector( identity_pem: Option<&Vec<u8>>, root_cert: Option<&Vec<Vec<u8>>>, accept_invalid: bool, ) -> Result<TlsConnector> { let mut builder = TlsConnector::builder(); if let Some(pem) = identity_pem { let identity = pkcs12_from_pem(pem, IDENTITY_PASSWORD)?; builder.identity( Identity::from_pkcs12(&identity, IDENTITY_PASSWORD) .map_err(|e| Error::SslError(format!("{}", e)))?, ); } if let Some(ders) = root_cert { for der in ders { builder.add_root_certificate( Certificate::from_der(der).map_err(|e| Error::SslError(format!("{}", e)))?, ); } } if accept_invalid { builder.danger_accept_invalid_certs(true); } let connector = builder.build().map_err(|e| Error::SslError(format!("{}", e)))?; Ok(connector) } fn pkcs12_from_pem(pem: &[u8], password: &str) -> Result<Vec<u8>> { use openssl::{pkcs12::Pkcs12, pkey::PKey, x509::X509}; let x509 = X509::from_pem(pem)?; let pkey = PKey::private_key_from_pem(pem)?; let p12 = Pkcs12::builder().build(password, "kubeconfig", &pkey, &x509)?; let der = p12.to_der()?; Ok(der) } } #[cfg(feature = "rustls-tls")] pub mod rustls_tls { use std::sync::Arc; use rustls::{self, Certificate, ClientConfig, ServerCertVerified, ServerCertVerifier}; use webpki::DNSNameRef; use crate::{Error, Result}; pub fn rustls_client_config( identity_pem: Option<&Vec<u8>>, root_cert: Option<&Vec<Vec<u8>>>, accept_invalid: bool, ) -> Result<ClientConfig> { use std::io::Cursor; let mut client_config = ClientConfig::new(); if let Some(buf) = identity_pem { let (key, certs) = { let mut pem = Cursor::new(buf); let certs = rustls_pemfile::certs(&mut pem) .and_then(|certs| { if certs.is_empty() { Err(std::io::Error::new( std::io::ErrorKind::NotFound, "No X.509 Certificates Found", )) } else { Ok(certs.into_iter().map(rustls::Certificate).collect::<Vec<_>>()) } }) 
.map_err(|_| Error::SslError("No valid certificate was found".into()))?; pem.set_position(0); let mut sk = rustls_pemfile::pkcs8_private_keys(&mut pem) .and_then(|keys| { if keys.is_empty() { Err(std::io::Error::new( std::io::ErrorKind::NotFound, "No PKCS8 Key Found", )) } else { Ok(keys.into_iter().map(rustls::PrivateKey).collect::<Vec<_>>()) } }) .or_else(|_| { pem.set_position(0); rustls_pemfile::rsa_private_keys(&mut pem).and_then(|keys| { if keys.is_empty() { Err(std::io::Error::new( std::io::ErrorKind::NotFound, "No RSA Key Found", )) } else { Ok(keys.into_iter().map(rustls::PrivateKey).collect::<Vec<_>>()) } }) }) .map_err(|_| Error::SslError("No valid private key was found".into()))?; if let (Some(sk), false) = (sk.pop(), certs.is_empty()) { (sk, certs) } else { return Err(Error::SslError("private key or certificate not found".into())); } }; client_config .set_single_client_cert(certs, key) .map_err(|e| Error::SslError(format!("{}", e)))?; } if let Some(ders) = root_cert { for der in ders { client_config .root_store .add(&Certificate(der.to_owned())) .map_err(|e| Error::SslError(format!("{}", e)))?; } } if accept_invalid { client_config .dangerous() .set_certificate_verifier(Arc::new(NoCertificateVerification {})); } Ok(client_config) } struct NoCertificateVerification {} impl ServerCertVerifier for NoCertificateVerification { fn verify_server_cert( &self, _roots: &rustls::RootCertStore, _presented_certs: &[rustls::Certificate], _dns_name: DNSNameRef<'_>, _ocsp: &[u8], ) -> Result<ServerCertVerified, rustls::TLSError> { Ok(ServerCertVerified::assertion()) } } }
#[cfg(feature = "native-tls")] pub mod native_tls { use tokio_native_tls::native_tls::{Certificate, Identity, TlsConnector}; use crate::{Error, Result}; const IDENTITY_PASSWORD: &str = " "; pub fn native_tls_connector( identity_pem: Option<&Vec<u8>>, root_cert: Option<&Vec<Vec<u8>>>, accept_invalid: bool, ) -> Result<TlsConnector> { let mut builder = TlsConnector::builder(); if let Some(pem) = identity_pem { let identity = pkcs12_from_pem(pem, IDENTITY_PASSWORD)?; builder.identity( Identity::from_pkcs12(&identity, IDENTITY_PASSWORD) .map_err(|e| Error::SslError(format!("{}", e)))?, ); } if let Some(ders) = root_cert { for der in ders { builder.add_root_certificate( Certificate::from_der(der).map_err(|e| Error::SslError(format!("{}", e)))?, ); } } if accept_invalid { builder.danger_accept_invalid_certs(true); } let connector = builder.build().map_err(|e| Error::SslError(format!("{}", e)))?; Ok(connector) } fn pkcs12_from_pem(pem: &[u8], password: &str) -> Result<Vec<u8>> { use openssl::{pkcs12::Pkcs12, pkey::PKey, x509::X509}; let x509 = X509::from_pem(pem)?; let pkey = PKey::private_key_from_pem(pem)?; let p12 = Pkcs12::builder().build(password, "kubeconfig", &pkey, &x509)?; let der = p12.to_der()?; Ok(der) } } #[cfg(feature = "rustls-tls")] pub mod rustls_tls { use std::sync::Arc; use rustls::{self, Certificate, ClientConfig, ServerCertVerified, ServerCertVerifier}; use webpki::DNSNameRef; use crate::{Error, Result}; pub fn rustls_client_config( identity_pem: Option<&Vec<u8>>, root_cert: Option<&Vec<Vec<u8>>>, accept_invalid: bool, ) -> Result<ClientConfig> { use std::io::Cursor; let mut client_config = ClientConfig::new(); if let Some(buf) = identity_pem { let (key, certs) = { let mut pem = Cursor::new(buf); let certs = rustls_pemfile::certs(&mut pem) .and_then(|certs| { if certs.is_empty() { Err(std::io::Error::new( std::io::ErrorKind::NotFound, "No X.509 Certificates Found", )) } else { Ok(certs.into_iter().map(rustls::Certificate).collect::<Vec<_>>()) } }) 
.map_err(|_| Error::SslError("No valid certificate was found".into()))?; pem.set_position(0); let mut sk = rustls_pemfile::pkcs8_private_keys(&mut pem) .and_then(|keys| { if keys.is_empty() { Err(std::io::Error::new( std::io::ErrorKind::NotFound, "No PKCS8 Key Found", )) } else { Ok(keys.into_iter().map(rustls::PrivateKey).collect::<Vec<_>>()) } }) .or_else(|_| { pem.set_position(0); rustls_pemfile::rsa_private_keys(&mut pem).and_then(|keys| { if keys.is_empty() { Err(std::io::Error::new( std::io::ErrorKind::NotFound, "No RSA Key Found", )) } else { Ok(keys.into_iter().map(rustls::PrivateKey).collect::<Vec<_>>()) } }) }) .map_err(|_| Error::SslError("No valid private key was found".into()))?; if let (Some(sk), false) = (sk.pop(), certs.is_empty()) { (sk, certs) } else { return Err(Error::SslError("private key or certificate not found".into())); } }; client_config .set_single_client_cert(certs, key) .map_err(|e| Error::SslError(format!("{}", e)))?; } if let Some(ders) = root_cert { for der in ders { client_config .root_store .add(&Certificate(der.to_owned())) .map_err(|e| Error::SslError(format!("{}", e)))?; } } if accept_invalid { client_config .dangerous() .set_certificate_verifier(Arc::new(NoCertificateVerification {})); } Ok(client_config) } struct NoCertificateVerification {} impl ServerCertVerifier for NoCertificateVerification {
} }
fn verify_server_cert( &self, _roots: &rustls::RootCertStore, _presented_certs: &[rustls::Certificate], _dns_name: DNSNameRef<'_>, _ocsp: &[u8], ) -> Result<ServerCertVerified, rustls::TLSError> { Ok(ServerCertVerified::assertion()) }
function_block-full_function
[ { "content": "/// Returns certification from specified path in cluster.\n\npub fn load_cert() -> Result<Vec<Vec<u8>>> {\n\n Ok(utils::certs(&utils::read_file(SERVICE_CERTFILE)?))\n\n}\n\n\n", "file_path": "kube/src/config/incluster_config.rs", "rank": 0, "score": 250389.20424861487 }, { "content": "#[cfg(any(not(target_os = \"macos\"), not(feature = \"native-tls\")))]\n\nfn hacky_cert_lifetime_for_macos(_: &[u8]) -> bool {\n\n false\n\n}\n\n\n\n// Expose raw config structs\n\npub use file_config::{\n\n AuthInfo, AuthProviderConfig, Cluster, Context, ExecConfig, Kubeconfig, NamedAuthInfo, NamedCluster,\n\n NamedContext, NamedExtension, Preferences,\n\n};\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[cfg(not(feature = \"client\"))] // want to ensure this works without client features\n\n #[tokio::test]\n\n async fn config_loading_on_small_feature_set() {\n\n use super::Config;\n\n let cfgraw = r#\"\n\n apiVersion: v1\n\n clusters:\n", "file_path": "kube/src/config/mod.rs", "rank": 1, "score": 247343.011219575 }, { "content": "#[cfg(all(target_os = \"macos\", feature = \"native-tls\"))]\n\nfn hacky_cert_lifetime_for_macos(ca: &[u8]) -> bool {\n\n use openssl::x509::X509;\n\n let ca = X509::from_der(ca).expect(\"valid der is a der\");\n\n ca.not_before()\n\n .diff(ca.not_after())\n\n .map(|d| d.days.abs() > 824)\n\n .unwrap_or(false)\n\n}\n\n\n", "file_path": "kube/src/config/mod.rs", "rank": 2, "score": 243080.53621156252 }, { "content": "// Verify upgrade response according to RFC6455.\n\n// Based on `tungstenite` and added subprotocol verification.\n\nfn verify_upgrade_response(res: &Response<Body>, key: &str) -> Result<()> {\n\n if res.status() != StatusCode::SWITCHING_PROTOCOLS {\n\n return Err(Error::ProtocolSwitch(res.status()));\n\n }\n\n\n\n let headers = res.headers();\n\n if !headers\n\n .get(http::header::UPGRADE)\n\n .and_then(|h| h.to_str().ok())\n\n .map(|h| h.eq_ignore_ascii_case(\"websocket\"))\n\n .unwrap_or(false)\n\n {\n\n return 
Err(Error::MissingUpgradeWebSocketHeader);\n\n }\n\n\n\n if !headers\n\n .get(http::header::CONNECTION)\n\n .and_then(|h| h.to_str().ok())\n\n .map(|h| h.eq_ignore_ascii_case(\"Upgrade\"))\n\n .unwrap_or(false)\n", "file_path": "kube/src/client/mod.rs", "rank": 3, "score": 242762.37038522947 }, { "content": "pub fn certs(data: &[u8]) -> Vec<Vec<u8>> {\n\n pem::parse_many(data)\n\n .into_iter()\n\n .filter_map(|p| {\n\n if p.tag == \"CERTIFICATE\" {\n\n Some(p.contents)\n\n } else {\n\n None\n\n }\n\n })\n\n .collect::<Vec<_>>()\n\n}\n", "file_path": "kube/src/config/utils.rs", "rank": 4, "score": 214294.65592322533 }, { "content": "/// Kubernetes returned error handling\n\n///\n\n/// Either kube returned an explicit ApiError struct,\n\n/// or it someohow returned something we couldn't parse as one.\n\n///\n\n/// In either case, present an ApiError upstream.\n\n/// The latter is probably a bug if encountered.\n\nfn handle_api_errors(text: &str, s: StatusCode) -> Result<()> {\n\n if s.is_client_error() || s.is_server_error() {\n\n // Print better debug when things do fail\n\n // trace!(\"Parsing error: {}\", text);\n\n if let Ok(errdata) = serde_json::from_str::<ErrorResponse>(text) {\n\n tracing::debug!(\"Unsuccessful: {:?}\", errdata);\n\n Err(Error::Api(errdata))\n\n } else {\n\n tracing::warn!(\"Unsuccessful data error parse: {}\", text);\n\n let ae = ErrorResponse {\n\n status: s.to_string(),\n\n code: s.as_u16(),\n\n message: format!(\"{:?}\", text),\n\n reason: \"Failed to parse error data\".into(),\n\n };\n\n tracing::debug!(\"Unsuccessful: {:?} (reconstruct)\", ae);\n\n Err(Error::Api(ae))\n\n }\n\n } else {\n\n Ok(())\n", "file_path": "kube/src/client/mod.rs", "rank": 5, "score": 195295.41031708146 }, { "content": "/// Caches objects from `watcher::Event`s to a local `Store`\n\n///\n\n/// Keep in mind that the `Store` is just a cache, and may be out of date.\n\n///\n\n/// Note: It is a bad idea to feed a single `reflector` from multiple `watcher`s, 
since\n\n/// the whole `Store` will be cleared whenever any of them emits a `Restarted` event.\n\npub fn reflector<K, W>(mut store: store::Writer<K>, stream: W) -> impl Stream<Item = W::Item>\n\nwhere\n\n K: Resource + Clone,\n\n K::DynamicType: Eq + Hash + Clone,\n\n W: Stream<Item = watcher::Result<watcher::Event<K>>>,\n\n{\n\n stream.inspect_ok(move |event| store.apply_watcher_event(event))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{reflector, store, ObjectRef};\n\n use crate::watcher;\n\n use futures::{stream, StreamExt, TryStreamExt};\n\n use k8s_openapi::{api::core::v1::ConfigMap, apimachinery::pkg::apis::meta::v1::ObjectMeta};\n\n use rand::{\n\n distributions::{Bernoulli, Uniform},\n\n Rng,\n\n };\n\n use std::collections::{BTreeMap, HashMap};\n", "file_path": "kube-runtime/src/reflector/mod.rs", "rank": 6, "score": 191510.42896948266 }, { "content": "pub fn read_file<P: AsRef<Path>>(file: P) -> Result<Vec<u8>> {\n\n fs::read(&file).map_err(|source| {\n\n ConfigError::ReadFile {\n\n path: file.as_ref().into(),\n\n source,\n\n }\n\n .into()\n\n })\n\n}\n\n\n", "file_path": "kube/src/config/utils.rs", "rank": 7, "score": 191498.3755698928 }, { "content": "fn extract_value(json: &serde_json::Value, path: &str) -> Result<String> {\n\n let pure_path = path.trim_matches(|c| c == '\"' || c == '{' || c == '}');\n\n match jsonpath_select(json, &format!(\"${}\", pure_path)) {\n\n Ok(v) if !v.is_empty() => {\n\n if let serde_json::Value::String(res) = v[0] {\n\n Ok(res.clone())\n\n } else {\n\n Err(ConfigError::AuthExec(format!(\"Target value at {:} is not a string\", pure_path)).into())\n\n }\n\n }\n\n\n\n Err(e) => Err(ConfigError::AuthExec(format!(\"Could not extract JSON value: {:}\", e)).into()),\n\n\n\n _ => Err(ConfigError::AuthExec(format!(\"Target value {:} not found\", pure_path)).into()),\n\n }\n\n}\n\n\n\n/// ExecCredentials is used by exec-based plugins to communicate credentials to\n\n/// HTTP transports.\n\n#[derive(Clone, Debug, Serialize, 
Deserialize)]\n", "file_path": "kube/src/client/auth/mod.rs", "rank": 8, "score": 180178.06348400004 }, { "content": "/// Returns token from specified path in cluster.\n\npub fn load_token() -> Result<String> {\n\n utils::read_file_to_string(SERVICE_TOKENFILE)\n\n}\n\n\n", "file_path": "kube/src/config/incluster_config.rs", "rank": 9, "score": 172553.5420786302 }, { "content": "/// Returns the default namespace from specified path in cluster.\n\npub fn load_default_ns() -> Result<String> {\n\n utils::read_file_to_string(SERVICE_DEFAULT_NS)\n\n}\n\n\n", "file_path": "kube/src/config/incluster_config.rs", "rank": 10, "score": 169995.24044465763 }, { "content": "pub fn data_or_file_with_base64<P: AsRef<Path>>(data: &Option<String>, file: &Option<P>) -> Result<Vec<u8>> {\n\n let mut blob = match (data, file) {\n\n (Some(d), _) => base64::decode(&d)\n\n .map_err(ConfigError::Base64Decode)\n\n .map_err(Error::Kubeconfig),\n\n (_, Some(f)) => read_file(f),\n\n _ => Err(ConfigError::NoBase64FileOrData.into()),\n\n }?;\n\n //Ensure there is a trailing newline in the blob\n\n //Don't bother if the blob is empty\n\n if blob.last().map(|end| *end != b'\\n').unwrap_or(false) {\n\n blob.push(b'\\n');\n\n }\n\n Ok(blob)\n\n}\n\n\n", "file_path": "kube/src/config/utils.rs", "rank": 11, "score": 168808.89422297006 }, { "content": "// dryRun serialization differ when used as body parameters and query strings:\n\n// query strings are either true/false\n\n// body params allow only: missing field, or [\"All\"]\n\n// The latter is a very awkward API causing users to do to\n\n// dp.dry_run = vec![\"All\".into()];\n\n// just to turn on dry_run..\n\n// so we hide this detail for now.\n\nfn dry_run_all_ser<S>(t: &bool, s: S) -> std::result::Result<S::Ok, S::Error>\n\nwhere\n\n S: serde::ser::Serializer,\n\n{\n\n use serde::ser::SerializeTuple;\n\n match t {\n\n true => {\n\n let mut map = s.serialize_tuple(1)?;\n\n map.serialize_element(\"All\")?;\n\n map.end()\n\n }\n\n false => 
s.serialize_none(),\n\n }\n\n}\n\n#[cfg(test)]\n\nmod test {\n\n use super::DeleteParams;\n\n #[test]\n\n fn delete_param_serialize() {\n\n let mut dp = DeleteParams::default();\n", "file_path": "kube-core/src/params.rs", "rank": 12, "score": 152345.1087310419 }, { "content": "type Result<T, E = Error> = std::result::Result<T, E>;\n\n\n", "file_path": "examples/secret_syncer.rs", "rank": 13, "score": 152195.82032729127 }, { "content": "/// Enqueues the object itself for reconciliation\n\npub fn trigger_self<K, S>(\n\n stream: S,\n\n dyntype: K::DynamicType,\n\n) -> impl Stream<Item = Result<ReconcileRequest<K>, S::Error>>\n\nwhere\n\n S: TryStream<Ok = K>,\n\n K: Resource,\n\n K::DynamicType: Clone,\n\n{\n\n trigger_with(stream, move |obj| {\n\n Some(ReconcileRequest {\n\n obj_ref: ObjectRef::from_obj_with(&obj, dyntype.clone()),\n\n reason: ReconcileReason::ObjectUpdated,\n\n })\n\n })\n\n}\n\n\n", "file_path": "kube-runtime/src/controller/mod.rs", "rank": 14, "score": 148698.20164665268 }, { "content": "/// Enqueues any owners of type `KOwner` for reconciliation\n\npub fn trigger_owners<KOwner, S>(\n\n stream: S,\n\n owner_type: KOwner::DynamicType,\n\n child_type: <S::Ok as Resource>::DynamicType,\n\n) -> impl Stream<Item = Result<ReconcileRequest<KOwner>, S::Error>>\n\nwhere\n\n S: TryStream,\n\n S::Ok: Resource,\n\n <S::Ok as Resource>::DynamicType: Clone,\n\n KOwner: Resource,\n\n KOwner::DynamicType: Clone,\n\n{\n\n trigger_with(stream, move |obj| {\n\n let meta = obj.meta().clone();\n\n let ns = meta.namespace;\n\n let owner_type = owner_type.clone();\n\n let child_ref = ObjectRef::from_obj_with(&obj, child_type.clone()).erase();\n\n meta.owner_references\n\n .into_iter()\n\n .flatten()\n", "file_path": "kube-runtime/src/controller/mod.rs", "rank": 15, "score": 146159.31102232815 }, { "content": "pub fn read_file_to_string<P: AsRef<Path>>(file: P) -> Result<String> {\n\n fs::read_to_string(&file).map_err(|source| {\n\n ConfigError::ReadFile {\n\n path: 
file.as_ref().into(),\n\n source,\n\n }\n\n .into()\n\n })\n\n}\n\n\n", "file_path": "kube/src/config/utils.rs", "rank": 16, "score": 144461.5521536637 }, { "content": "/// Helper for building custom trigger filters, see the implementations of [`trigger_self`] and [`trigger_owners`] for some examples.\n\npub fn trigger_with<T, K, I, S>(\n\n stream: S,\n\n mapper: impl Fn(T) -> I,\n\n) -> impl Stream<Item = Result<ReconcileRequest<K>, S::Error>>\n\nwhere\n\n S: TryStream<Ok = T>,\n\n I: IntoIterator,\n\n I::Item: Into<ReconcileRequest<K>>,\n\n K: Resource,\n\n{\n\n stream\n\n .map_ok(move |obj| stream::iter(mapper(obj).into_iter().map(Into::into).map(Ok)))\n\n .try_flatten()\n\n}\n\n\n", "file_path": "kube-runtime/src/controller/mod.rs", "rank": 17, "score": 143748.79913775157 }, { "content": "fn token_from_provider(provider: &AuthProviderConfig) -> Result<ProviderToken> {\n\n match provider.name.as_ref() {\n\n \"oidc\" => token_from_oidc_provider(provider),\n\n \"gcp\" => token_from_gcp_provider(provider),\n\n _ => Err(ConfigError::AuthExec(format!(\n\n \"Authentication with provider {:} not supported\",\n\n provider.name\n\n ))\n\n .into()),\n\n }\n\n}\n\n\n", "file_path": "kube/src/client/auth/mod.rs", "rank": 18, "score": 137445.9068685412 }, { "content": "/// Apply a reconciler to an input stream, with a given retry policy\n\n///\n\n/// Takes a `store` parameter for the core objects, which should usually be updated by a [`reflector`].\n\n///\n\n/// The `queue` indicates which objects should be reconciled. For the core objects this will usually be\n\n/// the [`reflector`] (piped through [`trigger_self`]). 
If your core objects own any subobjects then you\n\n/// can also make them trigger reconciliations by [merging](`futures::stream::select`) the [`reflector`]\n\n/// with a [`watcher`](watcher()) or [`reflector`](reflector()) for the subobject.\n\n///\n\n/// This is the \"hard-mode\" version of [`Controller`], which allows you some more customization\n\n/// (such as triggering from arbitrary [`Stream`]s), at the cost of being a bit more verbose.\n\npub fn applier<K, QueueStream, ReconcilerFut, T>(\n\n mut reconciler: impl FnMut(K, Context<T>) -> ReconcilerFut,\n\n mut error_policy: impl FnMut(&ReconcilerFut::Error, Context<T>) -> ReconcilerAction,\n\n context: Context<T>,\n\n store: Store<K>,\n\n queue: QueueStream,\n\n) -> impl Stream<Item = Result<(ObjectRef<K>, ReconcilerAction), Error<ReconcilerFut::Error, QueueStream::Error>>>\n\nwhere\n\n K: Clone + Resource + 'static,\n\n K::DynamicType: Debug + Eq + Hash + Clone + Unpin,\n\n ReconcilerFut: TryFuture<Ok = ReconcilerAction> + Unpin,\n\n ReconcilerFut::Error: std::error::Error + 'static,\n\n QueueStream: TryStream,\n\n QueueStream::Ok: Into<ReconcileRequest<K>>,\n\n QueueStream::Error: std::error::Error + 'static,\n\n{\n\n let (scheduler_shutdown_tx, scheduler_shutdown_rx) = channel::oneshot::channel();\n\n let err_context = context.clone();\n\n let (scheduler_tx, scheduler_rx) = channel::mpsc::channel::<ScheduleRequest<ReconcileRequest<K>>>(100);\n\n // Create a stream of ObjectRefs that need to be reconciled\n", "file_path": "kube-runtime/src/controller/mod.rs", "rank": 19, "score": 136473.8237580128 }, { "content": "fn token_from_gcp_provider(provider: &AuthProviderConfig) -> Result<ProviderToken> {\n\n if let Some(id_token) = provider.config.get(\"id-token\") {\n\n return Ok(ProviderToken::GcpCommand(id_token.clone(), None));\n\n }\n\n\n\n // Return cached access token if it's still valid\n\n if let Some(access_token) = provider.config.get(\"access-token\") {\n\n if let Some(expiry) = 
provider.config.get(\"expiry\") {\n\n let expiry_date = expiry\n\n .parse::<DateTime<Utc>>()\n\n .map_err(ConfigError::MalformedTokenExpirationDate)?;\n\n if Utc::now() + Duration::seconds(60) < expiry_date {\n\n return Ok(ProviderToken::GcpCommand(access_token.clone(), Some(expiry_date)));\n\n }\n\n }\n\n }\n\n\n\n // Command-based token source\n\n if let Some(cmd) = provider.config.get(\"cmd-path\") {\n\n let params = provider.config.get(\"cmd-args\").cloned().unwrap_or_default();\n", "file_path": "kube/src/client/auth/mod.rs", "rank": 20, "score": 135326.77390432183 }, { "content": "fn token_from_oidc_provider(provider: &AuthProviderConfig) -> Result<ProviderToken> {\n\n match provider.config.get(\"id-token\") {\n\n Some(id_token) => Ok(ProviderToken::Oidc(id_token.clone())),\n\n None => Err(ConfigError::AuthExec(\"No id-token for oidc Authentication provider\".into()).into()),\n\n }\n\n}\n\n\n", "file_path": "kube/src/client/auth/mod.rs", "rank": 21, "score": 135326.77390432183 }, { "content": "#[cfg(feature = \"ws\")]\n\nfn sec_websocket_key() -> String {\n\n let r: [u8; 16] = rand::random();\n\n base64::encode(&r)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{Api, Client};\n\n\n\n use futures::pin_mut;\n\n use http::{Request, Response};\n\n use hyper::Body;\n\n use k8s_openapi::api::core::v1::Pod;\n\n use tower_test::mock;\n\n\n\n #[tokio::test]\n\n async fn test_mock() {\n\n let (mock_service, handle) = mock::pair::<Request<Body>, Response<Body>>();\n\n let spawned = tokio::spawn(async move {\n\n // Receive a request for pod and respond with some data\n", "file_path": "kube/src/client/mod.rs", "rank": 22, "score": 135157.17395016926 }, { "content": "fn auth_exec(auth: &ExecConfig) -> Result<ExecCredential, ConfigError> {\n\n let mut cmd = Command::new(&auth.command);\n\n if let Some(args) = &auth.args {\n\n cmd.args(args);\n\n }\n\n if let Some(env) = &auth.env {\n\n let envs = env\n\n .iter()\n\n .flat_map(|env| match (env.get(\"name\"), 
env.get(\"value\")) {\n\n (Some(name), Some(value)) => Some((name, value)),\n\n _ => None,\n\n });\n\n cmd.envs(envs);\n\n }\n\n let out = cmd.output().map_err(ConfigError::AuthExecStart)?;\n\n if !out.status.success() {\n\n return Err(ConfigError::AuthExecRun {\n\n cmd: format!(\"{:?}\", cmd),\n\n status: out.status,\n\n out,\n", "file_path": "kube/src/client/auth/mod.rs", "rank": 23, "score": 132965.16791133094 }, { "content": "/// This generates the code for the `kube::core::object::HasSpec` trait implementation.\n\n///\n\n/// All CRDs have a spec so it is implemented for all of them.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `ident`: The identity (name) of the spec struct\n\n/// * `root ident`: The identity (name) of the main CRD struct (the one we generate in this macro)\n\nfn generate_hasspec(spec_ident: &Ident, root_ident: &Ident) -> TokenStream {\n\n quote! {\n\n impl ::kube::core::object::HasSpec for #root_ident {\n\n type Spec = #spec_ident;\n\n\n\n fn spec(&self) -> &#spec_ident {\n\n &self.spec\n\n }\n\n\n\n fn spec_mut(&mut self) -> &mut #spec_ident {\n\n &mut self.spec\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "kube-derive/src/custom_resource.rs", "rank": 24, "score": 130428.27852258642 }, { "content": "// Simple pluralizer. Handles the special cases.\n\nfn to_plural(word: &str) -> String {\n\n if word == \"endpoints\" || word == \"endpointslices\" {\n\n return word.to_owned();\n\n } else if word == \"nodemetrics\" {\n\n return \"nodes\".to_owned();\n\n } else if word == \"podmetrics\" {\n\n return \"pods\".to_owned();\n\n }\n\n\n\n // Words ending in s, x, z, ch, sh will be pluralized with -es (eg. 
foxes).\n\n if word.ends_with('s')\n\n || word.ends_with('x')\n\n || word.ends_with('z')\n\n || word.ends_with(\"ch\")\n\n || word.ends_with(\"sh\")\n\n {\n\n return format!(\"{}es\", word);\n\n }\n\n\n\n // Words ending in y that are preceded by a consonant will be pluralized by\n", "file_path": "kube-core/src/discovery.rs", "rank": 25, "score": 118367.47614749375 }, { "content": "// Simple pluralizer.\n\n// Duplicating the code from kube (without special casing) because it's simple enough.\n\n// Irregular plurals must be explicitly specified.\n\nfn to_plural(word: &str) -> String {\n\n // Words ending in s, x, z, ch, sh will be pluralized with -es (eg. foxes).\n\n if word.ends_with('s')\n\n || word.ends_with('x')\n\n || word.ends_with('z')\n\n || word.ends_with(\"ch\")\n\n || word.ends_with(\"sh\")\n\n {\n\n return format!(\"{}es\", word);\n\n }\n\n\n\n // Words ending in y that are preceded by a consonant will be pluralized by\n\n // replacing y with -ies (eg. puppies).\n\n if word.ends_with('y') {\n\n if let Some(c) = word.chars().nth(word.len() - 2) {\n\n if !matches!(c, 'a' | 'e' | 'i' | 'o' | 'u') {\n\n // Remove 'y' and add `ies`\n\n let mut chars = word.chars();\n\n chars.next_back();\n\n return format!(\"{}ies\", chars.as_str());\n", "file_path": "kube-derive/src/custom_resource.rs", "rank": 26, "score": 116486.89718983223 }, { "content": "// This function lets the app handle an added/modified event from k8s\n\nfn handle_event(ev: Event) -> anyhow::Result<()> {\n\n info!(\n\n \"New Event: {} (via {} {})\",\n\n ev.message.unwrap(),\n\n ev.involved_object.kind.unwrap(),\n\n ev.involved_object.name.unwrap()\n\n );\n\n Ok(())\n\n}\n", "file_path": "examples/event_watcher.rs", "rank": 27, "score": 113075.12950924614 }, { "content": "/// Returns Kubernetes address from specified environment variables.\n\npub fn kube_server() -> Option<String> {\n\n let host = kube_host()?;\n\n let port = kube_port()?;\n\n Some(format!(\"https://{}:{}\", host, 
port))\n\n}\n\n\n", "file_path": "kube/src/config/incluster_config.rs", "rank": 28, "score": 113043.9195520237 }, { "content": "pub fn kube_dns() -> http::Uri {\n\n http::Uri::builder()\n\n .scheme(\"https\")\n\n .authority(SERVICE_DNS)\n\n .path_and_query(\"/\")\n\n .build()\n\n .unwrap()\n\n}\n\n\n", "file_path": "kube/src/config/incluster_config.rs", "rank": 29, "score": 113039.04819998564 }, { "content": "/// Returns kubeconfig path from `$HOME/.kube/config`.\n\npub fn default_kube_path() -> Option<PathBuf> {\n\n home_dir().map(|h| h.join(\".kube\").join(\"config\"))\n\n}\n\n\n", "file_path": "kube/src/config/utils.rs", "rank": 30, "score": 111347.33420696642 }, { "content": "fn secret_name_for_configmap(cm: &ConfigMap) -> Result<String> {\n\n Ok(format!(\"cm---{}\", cm.metadata.name.as_deref().context(NoName)?))\n\n}\n\n\n\nasync fn apply(cm: ConfigMap, secrets: &kube::Api<Secret>) -> Result<ReconcilerAction> {\n\n println!(\"Reconciling {:?}\", cm);\n\n let secret_name = secret_name_for_configmap(&cm)?;\n\n secrets\n\n .patch(\n\n &secret_name,\n\n &PatchParams::apply(\"configmap-secret-syncer.nullable.se\"),\n\n &Patch::Apply(Secret {\n\n metadata: ObjectMeta {\n\n name: Some(secret_name.clone()),\n\n ..ObjectMeta::default()\n\n },\n\n string_data: cm.data,\n\n data: cm.binary_data,\n\n ..Secret::default()\n\n }),\n", "file_path": "examples/secret_syncer.rs", "rank": 31, "score": 109578.78337688933 }, { "content": "/// This processes the `status` field of a CRD.\n\n///\n\n/// As it is optional some features will be turned on or off depending on whether it's available or not.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `root ident`: The identity (name) of the main CRD struct (the one we generate in this macro)\n\n/// * `status`: The optional name of the `status` struct to use\n\n/// * `visibility`: Desired visibility of the generated field\n\n///\n\n/// returns: A `StatusInformation` struct\n\nfn process_status(root_ident: &Ident, status: &Option<String>, 
visibility: &Visibility) -> StatusInformation {\n\n if let Some(status_name) = &status {\n\n let ident = format_ident!(\"{}\", status_name);\n\n StatusInformation {\n\n field: quote! {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n #visibility status: Option<#ident>,\n\n },\n\n default: quote! { status: None, },\n\n impl_hasstatus: quote! {\n\n impl ::kube::core::object::HasStatus for #root_ident {\n\n\n\n type Status = #ident;\n\n\n\n fn status(&self) -> Option<&#ident> {\n\n self.status.as_ref()\n\n }\n\n\n\n fn status_mut(&mut self) -> &mut Option<#ident> {\n\n &mut self.status\n", "file_path": "kube-derive/src/custom_resource.rs", "rank": 32, "score": 104914.83481128149 }, { "content": "fn to_absolute(dir: &Path, file: &str) -> Option<String> {\n\n let path = Path::new(&file);\n\n if path.is_relative() {\n\n dir.join(path).to_str().map(str::to_owned)\n\n } else {\n\n None\n\n }\n\n}\n\n\n\nimpl Cluster {\n\n pub(crate) fn load_certificate_authority(&self) -> Result<Option<Vec<u8>>> {\n\n if self.certificate_authority.is_none() && self.certificate_authority_data.is_none() {\n\n return Ok(None);\n\n }\n\n let res =\n\n utils::data_or_file_with_base64(&self.certificate_authority_data, &self.certificate_authority)?;\n\n Ok(Some(res))\n\n }\n\n}\n\n\n", "file_path": "kube/src/config/file_config.rs", "rank": 33, "score": 104113.39944780967 }, { "content": "fn conditions(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {\n\n serde_json::from_value(serde_json::json!({\n\n \"type\": \"array\",\n\n \"x-kubernetes-list-type\": \"map\",\n\n \"x-kubernetes-list-map-keys\": [\"type\"],\n\n \"items\": {\n\n \"type\": \"object\",\n\n \"properties\": {\n\n \"lastTransitionTime\": { \"format\": \"date-time\", \"type\": \"string\" },\n\n \"message\": { \"type\": \"string\" },\n\n \"observedGeneration\": { \"type\": \"integer\", \"format\": \"int64\", \"default\": 0 },\n\n \"reason\": { \"type\": \"string\" },\n\n \"status\": { \"type\": \"string\" 
},\n\n \"type\": { \"type\": \"string\" }\n\n },\n\n \"required\": [\n\n \"lastTransitionTime\",\n\n \"message\",\n\n \"reason\",\n\n \"status\",\n\n \"type\"\n\n ],\n\n },\n\n }))\n\n .unwrap()\n\n}\n\n\n\n// some tests\n\n// Verify FooCrd::crd\n", "file_path": "examples/crd_derive.rs", "rank": 34, "score": 100606.79981358448 }, { "content": "#[allow(clippy::type_complexity)]\n\nfn trystream_split_result<S>(\n\n stream: S,\n\n) -> (\n\n SplitCase<IntoStream<S>, S::Ok>,\n\n SplitCase<IntoStream<S>, S::Error>,\n\n)\n\nwhere\n\n S: TryStream + Unpin,\n\n S::Ok: Debug,\n\n S::Error: Debug,\n\n{\n\n let stream = Arc::new(Mutex::new(stream.into_stream().peekable()));\n\n (\n\n SplitCase {\n\n inner: stream.clone(),\n\n should_consume_item: Result::is_ok,\n\n try_extract_item_case: Result::ok,\n\n },\n\n SplitCase {\n\n inner: stream,\n", "file_path": "kube-runtime/src/utils.rs", "rank": 35, "score": 97579.7428535749 }, { "content": "/// Watches a Kubernetes Resource for changes continuously\n\n///\n\n/// Compared to [`Api::watch`], this automatically tries to recover the stream upon errors.\n\n///\n\n/// Errors from the underlying watch are propagated, after which the stream will go into recovery mode on the next poll.\n\n/// You can apply your own backoff by not polling the stream for a duration after errors.\n\n/// Keep in mind that some [`TryStream`](futures::TryStream) combinators (such as\n\n/// [`try_for_each`](futures::TryStreamExt::try_for_each) and [`try_concat`](futures::TryStreamExt::try_concat))\n\n/// will terminate eagerly as soon as they receive an [`Err`].\n\n///\n\n/// This is intended to provide a safe and atomic input interface for a state store like a [`reflector`],\n\n/// direct users may want to flatten composite events with [`try_flatten_applied`]:\n\n///\n\n/// ```no_run\n\n/// use kube::{api::{Api, ListParams, ResourceExt}, Client};\n\n/// use kube_runtime::{utils::try_flatten_applied, watcher};\n\n/// use k8s_openapi::api::core::v1::Pod;\n\n/// 
use futures::{StreamExt, TryStreamExt};\n\n/// #[tokio::main]\n\n/// async fn main() -> Result<(), kube_runtime::watcher::Error> {\n\n/// let client = Client::try_default().await.unwrap();\n\n/// let pods: Api<Pod> = Api::namespaced(client, \"apps\");\n\n/// let watcher = watcher(pods, ListParams::default());\n\n/// try_flatten_applied(watcher)\n\n/// .try_for_each(|p| async move {\n\n/// println!(\"Applied: {}\", p.name());\n\n/// Ok(())\n\n/// })\n\n/// .await?;\n\n/// Ok(())\n\n/// }\n\n/// ```\n\n/// [`try_flatten_applied`]: super::utils::try_flatten_applied\n\n/// [`reflector`]: super::reflector::reflector\n\n/// [`Api::watch`]: kube::Api::watch\n\n///\n\n/// # Recovery\n\n///\n\n/// (The details of recovery are considered an implementation detail and should not be relied on to be stable, but are\n\n/// documented here for posterity.)\n\n///\n\n/// If the watch connection is interrupted then we attempt to restart the watch using the last\n\n/// [resource versions](https://kubernetes.io/docs/reference/using-api/api-concepts/#efficient-detection-of-changes)\n\n/// that we have seen on the stream. 
If this is successful then the stream is simply resumed from where it left off.\n\n/// If this fails because the resource version is no longer valid then we start over with a new stream, starting with\n\n/// an [`Event::Restarted`].\n\npub fn watcher<K: Resource + Clone + DeserializeOwned + Debug + Send + 'static>(\n\n api: Api<K>,\n\n list_params: ListParams,\n\n) -> impl Stream<Item = Result<Event<K>>> + Send {\n\n futures::stream::unfold(\n\n (api, list_params, State::Empty),\n\n |(api, list_params, state)| async {\n\n let (event, state) = step(&api, &list_params, state).await;\n\n Some((event, (api, list_params, state)))\n\n },\n\n )\n\n}\n", "file_path": "kube-runtime/src/watcher.rs", "rank": 36, "score": 96235.12210817703 }, { "content": "// https://kubernetes.io/docs/reference/using-api/server-side-apply/#merge-strategy\n\nfn set_listable_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema::Schema {\n\n serde_json::from_value(serde_json::json!({\n\n \"type\": \"array\",\n\n \"items\": {\n\n \"format\": \"u32\",\n\n \"minium\": 0,\n\n \"type\": \"integer\"\n\n },\n\n \"x-kubernetes-list-type\": \"set\"\n\n }))\n\n .unwrap()\n\n}\n\n\n", "file_path": "examples/crd_derive_schema.rs", "rank": 37, "score": 96146.41868842137 }, { "content": "#[proc_macro_derive(CustomResource, attributes(kube))]\n\npub fn derive_custom_resource(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n custom_resource::derive(proc_macro2::TokenStream::from(input)).into()\n\n}\n", "file_path": "kube-derive/src/lib.rs", "rank": 38, "score": 95602.33557078961 }, { "content": "/// Extensions to [`Config`](crate::Config) for custom [`Client`](crate::Client).\n\n///\n\n/// See [`Client::new`](crate::Client::new) for an example.\n\n///\n\n/// This trait is sealed and cannot be implemented.\n\npub trait ConfigExt: private::Sealed {\n\n /// Layer to set the base URI of requests to the configured server.\n\n fn base_uri_layer(&self) -> BaseUriLayer;\n\n\n\n /// Optional 
layer to set up `Authorization` header depending on the config.\n\n fn auth_layer(&self) -> Result<Option<AuthLayer>>;\n\n\n\n /// Create [`hyper_tls::HttpsConnector`] based on config.\n\n ///\n\n /// # Example\n\n ///\n\n /// ```rust\n\n /// # async fn doc() -> Result<(), Box<dyn std::error::Error>> {\n\n /// # use kube::{client::ConfigExt, Config};\n\n /// let config = Config::infer().await?;\n\n /// let https = config.native_tls_https_connector()?;\n\n /// let hyper_client: hyper::Client<_, hyper::Body> = hyper::Client::builder().build(https);\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n", "file_path": "kube/src/client/config_ext.rs", "rank": 39, "score": 95193.6455710738 }, { "content": "/// Flattens each item in the list following the rules of [`watcher::Event::into_iter_touched`].\n\npub fn try_flatten_touched<K, S: TryStream<Ok = watcher::Event<K>>>(\n\n stream: S,\n\n) -> impl Stream<Item = Result<K, S::Error>> {\n\n stream\n\n .map_ok(|event| stream::iter(event.into_iter_touched().map(Ok)))\n\n .try_flatten()\n\n}\n\n\n\n/// Allows splitting a `Stream` into several streams that each emit a disjoint subset of the input stream's items,\n\n/// like a streaming variant of pattern matching.\n\n///\n\n/// NOTE: The cases MUST be reunited into the same final stream (using `futures::stream::select` or similar),\n\n/// since cases for rejected items will *not* register wakeup correctly, and may otherwise lose items and/or deadlock.\n\n///\n\n/// NOTE: The whole set of cases will deadlock if there is ever an item that no live case wants to consume.\n\n#[pin_project]\n\npub(crate) struct SplitCase<S: Stream, Case> {\n\n // Future-unaware `Mutex` is OK because it's only taken inside single poll()s\n\n inner: Arc<Mutex<Peekable<S>>>,\n\n /// Tests whether an item from the stream should be consumed\n", "file_path": "kube-runtime/src/utils.rs", "rank": 40, "score": 93442.8563558476 }, { "content": "/// Flattens each item in the list following the rules of 
[`watcher::Event::into_iter_applied`].\n\npub fn try_flatten_applied<K, S: TryStream<Ok = watcher::Event<K>>>(\n\n stream: S,\n\n) -> impl Stream<Item = Result<K, S::Error>> {\n\n stream\n\n .map_ok(|event| stream::iter(event.into_iter_applied().map(Ok)))\n\n .try_flatten()\n\n}\n\n\n", "file_path": "kube-runtime/src/utils.rs", "rank": 41, "score": 93442.8563558476 }, { "content": "// The main handler and core business logic, failures here implies rejected applies\n\nfn mutate(res: AdmissionResponse, obj: &DynamicObject) -> Result<AdmissionResponse, Box<dyn Error>> {\n\n // If the resource contains an \"illegal\" label, we reject it\n\n if obj.labels().contains_key(\"illegal\") {\n\n return Err(\"Resource contained 'illegal' label\".into());\n\n }\n\n\n\n // If the resource doesn't contain \"admission\", we add it to the resource.\n\n if !obj.labels().contains_key(\"admission\") {\n\n let patches = vec![\n\n // Ensure labels exist before adding a key to it\n\n json_patch::PatchOperation::Add(json_patch::AddOperation {\n\n path: \"/metadata/labels\".into(),\n\n value: serde_json::json!({}),\n\n }),\n\n // Add our label\n\n json_patch::PatchOperation::Add(json_patch::AddOperation {\n\n path: \"/metadata/labels/admission\".into(),\n\n value: serde_json::Value::String(\"modified-by-admission-controller\".into()),\n\n }),\n\n ];\n\n Ok(res.with_patch(json_patch::Patch(patches))?)\n\n } else {\n\n Ok(res)\n\n }\n\n}\n", "file_path": "examples/admission_controller.rs", "rank": 42, "score": 92641.27762451596 }, { "content": "fn append_new_named<T, F>(base: &mut Vec<T>, next: Vec<T>, f: F)\n\nwhere\n\n F: Fn(&T) -> &String,\n\n{\n\n use std::collections::HashSet;\n\n base.extend({\n\n let existing = base.iter().map(|x| f(x)).collect::<HashSet<_>>();\n\n next.into_iter()\n\n .filter(|x| !existing.contains(f(x)))\n\n .collect::<Vec<_>>()\n\n });\n\n}\n\n\n", "file_path": "kube/src/config/file_config.rs", "rank": 43, "score": 88028.31299695748 }, { "content": "/// Stream 
transformer that delays and deduplicates [`Stream`] items.\n\n///\n\n/// Items are deduplicated: if an item is submitted multiple times before being emitted then it will only be\n\n/// emitted at the earliest `Instant`.\n\n///\n\n/// Items can be \"held pending\" if the item doesn't match some predicate. Items trying to schedule an item\n\n/// that is already pending will be discarded (since it is already going to be emitted as soon as the consumer\n\n/// is ready for it).\n\n///\n\n/// The [`Scheduler`] terminates as soon as `requests` does.\n\npub fn scheduler<T: Eq + Hash + Clone, S: Stream<Item = ScheduleRequest<T>>>(requests: S) -> Scheduler<T, S> {\n\n Scheduler::new(requests)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::utils::KubeRuntimeStreamExt;\n\n\n\n use super::{scheduler, ScheduleRequest};\n\n use futures::{channel::mpsc, future, pin_mut, poll, stream, FutureExt, SinkExt, StreamExt};\n\n use std::task::Poll;\n\n use tokio::time::{advance, pause, sleep, Duration, Instant};\n\n\n\n fn unwrap_poll<T>(poll: Poll<T>) -> T {\n\n if let Poll::Ready(x) = poll {\n\n x\n\n } else {\n\n panic!(\"Tried to unwrap a pending poll!\")\n\n }\n\n }\n", "file_path": "kube-runtime/src/scheduler.rs", "rank": 44, "score": 79434.10217436137 }, { "content": "// Data we want access to in error/reconcile calls\n\nstruct Data {\n\n client: Client,\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<()> {\n\n std::env::set_var(\"RUST_LOG\", \"info,kube-runtime=debug,kube=debug\");\n\n env_logger::init();\n\n let client = Client::try_default().await?;\n\n\n\n let cmgs = Api::<ConfigMapGenerator>::all(client.clone());\n\n let cms = Api::<ConfigMap>::all(client.clone());\n\n\n\n log::info!(\"starting configmapgen-controller\");\n\n log::info!(\"press <enter> to force a reconciliation of all objects\");\n\n\n\n let (mut reload_tx, reload_rx) = futures::channel::mpsc::channel(0);\n\n // Using a regular background thread since tokio::io::stdin() doesn't allow aborting 
reads,\n\n // and its worker prevents the Tokio runtime from shutting down.\n\n std::thread::spawn(move || {\n", "file_path": "examples/configmapgen_controller.rs", "rank": 45, "score": 72359.89386805089 }, { "content": "/// Internal metadata for a scheduled message.\n\nstruct ScheduledEntry {\n\n run_at: Instant,\n\n queue_key: delay_queue::Key,\n\n}\n\n\n\n#[pin_project(project = SchedulerProj)]\n\npub struct Scheduler<T, R> {\n\n /// Queue of already-scheduled messages.\n\n ///\n\n /// To ensure that the metadata is kept up-to-date, use `schedule_message` and\n\n /// `poll_pop_queue_message` rather than manipulating this directly.\n\n queue: DelayQueue<T>,\n\n /// Metadata for all currently scheduled messages. Used to detect duplicate messages.\n\n scheduled: HashMap<T, ScheduledEntry>,\n\n /// Messages that are scheduled to have happened, but have been held using `hold_unless`.\n\n pending: HashSet<T>,\n\n /// Incoming queue of scheduling requests.\n\n #[pin]\n\n requests: Fuse<R>,\n\n}\n", "file_path": "kube-runtime/src/scheduler.rs", "rank": 46, "score": 70086.56260452443 }, { "content": "struct FinalizerState {\n\n finalizer_index: Option<usize>,\n\n is_deleting: bool,\n\n}\n\n\n\nimpl FinalizerState {\n\n fn for_object<K: Resource>(obj: &K, finalizer_name: &str) -> Self {\n\n Self {\n\n finalizer_index: obj\n\n .finalizers()\n\n .iter()\n\n .enumerate()\n\n .find(|(_, fin)| *fin == finalizer_name)\n\n .map(|(i, _)| i),\n\n is_deleting: obj.meta().deletion_timestamp.is_some(),\n\n }\n\n }\n\n}\n\n\n\n/// Reconcile an object in a way that requires cleanup before an object can be deleted. 
It does this by\n", "file_path": "kube-runtime/src/finalizer.rs", "rank": 47, "score": 70086.56260452443 }, { "content": "#[derive(Debug, Default, FromDeriveInput)]\n\n#[darling(attributes(kube))]\n\nstruct KubeAttrs {\n\n group: String,\n\n version: String,\n\n kind: String,\n\n #[darling(default, rename = \"struct\")]\n\n kind_struct: Option<String>,\n\n /// lowercase plural of kind (inferred if omitted)\n\n #[darling(default)]\n\n plural: Option<String>,\n\n /// singular defaults to lowercased kind\n\n #[darling(default)]\n\n singular: Option<String>,\n\n #[darling(default)]\n\n namespaced: bool,\n\n #[darling(default = \"default_apiext\")]\n\n apiextensions: String,\n\n #[darling(multiple, rename = \"derive\")]\n\n derives: Vec<String>,\n\n #[darling(default)]\n\n status: Option<String>,\n\n #[darling(multiple, rename = \"category\")]\n\n categories: Vec<String>,\n\n #[darling(multiple, rename = \"shortname\")]\n\n shortnames: Vec<String>,\n\n #[darling(multiple, rename = \"printcolumn\")]\n\n printcolums: Vec<String>,\n\n #[darling(default)]\n\n scale: Option<String>,\n\n}\n\n\n", "file_path": "kube-derive/src/custom_resource.rs", "rank": 48, "score": 69038.28121129032 }, { "content": "#[derive(CustomResource, Debug, Clone, Deserialize, Serialize, JsonSchema)]\n\n#[kube(group = \"nullable.se\", version = \"v1\", kind = \"ConfigMapGenerator\")]\n\n#[kube(shortname = \"cmg\", namespaced)]\n\nstruct ConfigMapGeneratorSpec {\n\n content: String,\n\n}\n\n\n", "file_path": "examples/configmapgen_controller.rs", "rank": 49, "score": 69038.28121129032 }, { "content": "struct StatusInformation {\n\n /// The code to be used for the field in the main struct\n\n field: TokenStream,\n\n /// The initialization code to use in a `Default` and `::new()` implementation\n\n default: TokenStream,\n\n /// The implementation code for the `HasStatus` trait\n\n impl_hasstatus: TokenStream,\n\n}\n\n\n", "file_path": "kube-derive/src/custom_resource.rs", "rank": 50, "score": 
69038.28121129032 }, { "content": "#[derive(CustomResource, Serialize, Deserialize, Debug, Clone, JsonSchema)]\n\nstruct FooSpec {\n\n foo: String,\n\n}\n\n\n", "file_path": "kube-derive/tests/ui/missing_required.rs", "rank": 51, "score": 68042.9680620303 }, { "content": "#[kube(apiextensions = \"v1\")]\n\nstruct FooSpec {\n\n non_nullable: String,\n\n\n\n #[serde(default = \"default_value\")]\n\n non_nullable_with_default: String,\n\n\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n nullable_skipped: Option<String>,\n\n nullable: Option<String>,\n\n\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n #[serde(default = \"default_nullable\")]\n\n nullable_skipped_with_default: Option<String>,\n\n\n\n #[serde(default = \"default_nullable\")]\n\n nullable_with_default: Option<String>,\n\n\n\n // Using feature `chrono`\n\n timestamp: DateTime<Utc>,\n\n}\n\n\n", "file_path": "kube-derive/tests/crd_schema_test.rs", "rank": 52, "score": 68042.9680620303 }, { "content": "#[derive(CustomResource, Serialize, Deserialize, Debug, Clone, JsonSchema)]\n\n#[kube(group = \"clux.dev\", version = \"v1\", kind = \"Foo\", shortnames = \"foo\")]\n\nstruct FooSpec {\n\n foo: String,\n\n}\n\n\n", "file_path": "kube-derive/tests/ui/fail_with_suggestion.rs", "rank": 53, "score": 68042.9680620303 }, { "content": "// Internal state of an attached process\n\nstruct AttachedProcessState {\n\n waker: Option<Waker>,\n\n finished: bool,\n\n status: Option<Status>,\n\n stdin_writer: Option<DuplexStream>,\n\n stdout_reader: Option<DuplexStream>,\n\n stderr_reader: Option<DuplexStream>,\n\n}\n\n\n\nconst MAX_BUF_SIZE: usize = 1024;\n\n\n\n/// Represents an attached process in a container for [`attach`] and [`exec`].\n\n///\n\n/// Resolves when the connection terminates with an optional [`Status`].\n\n/// Provides access to `stdin`, `stdout`, and `stderr` if attached.\n\n///\n\n/// [`attach`]: crate::Api::attach\n\n/// [`exec`]: crate::Api::exec\n\n/// [`Status`]: 
k8s_openapi::apimachinery::pkg::apis::meta::v1::Status\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"ws\")))]\n", "file_path": "kube/src/api/remote_command.rs", "rank": 54, "score": 68042.9680620303 }, { "content": "/// An accessor trait for a kubernetes Resource.\n\n///\n\n/// This is for a subset of Kubernetes type that do not end in `List`.\n\n/// These types, using [`ObjectMeta`], SHOULD all have required properties:\n\n/// - `.metadata`\n\n/// - `.metadata.name`\n\n///\n\n/// And these optional properties:\n\n/// - `.metadata.namespace`\n\n/// - `.metadata.resource_version`\n\n///\n\n/// This avoids a bunch of the unnecessary unwrap mechanics for apps.\n\npub trait Resource {\n\n /// Type information for types that do not know their resource information at compile time.\n\n ///\n\n /// Types that know their metadata at compile time should select `DynamicType = ()`.\n\n /// Types that require some information at runtime should select `DynamicType`\n\n /// as type of this information.\n\n ///\n\n /// See [`DynamicObject`](crate::dynamic::DynamicObject) for a valid implementation of non-k8s-openapi resources.\n\n type DynamicType: Send + Sync + 'static;\n\n\n\n /// Returns kind of this object\n\n fn kind(dt: &Self::DynamicType) -> Cow<'_, str>;\n\n /// Returns group of this object\n\n fn group(dt: &Self::DynamicType) -> Cow<'_, str>;\n\n /// Returns version of this object\n\n fn version(dt: &Self::DynamicType) -> Cow<'_, str>;\n\n /// Returns apiVersion of this object\n\n fn api_version(dt: &Self::DynamicType) -> Cow<'_, str> {\n\n let group = Self::group(dt);\n\n if group.is_empty() {\n", "file_path": "kube-core/src/resource.rs", "rank": 55, "score": 66031.92975323803 }, { "content": "/// A trait to access the `status` of a Kubernetes resource.\n\n///\n\n/// Some built-in Kubernetes resources and custom resources do have a `status` field.\n\n/// This trait can be used to access this field.\n\n///\n\n/// This trait is automatically implemented by the kube-derive 
macro and is _not_ currently\n\n/// implemented for the Kubernetes API objects from `k8s_openapi`.\n\n///\n\n/// Note: Not all Kubernetes resources have a status (e.g. `ConfigMap`, `Secret`, ...).\n\npub trait HasStatus {\n\n /// The type of the `status` object\n\n type Status;\n\n\n\n /// Returns an optional reference to the `status` of the object\n\n fn status(&self) -> Option<&Self::Status>;\n\n\n\n /// Returns an optional mutable reference to the `status` of the object\n\n fn status_mut(&mut self) -> &mut Option<Self::Status>;\n\n}\n\n\n\n// -------------------------------------------------------\n\n\n\n/// A standard Kubernetes object with `.spec` and `.status`.\n\n///\n\n/// This is a convenience struct provided for serialization/deserialization.\n\n/// It is slightly stricter than ['DynamicObject`] in that it enforces the spec/status convention,\n\n/// and as such will not in general work with all api-discovered resources.\n\n///\n\n/// This can be used to tie existing resources to smaller, local struct variants to optimize for memory use.\n", "file_path": "kube-core/src/object.rs", "rank": 56, "score": 66031.80082881366 }, { "content": "/// A trait to access the `spec` of a Kubernetes resource.\n\n///\n\n/// Some built-in Kubernetes resources and all custom resources do have a `spec` field.\n\n/// This trait can be used to access this field.\n\n///\n\n/// This trait is automatically implemented by the kube-derive macro and is _not_ currently\n\n/// implemented for the Kubernetes API objects from `k8s_openapi`.\n\n///\n\n/// Note: Not all Kubernetes resources have a spec (e.g. 
`ConfigMap`, `Secret`, ...).\n\npub trait HasSpec {\n\n /// The type of the `spec` of this resource\n\n type Spec;\n\n\n\n /// Returns a reference to the `spec` of the object\n\n fn spec(&self) -> &Self::Spec;\n\n\n\n /// Returns a mutable reference to the `spec` of the object\n\n fn spec_mut(&mut self) -> &mut Self::Spec;\n\n}\n\n\n", "file_path": "kube-core/src/object.rs", "rank": 57, "score": 66031.80082881366 }, { "content": "#[cfg(feature = \"ws\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"ws\")))]\n\npub trait Execute {}\n\n\n\n#[cfg(feature = \"ws\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"ws\")))]\n\nimpl Execute for k8s_openapi::api::core::v1::Pod {}\n\n\n\n#[cfg(feature = \"ws\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"ws\")))]\n\nimpl<K> Api<K>\n\nwhere\n\n K: Clone + DeserializeOwned + Execute,\n\n{\n\n /// Execute a command in a pod\n\n pub async fn exec<I: Debug, T>(\n\n &self,\n\n name: &str,\n\n command: I,\n\n ap: &AttachParams,\n\n ) -> Result<AttachedProcess>\n\n where\n\n I: IntoIterator<Item = T>,\n\n T: Into<String>,\n\n {\n\n let mut req = self.request.exec(name, command, ap)?;\n\n req.extensions_mut().insert(\"exec\");\n\n let stream = self.client.connect(req).await?;\n\n Ok(AttachedProcess::new(stream, ap))\n\n }\n\n}\n", "file_path": "kube/src/api/subresource.rs", "rank": 58, "score": 66027.99820801511 }, { "content": "/// Marker trait for objects that has logs\n\npub trait Log {}\n\n\n\nimpl Log for k8s_openapi::api::core::v1::Pod {}\n\n\n\nimpl<K> Api<K>\n\nwhere\n\n K: DeserializeOwned + Log,\n\n{\n\n /// Fetch logs as a string\n\n pub async fn logs(&self, name: &str, lp: &LogParams) -> Result<String> {\n\n let mut req = self.request.logs(name, lp)?;\n\n req.extensions_mut().insert(\"logs\");\n\n self.client.request_text(req).await\n\n }\n\n\n\n /// Fetch logs as a stream of bytes\n\n pub async fn log_stream(&self, name: &str, lp: &LogParams) -> Result<impl Stream<Item = Result<Bytes>>> {\n\n let mut req = self.request.logs(name, 
lp)?;\n\n req.extensions_mut().insert(\"log_stream\");\n\n self.client.request_text_stream(req).await\n\n }\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n// Eviction subresource\n\n// ----------------------------------------------------------------------------\n\n\n", "file_path": "kube/src/api/subresource.rs", "rank": 59, "score": 66027.99820801511 }, { "content": "/// Marker trait for objects that can be evicted\n\npub trait Evict {}\n\n\n\nimpl Evict for k8s_openapi::api::core::v1::Pod {}\n\n\n\nimpl<K> Api<K>\n\nwhere\n\n K: DeserializeOwned + Evict,\n\n{\n\n /// Create an eviction\n\n pub async fn evict(&self, name: &str, ep: &EvictParams) -> Result<Status> {\n\n let mut req = self.request.evict(name, ep)?;\n\n req.extensions_mut().insert(\"evict\");\n\n self.client.request::<Status>(req).await\n\n }\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n// Attach subresource\n\n// ----------------------------------------------------------------------------\n\n\n", "file_path": "kube/src/api/subresource.rs", "rank": 60, "score": 66027.99820801511 }, { "content": "#[cfg(feature = \"ws\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"ws\")))]\n\npub trait Attach {}\n\n\n\n#[cfg(feature = \"ws\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"ws\")))]\n\nimpl Attach for k8s_openapi::api::core::v1::Pod {}\n\n\n\n#[cfg(feature = \"ws\")]\n\n#[cfg_attr(docsrs, doc(cfg(feature = \"ws\")))]\n\nimpl<K> Api<K>\n\nwhere\n\n K: Clone + DeserializeOwned + Attach,\n\n{\n\n /// Attach to pod\n\n pub async fn attach(&self, name: &str, ap: &AttachParams) -> Result<AttachedProcess> {\n\n let mut req = self.request.attach(name, ap)?;\n\n req.extensions_mut().insert(\"attach\");\n\n let stream = self.client.connect(req).await?;\n\n Ok(AttachedProcess::new(stream, ap))\n\n }\n\n}\n\n\n\n// ----------------------------------------------------------------------------\n\n// Exec subresource\n\n// 
----------------------------------------------------------------------------\n", "file_path": "kube/src/api/subresource.rs", "rank": 61, "score": 66027.99820801511 }, { "content": "/// Restartable Resource marker trait\n\npub trait Restart {}\n\n\n\nimpl Restart for Deployment {}\n\nimpl Restart for DaemonSet {}\n\nimpl Restart for StatefulSet {}\n\nimpl Restart for ReplicaSet {}\n\n\n\nimpl Request {\n\n /// Restart a resource\n\n pub fn restart(&self, name: &str) -> Result<http::Request<Vec<u8>>> {\n\n let patch = serde_json::json!({\n\n \"spec\": {\n\n \"template\": {\n\n \"metadata\": {\n\n \"annotations\": {\n\n \"kube.kubernetes.io/restartedAt\": Utc::now().to_rfc3339()\n\n }\n\n }\n\n }\n\n }\n\n });\n\n\n\n let pparams = PatchParams::default();\n\n self.patch(name, &pparams, &Patch::Merge(patch))\n\n }\n\n}\n", "file_path": "kube-core/src/util.rs", "rank": 62, "score": 66027.99820801511 }, { "content": "fn main() {\n\n println!(\"Kind {}\", FooCrd::kind(&()));\n\n let mut foo = FooCrd::new(\"hi\", MyFoo {\n\n name: \"hi\".into(),\n\n info: None,\n\n });\n\n foo.status = Some(FooStatus {\n\n is_bad: true,\n\n conditions: vec![],\n\n });\n\n println!(\"Spec: {:?}\", foo.spec);\n\n let crd = serde_json::to_string_pretty(&FooCrd::crd()).unwrap();\n\n println!(\"Foo CRD: \\n{}\", crd);\n\n\n\n println!(\"Spec (via HasSpec): {:?}\", foo.spec());\n\n println!(\"Status (via HasStatus): {:?}\", foo.status());\n\n}\n\n\n", "file_path": "examples/crd_derive.rs", "rank": 63, "score": 65384.98347196086 }, { "content": " pub trait Sealed {}\n\n impl Sealed for super::Config {}\n\n}\n\n\n\nimpl ConfigExt for Config {\n\n fn base_uri_layer(&self) -> BaseUriLayer {\n\n BaseUriLayer::new(self.cluster_url.clone())\n\n }\n\n\n\n fn auth_layer(&self) -> Result<Option<AuthLayer>> {\n\n Ok(match Auth::try_from(&self.auth_info)? 
{\n\n Auth::None => None,\n\n Auth::Basic(user, pass) => Some(AuthLayer(Either::A(\n\n AddAuthorizationLayer::basic(&user, &pass).as_sensitive(true),\n\n ))),\n\n Auth::Bearer(token) => Some(AuthLayer(Either::A(\n\n AddAuthorizationLayer::bearer(&token).as_sensitive(true),\n\n ))),\n\n Auth::RefreshableToken(r) => Some(AuthLayer(Either::B(RefreshTokenLayer::new(r)))),\n\n })\n", "file_path": "kube/src/client/config_ext.rs", "rank": 64, "score": 64984.52780636615 }, { "content": "#[test]\n\nfn verify_crd() {\n\n let output = serde_json::json!({\n\n \"apiVersion\": \"apiextensions.k8s.io/v1\",\n\n \"kind\": \"CustomResourceDefinition\",\n\n \"metadata\": {\n\n \"name\": \"fooz.clux.dev\"\n\n },\n\n \"spec\": {\n\n \"group\": \"clux.dev\",\n\n \"names\": {\n\n \"kind\": \"Foo\",\n\n \"plural\": \"fooz\",\n\n \"shortNames\": [\"f\"],\n\n \"singular\": \"foo\",\n\n \"categories\": []\n\n },\n\n \"scope\": \"Namespaced\",\n\n \"versions\": [\n\n {\n\n \"name\": \"v1\",\n", "file_path": "examples/crd_derive.rs", "rank": 65, "score": 64232.86572605418 }, { "content": "#[test]\n\nfn verify_resource() {\n\n use static_assertions::{assert_impl_all, assert_impl_one};\n\n assert_eq!(FooCrd::kind(&()), \"Foo\");\n\n assert_eq!(FooCrd::group(&()), \"clux.dev\");\n\n assert_eq!(FooCrd::version(&()), \"v1\");\n\n assert_eq!(FooCrd::api_version(&()), \"clux.dev/v1\");\n\n assert_impl_all!(FooCrd: Resource, Default);\n\n assert_impl_one!(MyFoo: JsonSchema);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn verify_url_gen() {\n\n let url = FooCrd::url_path(&(), Some(\"myns\".into()));\n\n assert_eq!(url, \"/apis/clux.dev/v1/namespaces/myns/fooz\");\n\n}\n\n\n", "file_path": "examples/crd_derive.rs", "rank": 66, "score": 64232.86572605418 }, { "content": "#[test]\n\nfn verify_default() {\n\n let fdef = FooCrd::default();\n\n let ser = serde_yaml::to_string(&fdef).unwrap();\n\n let exp = r#\"---\n\napiVersion: clux.dev/v1\n\nkind: Foo\n\nmetadata: {}\n\nspec:\n\n name: \"\"\n\n\"#;\n\n 
assert_eq!(exp, ser);\n\n}\n", "file_path": "examples/crd_derive.rs", "rank": 67, "score": 64232.86572605418 }, { "content": "#[cfg(feature = \"schema\")]\n\nfn main() {\n\n eprintln!(\"This example it disabled when using the schema feature\");\n\n}\n\n\n\n// Verify CustomResource derivable still\n", "file_path": "examples/crd_derive_no_schema.rs", "rank": 68, "score": 64232.86572605418 }, { "content": " /// Extension trait that will be implemented by kube-derive for legacy v1beta1::CustomResourceDefinitions\n\n ///\n\n /// This trait variant is only implemented with `#[kube(apiextensions = \"v1beta1\")]`\n\n pub trait CustomResourceExt {\n\n /// Helper to generate the legacy CRD without a JsonSchema\n\n ///\n\n /// This is using v1beta1::CustomResourceDefinitions (which will be removed in kubernetes 1.22)\n\n fn crd() -> super::apiexts::v1beta1::CustomResourceDefinition;\n\n /// Helper to return the name of this `CustomResourceDefinition` in kubernetes.\n\n ///\n\n /// This is not the name of an _instance_ of this custom resource but the `CustomResourceDefinition` object itself.\n\n fn crd_name() -> &'static str;\n\n /// Helper to generate the api information type for use with the dynamic `Api`\n\n fn api_resource() -> crate::discovery::ApiResource;\n\n }\n\n}\n\n\n\n/// re-export the current latest version until a newer one is available in cloud providers\n\npub use v1::CustomResourceExt;\n", "file_path": "kube-core/src/crd.rs", "rank": 69, "score": 63993.782555779704 }, { "content": "#[test]\n\nfn log_path() {\n\n use crate::api::{Request, Resource};\n\n use k8s_openapi::api::core::v1 as corev1;\n\n let lp = LogParams {\n\n container: Some(\"blah\".into()),\n\n ..LogParams::default()\n\n };\n\n let url = corev1::Pod::url_path(&(), Some(\"ns\"));\n\n let req = Request::new(url).logs(\"foo\", &lp).unwrap();\n\n assert_eq!(req.uri(), \"/api/v1/namespaces/ns/pods/foo/log?&container=blah\");\n\n}\n\n\n", "file_path": "kube/src/api/subresource.rs", "rank": 70, 
"score": 63142.060998967194 }, { "content": "#[test]\n\nfn evict_path() {\n\n use crate::api::{Request, Resource};\n\n use k8s_openapi::api::core::v1 as corev1;\n\n let ep = EvictParams::default();\n\n let url = corev1::Pod::url_path(&(), Some(\"ns\"));\n\n let req = Request::new(url).evict(\"foo\", &ep).unwrap();\n\n assert_eq!(req.uri(), \"/api/v1/namespaces/ns/pods/foo/eviction?\");\n\n}\n\n\n", "file_path": "kube/src/api/subresource.rs", "rank": 71, "score": 63142.060998967194 }, { "content": "#[cfg(feature = \"ws\")]\n\n#[test]\n\nfn attach_path() {\n\n use crate::api::{Request, Resource};\n\n use k8s_openapi::api::core::v1 as corev1;\n\n let ap = AttachParams {\n\n container: Some(\"blah\".into()),\n\n ..AttachParams::default()\n\n };\n\n let url = corev1::Pod::url_path(&(), Some(\"ns\"));\n\n let req = Request::new(url).attach(\"foo\", &ap).unwrap();\n\n assert_eq!(\n\n req.uri(),\n\n \"/api/v1/namespaces/ns/pods/foo/attach?&stdout=true&stderr=true&container=blah\"\n\n );\n\n}\n\n\n\n/// Marker trait for objects that has attach\n", "file_path": "kube/src/api/subresource.rs", "rank": 72, "score": 63142.060998967194 }, { "content": "#[cfg(feature = \"ws\")]\n\n#[test]\n\nfn exec_path() {\n\n use crate::api::{Request, Resource};\n\n use k8s_openapi::api::core::v1 as corev1;\n\n let ap = AttachParams {\n\n container: Some(\"blah\".into()),\n\n ..AttachParams::default()\n\n };\n\n let url = corev1::Pod::url_path(&(), Some(\"ns\"));\n\n let req = Request::new(url)\n\n .exec(\"foo\", vec![\"echo\", \"foo\", \"bar\"], &ap)\n\n .unwrap();\n\n assert_eq!(\n\n req.uri(),\n\n \"/api/v1/namespaces/ns/pods/foo/exec?&stdout=true&stderr=true&container=blah&command=echo&command=foo&command=bar\"\n\n );\n\n}\n\n\n\n/// Marker trait for objects that has exec\n", "file_path": "kube/src/api/subresource.rs", "rank": 73, "score": 63142.060998967194 }, { "content": "fn main() {}\n", "file_path": "kube-derive/tests/ui/missing_required.rs", "rank": 74, "score": 62107.801745760684 }, 
{ "content": "fn main() {}\n", "file_path": "kube-derive/tests/ui/fail_with_suggestion.rs", "rank": 75, "score": 62107.801745760684 }, { "content": "fn main() {}\n", "file_path": "kube-derive/tests/ui/enum_fails.rs", "rank": 76, "score": 62107.801745760684 }, { "content": "#[test]\n\nfn test_to_plural_native() {\n\n // Extracted from `swagger.json`\n\n #[rustfmt::skip]\n\n let native_kinds = vec![\n\n (\"APIService\", \"apiservices\"),\n\n (\"Binding\", \"bindings\"),\n\n (\"CertificateSigningRequest\", \"certificatesigningrequests\"),\n\n (\"ClusterRole\", \"clusterroles\"), (\"ClusterRoleBinding\", \"clusterrolebindings\"),\n\n (\"ComponentStatus\", \"componentstatuses\"),\n\n (\"ConfigMap\", \"configmaps\"),\n\n (\"ControllerRevision\", \"controllerrevisions\"),\n\n (\"CronJob\", \"cronjobs\"),\n\n (\"CSIDriver\", \"csidrivers\"), (\"CSINode\", \"csinodes\"), (\"CSIStorageCapacity\", \"csistoragecapacities\"),\n\n (\"CustomResourceDefinition\", \"customresourcedefinitions\"),\n\n (\"DaemonSet\", \"daemonsets\"),\n\n (\"Deployment\", \"deployments\"),\n\n (\"Endpoints\", \"endpoints\"), (\"EndpointSlice\", \"endpointslices\"),\n\n (\"Event\", \"events\"),\n\n (\"FlowSchema\", \"flowschemas\"),\n\n (\"HorizontalPodAutoscaler\", \"horizontalpodautoscalers\"),\n", "file_path": "kube-core/src/discovery.rs", "rank": 77, "score": 62107.801745760684 }, { "content": "#[test]\n\nfn test_failures() {\n\n let t = trybuild::TestCases::new();\n\n t.compile_fail(\"tests/ui/*.rs\");\n\n}\n", "file_path": "kube-derive/tests/test_ui.rs", "rank": 78, "score": 62107.801745760684 }, { "content": "/// Helper methods for resources.\n\npub trait ResourceExt: Resource {\n\n /// Returns the name of the resource, panicking if it is\n\n /// missing. 
Use this function if you know that name is set, for example\n\n /// when resource was received from the apiserver.\n\n /// Because of `.metadata.generateName` field, in other contexts name\n\n /// may be missing.\n\n ///\n\n /// For non-panicking alternative, you can directly read `name` field\n\n /// on the `self.meta()`.\n\n fn name(&self) -> String;\n\n /// The namespace the resource is in\n\n fn namespace(&self) -> Option<String>;\n\n /// The resource version\n\n fn resource_version(&self) -> Option<String>;\n\n /// Unique ID (if you delete resource and then create a new\n\n /// resource with the same name, it will have different ID)\n\n fn uid(&self) -> Option<String>;\n\n /// Returns resource labels\n\n fn labels(&self) -> &BTreeMap<String, String>;\n\n /// Provides mutable access to the labels\n", "file_path": "kube-core/src/resource.rs", "rank": 79, "score": 62070.88989650711 }, { "content": " client-key-data: aGVsbG8K\n\n---\n\napiVersion: v1\n\nclusters:\n\n- cluster:\n\n certificate-authority-data: aGVsbG8K\n\n server: https://0.0.0.0:6443\n\n name: k3d-k3s-default\n\ncontexts:\n\n- context:\n\n cluster: k3d-k3s-default\n\n user: admin@k3d-k3s-default\n\n name: k3d-k3s-default\n\ncurrent-context: k3d-k3s-default\n\nkind: Config\n\npreferences: {}\n\nusers:\n\n- name: admin@k3d-k3s-default\n\n user:\n\n client-certificate-data: aGVsbG8K\n", "file_path": "kube/src/config/file_config.rs", "rank": 80, "score": 61350.35934052146 }, { "content": "pub trait BodyStreamExt: Body {\n\n fn into_stream(self) -> IntoStream<Self>\n\n where\n\n Self: Sized,\n\n {\n\n IntoStream::new(self)\n\n }\n\n}\n\n\n\nimpl<T> BodyStreamExt for T where T: Body {}\n", "file_path": "kube/src/client/body.rs", "rank": 81, "score": 61128.97207423503 }, { "content": "#[test]\n\nfn test_kube_host() {\n\n let expected = \"fake.io\";\n\n env::set_var(SERVICE_HOSTENV, expected);\n\n assert_eq!(kube_host().unwrap(), expected);\n\n kube_dns(); // verify kube_dns always unwraps\n\n}\n\n\n", 
"file_path": "kube/src/config/incluster_config.rs", "rank": 82, "score": 61125.80221671782 }, { "content": "#[test]\n\nfn test_kube_port() {\n\n let expected = \"8080\";\n\n env::set_var(SERVICE_PORTENV, expected);\n\n assert_eq!(kube_port().unwrap(), expected);\n\n}\n\n\n", "file_path": "kube/src/config/incluster_config.rs", "rank": 83, "score": 61125.80221671782 }, { "content": "#[cfg(not(feature = \"schema\"))]\n\n#[test]\n\nfn verify_bar_is_a_custom_resource() {\n\n use kube::Resource;\n\n use schemars::JsonSchema; // only for ensuring it's not implemented\n\n use static_assertions::{assert_impl_all, assert_not_impl_any};\n\n\n\n println!(\"Kind {}\", Bar::kind(&()));\n\n let bar = Bar::new(\"five\", MyBar { bars: 5 });\n\n println!(\"Spec: {:?}\", bar.spec);\n\n assert_impl_all!(Bar: kube::Resource);\n\n assert_not_impl_any!(MyBar: JsonSchema); // but no schemars schema implemented\n\n\n\n let crd = Bar::crd_with_manual_schema();\n\n for v in crd.spec.versions {\n\n assert!(v.schema.unwrap().open_api_v3_schema.is_some());\n\n }\n\n}\n", "file_path": "examples/crd_derive_no_schema.rs", "rank": 84, "score": 61125.80221671782 }, { "content": "#[test]\n\nfn test_kube_server() {\n\n let host = \"fake.io\";\n\n let port = \"8080\";\n\n env::set_var(SERVICE_HOSTENV, host);\n\n env::set_var(SERVICE_PORTENV, port);\n\n assert_eq!(kube_server().unwrap(), \"https://fake.io:8080\");\n\n}\n", "file_path": "kube/src/config/incluster_config.rs", "rank": 85, "score": 61125.80221671782 }, { "content": "fn default_value() -> String {\n\n \"default_value\".into()\n\n}\n\n\n", "file_path": "examples/crd_derive_schema.rs", "rank": 86, "score": 60233.908995853606 }, { "content": "#[test]\n\nfn test_crd_name() {\n\n use kube::core::CustomResourceExt;\n\n assert_eq!(\"foos.clux.dev\", Foo::crd_name());\n\n}\n\n\n", "file_path": "kube-derive/tests/crd_schema_test.rs", "rank": 87, "score": 60192.199095914824 }, { "content": "fn default_apiext() -> String {\n\n 
\"v1\".to_owned()\n\n}\n\n\n\npub(crate) fn derive(input: proc_macro2::TokenStream) -> proc_macro2::TokenStream {\n\n let derive_input: DeriveInput = match syn::parse2(input) {\n\n Err(err) => return err.to_compile_error(),\n\n Ok(di) => di,\n\n };\n\n // Limit derive to structs\n\n match derive_input.data {\n\n Data::Struct(_) => {}\n\n _ => {\n\n return syn::Error::new_spanned(\n\n &derive_input.ident,\n\n r#\"Enums or Unions can not #[derive(CustomResource)]\"#,\n\n )\n\n .to_compile_error()\n\n }\n\n }\n", "file_path": "kube-derive/src/custom_resource.rs", "rank": 88, "score": 59251.90946681074 }, { "content": "#[test]\n\nfn test_crd_schema_matches_expected() {\n\n use kube::core::CustomResourceExt;\n\n assert_eq!(\n\n Foo::crd(),\n\n serde_json::from_value(serde_json::json!({\n\n \"apiVersion\": \"apiextensions.k8s.io/v1\",\n\n \"kind\": \"CustomResourceDefinition\",\n\n \"metadata\": {\n\n \"name\": \"foos.clux.dev\"\n\n },\n\n \"spec\": {\n\n \"group\": \"clux.dev\",\n\n \"names\": {\n\n \"categories\": [\"clux\"],\n\n \"kind\": \"Foo\",\n\n \"plural\": \"foos\",\n\n \"shortNames\": [],\n\n \"singular\": \"foo\"\n\n },\n\n \"scope\": \"Namespaced\",\n", "file_path": "kube-derive/tests/crd_schema_test.rs", "rank": 89, "score": 58456.54336649612 }, { "content": "fn default_value() -> String {\n\n \"default_value\".into()\n\n}\n\n\n", "file_path": "kube-derive/tests/crd_schema_test.rs", "rank": 90, "score": 58318.30634600775 }, { "content": "fn default_nullable() -> Option<String> {\n\n Some(\"default_nullable\".into())\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> Result<()> {\n\n // Show the generated CRD\n\n println!(\"Foo CRD:\\n{}\\n\", serde_yaml::to_string(&Foo::crd())?);\n\n\n\n // Creating CRD v1 works as expected.\n\n println!(\"Creating CRD v1\");\n\n let client = Client::try_default().await?;\n\n delete_crd(client.clone()).await?;\n\n assert!(create_crd(client.clone()).await.is_ok());\n\n\n\n // Test creating Foo resource.\n\n let namespace = 
std::env::var(\"NAMESPACE\").unwrap_or_else(|_| \"default\".into());\n\n let foos = Api::<Foo>::namespaced(client.clone(), &namespace);\n\n // Create with defaults using typed Api first.\n\n // `non_nullable` and `non_nullable_with_default` are set to empty strings.\n", "file_path": "examples/crd_derive_schema.rs", "rank": 91, "score": 57657.67421542457 }, { "content": "fn kube_port() -> Option<String> {\n\n env::var(SERVICE_PORTENV).ok()\n\n}\n\n\n", "file_path": "kube/src/config/incluster_config.rs", "rank": 92, "score": 56724.07109462158 }, { "content": "fn kube_host() -> Option<String> {\n\n env::var(SERVICE_HOSTENV).ok()\n\n}\n\n\n", "file_path": "kube/src/config/incluster_config.rs", "rank": 93, "score": 56724.07109462158 }, { "content": "fn default_nullable() -> Option<String> {\n\n Some(\"default_nullable\".into())\n\n}\n\n\n", "file_path": "kube-derive/tests/crd_schema_test.rs", "rank": 94, "score": 55835.372704051275 }, { "content": "fn spawn_periodic_reader(reader: Store<Secret>) {\n\n tokio::spawn(async move {\n\n loop {\n\n // Periodically read our state\n\n let cms: Vec<_> = reader\n\n .state()\n\n .iter()\n\n .map(|s| format!(\"{}: {:?}\", s.name(), decode(s).keys()))\n\n .collect();\n\n info!(\"Current secrets: {:?}\", cms);\n\n tokio::time::sleep(std::time::Duration::from_secs(15)).await;\n\n }\n\n });\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> anyhow::Result<()> {\n\n std::env::set_var(\"RUST_LOG\", \"info,kube=debug\");\n\n env_logger::init();\n\n let client = Client::try_default().await?;\n", "file_path": "examples/secret_reflector.rs", "rank": 95, "score": 55351.23678802385 }, { "content": "fn pod_unready(p: &Pod) -> Option<String> {\n\n let status = p.status.as_ref().unwrap();\n\n if let Some(conds) = &status.conditions {\n\n let failed = conds\n\n .into_iter()\n\n .filter(|c| c.type_ == \"Ready\" && c.status == \"False\")\n\n .map(|c| c.message.clone().unwrap_or_default())\n\n .collect::<Vec<_>>()\n\n .join(\",\");\n\n if 
!failed.is_empty() {\n\n if p.metadata.labels.as_ref().unwrap().contains_key(\"job-name\") {\n\n return None; // ignore job based pods, they are meant to exit 0\n\n }\n\n return Some(format!(\"Unready pod {}: {}\", p.name(), failed));\n\n }\n\n }\n\n None\n\n}\n", "file_path": "examples/pod_watcher.rs", "rank": 96, "score": 55090.28024817577 }, { "content": "fn object_to_owner_reference<K: Resource<DynamicType = ()>>(\n\n meta: ObjectMeta,\n\n) -> Result<OwnerReference, Error> {\n\n Ok(OwnerReference {\n\n api_version: K::api_version(&()).to_string(),\n\n kind: K::kind(&()).to_string(),\n\n name: meta.name.context(MissingObjectKey {\n\n name: \".metadata.name\",\n\n })?,\n\n uid: meta.uid.context(MissingObjectKey {\n\n name: \".metadata.uid\",\n\n })?,\n\n ..OwnerReference::default()\n\n })\n\n}\n\n\n\n/// Controller triggers this whenever our main object or our children changed\n\nasync fn reconcile(generator: ConfigMapGenerator, ctx: Context<Data>) -> Result<ReconcilerAction, Error> {\n\n log::info!(\"working hard\");\n\n tokio::time::sleep(Duration::from_secs(2)).await;\n", "file_path": "examples/configmapgen_controller.rs", "rank": 97, "score": 54462.53839745355 } ]
Rust
rust/agents/kathy/src/kathy.rs
aaronwinter/optics-monorepo
82b74a232b87064456bda20ba5f1171bd2b6fe70
use std::time::Duration; use async_trait::async_trait; use color_eyre::eyre::bail; use ethers::core::types::H256; use optics_core::traits::Replica; use tokio::{task::JoinHandle, time::sleep}; use rand::distributions::Alphanumeric; use rand::{thread_rng, Rng}; use color_eyre::Result; use optics_base::{ agent::{AgentCore, OpticsAgent}, decl_agent, }; use optics_core::{traits::Home, Message}; use tracing::instrument::Instrumented; use tracing::{info, Instrument}; use crate::settings::KathySettings as Settings; decl_agent!(Kathy { duration: u64, generator: ChatGenerator, }); impl Kathy { pub fn new(duration: u64, generator: ChatGenerator, core: AgentCore) -> Self { Self { duration, generator, core, } } } #[async_trait] impl OpticsAgent for Kathy { const AGENT_NAME: &'static str = "kathy"; type Settings = Settings; async fn from_settings(settings: Settings) -> Result<Self> { Ok(Self::new( settings.interval.parse().expect("invalid u64"), settings.chat.into(), settings.base.try_into_core(Self::AGENT_NAME).await?, )) } #[tracing::instrument] fn run(&self, name: &str) -> Instrumented<JoinHandle<Result<()>>> { let replica_opt = self.replica_by_name(name); let name = name.to_owned(); let home = self.home(); let mut generator = self.generator.clone(); let duration = Duration::from_secs(self.duration); tokio::spawn(async move { if replica_opt.is_none() { bail!("No replica named {}", name); } let replica = replica_opt.unwrap(); let destination = replica.local_domain(); loop { sleep(duration).await; let msg = generator.gen_chat(); let recipient = generator.gen_recipient(); match msg { Some(body) => { let message = Message { destination, recipient, body, }; info!( target: "outgoing_messages", "Enqueuing message of length {} to {}::{}", length = message.body.len(), destination = message.destination, recipient = message.recipient ); home.dispatch(&message).await?; } _ => { info!("Reached the end of the static message queue. 
Shutting down."); return Ok(()); } } } }) .in_current_span() } } #[derive(Debug, Clone)] pub enum ChatGenerator { Static { recipient: H256, message: String, }, OrderedList { messages: Vec<String>, counter: usize, }, Random { length: usize, }, Default, } impl Default for ChatGenerator { fn default() -> Self { Self::Default } } impl ChatGenerator { fn rand_string(length: usize) -> String { thread_rng() .sample_iter(&Alphanumeric) .take(length) .map(char::from) .collect() } pub fn gen_recipient(&mut self) -> H256 { match self { ChatGenerator::Default => Default::default(), ChatGenerator::Static { recipient, message: _, } => *recipient, ChatGenerator::OrderedList { messages: _, counter: _, } => Default::default(), ChatGenerator::Random { length: _ } => H256::random(), } } pub fn gen_chat(&mut self) -> Option<Vec<u8>> { match self { ChatGenerator::Default => Some(Default::default()), ChatGenerator::Static { recipient: _, message, } => Some(message.as_bytes().to_vec()), ChatGenerator::OrderedList { messages, counter } => { if *counter >= messages.len() { return None; } let msg = messages[*counter].clone().into(); *counter += 1; Some(msg) } ChatGenerator::Random { length } => Some(Self::rand_string(*length).into()), } } }
use std::time::Duration; use async_trait::async_trait; use color_eyre::eyre::bail; use ethers::core::types::H256; use optics_core::traits::Replica; use tokio::{task::JoinHandle, time::sleep}; use rand::distributions::Alphanumeric; use rand::{thread_rng, Rng}; use color_eyre::Result; use optics_base::{ agent::{AgentCore, OpticsAgent}, decl_agent, }; use optics_core::{traits::Home, Message}; use tracing::instrument::Instrumented; use tracing::{info, Instrument}; use crate::settings::KathySettings as Settings; decl_agent!(Kathy { duration: u64, generator: ChatGenerator, }); impl Kathy { pub fn new(duration: u64, generator: ChatGenerator, core: AgentCore) -> Self { Self { duration, generator, core, } } } #[async_trait] impl OpticsAgent for Kathy { const AGENT_NAME: &'static str = "kathy"; type Settings = Settings; async fn from_settings(settings: Settings) -> Result<Self> { Ok(Self::new(
#[tracing::instrument] fn run(&self, name: &str) -> Instrumented<JoinHandle<Result<()>>> { let replica_opt = self.replica_by_name(name); let name = name.to_owned(); let home = self.home(); let mut generator = self.generator.clone(); let duration = Duration::from_secs(self.duration); tokio::spawn(async move { if replica_opt.is_none() { bail!("No replica named {}", name); } let replica = replica_opt.unwrap(); let destination = replica.local_domain(); loop { sleep(duration).await; let msg = generator.gen_chat(); let recipient = generator.gen_recipient(); match msg { Some(body) => { let message = Message { destination, recipient, body, }; info!( target: "outgoing_messages", "Enqueuing message of length {} to {}::{}", length = message.body.len(), destination = message.destination, recipient = message.recipient ); home.dispatch(&message).await?; } _ => { info!("Reached the end of the static message queue. Shutting down."); return Ok(()); } } } }) .in_current_span() } } #[derive(Debug, Clone)] pub enum ChatGenerator { Static { recipient: H256, message: String, }, OrderedList { messages: Vec<String>, counter: usize, }, Random { length: usize, }, Default, } impl Default for ChatGenerator { fn default() -> Self { Self::Default } } impl ChatGenerator { fn rand_string(length: usize) -> String { thread_rng() .sample_iter(&Alphanumeric) .take(length) .map(char::from) .collect() } pub fn gen_recipient(&mut self) -> H256 { match self { ChatGenerator::Default => Default::default(), ChatGenerator::Static { recipient, message: _, } => *recipient, ChatGenerator::OrderedList { messages: _, counter: _, } => Default::default(), ChatGenerator::Random { length: _ } => H256::random(), } } pub fn gen_chat(&mut self) -> Option<Vec<u8>> { match self { ChatGenerator::Default => Some(Default::default()), ChatGenerator::Static { recipient: _, message, } => Some(message.as_bytes().to_vec()), ChatGenerator::OrderedList { messages, counter } => { if *counter >= messages.len() { return None; } let 
msg = messages[*counter].clone().into(); *counter += 1; Some(msg) } ChatGenerator::Random { length } => Some(Self::rand_string(*length).into()), } } }
settings.interval.parse().expect("invalid u64"), settings.chat.into(), settings.base.try_into_core(Self::AGENT_NAME).await?, )) }
function_block-function_prefix_line
[ { "content": "/// Strips the '0x' prefix off of hex string so it can be deserialized.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `s` - The hex str\n\npub fn strip_0x_prefix(s: &str) -> &str {\n\n if s.len() < 2 || &s[..2] != \"0x\" {\n\n s\n\n } else {\n\n &s[2..]\n\n }\n\n}\n\n\n", "file_path": "rust/optics-core/src/utils.rs", "rank": 0, "score": 239863.5825261527 }, { "content": "/// Find a vector file assuming that a git checkout exists\n\n// TODO: look instead for the workspace `Cargo.toml`? use a cargo env var?\n\npub fn find_vector(final_component: &str) -> PathBuf {\n\n let cwd = std::env::current_dir().expect(\"no cwd?\");\n\n let git_dir = cwd\n\n .ancestors() // . ; ../ ; ../../ ; ...\n\n .find(|d| d.join(\".git\").is_dir())\n\n .expect(\"could not find .git somewhere! confused about workspace layout\");\n\n\n\n git_dir.join(\"vectors\").join(final_component)\n\n}\n\n\n", "file_path": "rust/optics-core/src/test_utils.rs", "rank": 1, "score": 205552.87071762732 }, { "content": "/// Destination and destination-specific nonce combined in single field (\n\n/// (destination << 32) & nonce)\n\npub fn destination_and_nonce(destination: u32, nonce: u32) -> u64 {\n\n assert!(destination < u32::MAX);\n\n assert!(nonce < u32::MAX);\n\n ((destination as u64) << 32) | nonce as u64\n\n}\n\n\n\n/// A Hex String of length `N` representing bytes of length `N / 2`\n\n#[derive(Debug, Clone)]\n\npub struct HexString<const N: usize>(String);\n\n\n\nimpl<const N: usize> AsRef<String> for HexString<N> {\n\n fn as_ref(&self) -> &String {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl<const N: usize> HexString<N> {\n\n /// Instantiate a new HexString from any `AsRef<str>`. Tolerates 0x\n\n /// prefixing. 
A succesful instantiation will create an owned copy of the\n\n /// string.\n", "file_path": "rust/optics-core/src/utils.rs", "rank": 2, "score": 199420.089320561 }, { "content": "/// Verify a proof that `leaf` exists at `index` in a Merkle tree rooted at `root`.\n\n///\n\n/// The `branch` argument is the main component of the proof: it should be a list of internal\n\n/// node hashes such that the root can be reconstructed (in bottom-up order).\n\npub fn verify_merkle_proof(\n\n leaf: H256,\n\n branch: &[H256],\n\n depth: usize,\n\n index: usize,\n\n root: H256,\n\n) -> bool {\n\n if branch.len() == depth {\n\n merkle_root_from_branch(leaf, branch, depth, index) == root\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "rust/optics-core/src/accumulator/merkle.rs", "rank": 3, "score": 176406.59181229345 }, { "content": "fn format_message(\n\n origin: u32,\n\n sender: H256,\n\n destination: u32,\n\n recipient: H256,\n\n body: &[u8],\n\n) -> Vec<u8> {\n\n let mut buf = vec![];\n\n buf.write_all(&origin.to_be_bytes()).unwrap();\n\n buf.write_all(sender.as_ref()).unwrap();\n\n buf.write_all(&destination.to_be_bytes()).unwrap();\n\n buf.write_all(recipient.as_ref()).unwrap();\n\n buf.write_all(body).unwrap();\n\n buf\n\n}\n\n\n\n/// The Home-chain Optics object\n\n#[derive(Debug, Clone)]\n\npub struct Home<S> {\n\n local: u32,\n", "file_path": "rust/optics-core/src/models/home.rs", "rank": 4, "score": 162172.10365511506 }, { "content": "/// Computes hash of home domain concatenated with \"OPTICS\"\n\npub fn home_domain_hash(home_domain: u32) -> H256 {\n\n H256::from_slice(\n\n Keccak256::new()\n\n .chain(home_domain.to_be_bytes())\n\n .chain(\"OPTICS\".as_bytes())\n\n .finalize()\n\n .as_slice(),\n\n )\n\n}\n\n\n", "file_path": "rust/optics-core/src/utils.rs", "rank": 5, "score": 151723.13392799016 }, { "content": "/// Reads merkle test case json file and returns a vector of `MerkleTestCase`s\n\npub fn load_merkle_test_json() -> Vec<MerkleTestCase> {\n\n let mut file 
= File::open(find_vector(\"merkle.json\")).unwrap();\n\n let mut data = String::new();\n\n file.read_to_string(&mut data).unwrap();\n\n serde_json::from_str(&data).unwrap()\n\n}\n", "file_path": "rust/optics-core/src/test_utils.rs", "rank": 6, "score": 151573.34724302555 }, { "content": " }\n\n}\n\n\n\nimpl OpticsMessage {\n\n /// Convert the message to a leaf\n\n pub fn to_leaf(&self) -> H256 {\n\n keccak256(self.to_vec()).into()\n\n }\n\n\n\n /// Get the encoded destination + nonce\n\n pub fn destination_and_nonce(&self) -> u64 {\n\n utils::destination_and_nonce(self.destination, self.nonce)\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for OpticsMessage {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(\n\n f,\n\n \"OpticsMessage {}->{}:{}\",\n\n self.origin, self.destination, self.nonce,\n\n )\n\n }\n\n}\n", "file_path": "rust/optics-core/src/types/messages.rs", "rank": 7, "score": 137666.08821224998 }, { "content": "use ethers::{types::H256, utils::keccak256};\n\n\n\nuse crate::{utils, Decode, Encode, OpticsError};\n\n\n\nconst OPTICS_MESSAGE_PREFIX_LEN: usize = 76;\n\n\n\n/// A full Optics message between chains\n\n#[derive(Debug, Default, Clone)]\n\npub struct OpticsMessage {\n\n /// 4 SLIP-44 ID\n\n pub origin: u32,\n\n /// 32 Address in home convention\n\n pub sender: H256,\n\n /// 4 Count of all previous messages to destination\n\n pub nonce: u32,\n\n /// 4 SLIP-44 ID\n\n pub destination: u32,\n\n /// 32 Address in destination convention\n\n pub recipient: H256,\n\n /// 0+ Message contents\n", "file_path": "rust/optics-core/src/types/messages.rs", "rank": 8, "score": 137664.41737850773 }, { "content": " pub body: Vec<u8>,\n\n}\n\n\n\n/// A partial Optics message between chains\n\n#[derive(Debug, Default, Clone)]\n\npub struct Message {\n\n /// 4 SLIP-44 ID\n\n pub destination: u32,\n\n /// 32 Address in destination convention\n\n pub recipient: H256,\n\n /// 0+ Message contents\n\n pub body: Vec<u8>,\n\n}\n\n\n\nimpl Encode 
for OpticsMessage {\n\n fn write_to<W>(&self, writer: &mut W) -> std::io::Result<usize>\n\n where\n\n W: std::io::Write,\n\n {\n\n writer.write_all(&self.origin.to_be_bytes())?;\n", "file_path": "rust/optics-core/src/types/messages.rs", "rank": 9, "score": 137663.30873206918 }, { "content": " writer.write_all(self.sender.as_ref())?;\n\n writer.write_all(&self.nonce.to_be_bytes())?;\n\n writer.write_all(&self.destination.to_be_bytes())?;\n\n writer.write_all(self.recipient.as_ref())?;\n\n writer.write_all(&self.body)?;\n\n Ok(OPTICS_MESSAGE_PREFIX_LEN + self.body.len())\n\n }\n\n}\n\n\n\nimpl Decode for OpticsMessage {\n\n fn read_from<R>(reader: &mut R) -> Result<Self, OpticsError>\n\n where\n\n R: std::io::Read,\n\n {\n\n let mut origin = [0u8; 4];\n\n reader.read_exact(&mut origin)?;\n\n\n\n let mut sender = H256::zero();\n\n reader.read_exact(sender.as_mut())?;\n\n\n", "file_path": "rust/optics-core/src/types/messages.rs", "rank": 10, "score": 137659.41071787954 }, { "content": " let mut nonce = [0u8; 4];\n\n reader.read_exact(&mut nonce)?;\n\n\n\n let mut destination = [0u8; 4];\n\n reader.read_exact(&mut destination)?;\n\n\n\n let mut recipient = H256::zero();\n\n reader.read_exact(recipient.as_mut())?;\n\n\n\n let mut body = vec![];\n\n reader.read_to_end(&mut body)?;\n\n\n\n Ok(Self {\n\n origin: u32::from_be_bytes(origin),\n\n sender,\n\n destination: u32::from_be_bytes(destination),\n\n recipient,\n\n nonce: u32::from_be_bytes(nonce),\n\n body,\n\n })\n", "file_path": "rust/optics-core/src/types/messages.rs", "rank": 11, "score": 137654.63525521915 }, { "content": "/// Compute a root hash from a leaf and a Merkle proof.\n\npub fn merkle_root_from_branch(leaf: H256, branch: &[H256], depth: usize, index: usize) -> H256 {\n\n assert_eq!(branch.len(), depth, \"proof length should equal depth\");\n\n\n\n let mut current = leaf;\n\n\n\n for (i, next) in branch.iter().enumerate().take(depth) {\n\n let ith_bit = (index >> i) & 0x01;\n\n if ith_bit == 1 {\n\n 
current = hash_concat(next, current);\n\n } else {\n\n current = hash_concat(current, next);\n\n }\n\n }\n\n\n\n current\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::accumulator::incremental;\n", "file_path": "rust/optics-core/src/accumulator/merkle.rs", "rank": 12, "score": 126150.70238889955 }, { "content": "fn main() -> Result<()> {\n\n tokio::runtime::Builder::new_current_thread()\n\n .enable_all()\n\n .build()\n\n .unwrap()\n\n .block_on(_main())\n\n}\n", "file_path": "rust/agents/kathy/src/main.rs", "rank": 13, "score": 116732.51880512998 }, { "content": "pub fn setup_db(db_path: String) -> DB {\n\n let mut opts = Options::default();\n\n opts.create_if_missing(true);\n\n rocksdb::DB::open(&opts, db_path)\n\n .expect(\"Failed to open db path\")\n\n .into()\n\n}\n\n\n\npub async fn run_test_db<T, Fut>(test: T)\n\nwhere\n\n T: FnOnce(DB) -> Fut + panic::UnwindSafe,\n\n Fut: Future<Output = ()>,\n\n{\n\n // RocksDB only allows one unique db handle to be open at a time. Because\n\n // `cargo test` is multithreaded by default, we use random db pathnames to\n\n // avoid collisions between 2+ threads\n\n let rand_path: String = thread_rng()\n\n .sample_iter(&Alphanumeric)\n\n .take(8)\n\n .map(char::from)\n", "file_path": "rust/optics-test/src/test_utils.rs", "rank": 14, "score": 113526.3236025116 }, { "content": "/// Simple trait for types with a canonical encoding\n\npub trait Encode {\n\n /// Write the canonical encoding to the writer\n\n fn write_to<W>(&self, writer: &mut W) -> std::io::Result<usize>\n\n where\n\n W: std::io::Write;\n\n\n\n /// Serialize to a vec\n\n fn to_vec(&self) -> Vec<u8> {\n\n let mut buf = vec![];\n\n self.write_to(&mut buf).expect(\"!alloc\");\n\n buf\n\n }\n\n}\n\n\n", "file_path": "rust/optics-core/src/traits/encode.rs", "rank": 15, "score": 112019.63519050986 }, { "content": "/// Simple trait for types with a canonical encoding\n\npub trait Decode {\n\n /// Try to read from some source\n\n fn read_from<R>(reader: &mut R) -> 
Result<Self, OpticsError>\n\n where\n\n R: std::io::Read,\n\n Self: Sized;\n\n}\n\n\n\nimpl Encode for Signature {\n\n fn write_to<W>(&self, writer: &mut W) -> std::io::Result<usize>\n\n where\n\n W: std::io::Write,\n\n {\n\n writer.write_all(&self.to_vec())?;\n\n Ok(65)\n\n }\n\n}\n\n\n\nimpl Decode for Signature {\n\n fn read_from<R>(reader: &mut R) -> Result<Self, OpticsError>\n", "file_path": "rust/optics-core/src/traits/encode.rs", "rank": 16, "score": 112019.63519050986 }, { "content": "//! Configuration\n\n\n\nuse ethers::core::types::H256;\n\n\n\nuse crate::kathy::ChatGenerator;\n\n\n\nuse optics_base::decl_settings;\n\n\n\n#[derive(Debug, serde::Deserialize)]\n\n#[serde(tag = \"type\", rename_all = \"camelCase\")]\n\npub enum ChatGenConfig {\n\n Static {\n\n recipient: H256,\n\n message: String,\n\n },\n\n OrderedList {\n\n messages: Vec<String>,\n\n },\n\n Random {\n\n length: usize,\n", "file_path": "rust/agents/kathy/src/settings.rs", "rank": 17, "score": 95982.0240643169 }, { "content": " },\n\n #[serde(other)]\n\n Default,\n\n}\n\n\n\nimpl Default for ChatGenConfig {\n\n fn default() -> Self {\n\n Self::Default\n\n }\n\n}\n\n\n\nimpl From<ChatGenConfig> for ChatGenerator {\n\n fn from(conf: ChatGenConfig) -> ChatGenerator {\n\n match conf {\n\n ChatGenConfig::Static { recipient, message } => {\n\n ChatGenerator::Static { recipient, message }\n\n }\n\n ChatGenConfig::OrderedList { messages } => ChatGenerator::OrderedList {\n\n messages,\n\n counter: 0,\n", "file_path": "rust/agents/kathy/src/settings.rs", "rank": 18, "score": 95976.50025846976 }, { "content": " },\n\n ChatGenConfig::Random { length } => ChatGenerator::Random { length },\n\n ChatGenConfig::Default => ChatGenerator::Default,\n\n }\n\n }\n\n}\n\n\n\ndecl_settings!(Kathy {\n\n /// The message interval (in seconds)\n\n interval: String,\n\n /// Chat generation configuration\n\n #[serde(default)]\n\n chat: ChatGenConfig,\n\n});\n", "file_path": "rust/agents/kathy/src/settings.rs", "rank": 
19, "score": 95970.276622811 }, { "content": "#[async_trait]\n\npub trait Common: Sync + Send + std::fmt::Debug {\n\n /// Return an identifier (not necessarily unique) for the chain this\n\n /// contract is running on.\n\n fn name(&self) -> &str;\n\n\n\n /// Get the status of a transaction.\n\n async fn status(&self, txid: H256) -> Result<Option<TxOutcome>, ChainCommunicationError>;\n\n\n\n /// Fetch the current updater value\n\n async fn updater(&self) -> Result<H256, ChainCommunicationError>;\n\n\n\n /// Fetch the current state.\n\n async fn state(&self) -> Result<State, ChainCommunicationError>;\n\n\n\n /// Fetch the current root.\n\n async fn committed_root(&self) -> Result<H256, ChainCommunicationError>;\n\n\n\n /// Fetch the first signed update building off of `old_root`. If `old_root`\n\n /// was never accepted or has never been updated, this will return `Ok(None )`.\n\n /// This should fetch events from the chain API\n", "file_path": "rust/optics-core/src/traits/mod.rs", "rank": 20, "score": 91922.45138827912 }, { "content": "/// Watcher Failure Notifications\n\npub mod failure;\n\n/// Optics messages\n\npub mod messages;\n\n\n\n/// Updates\n\npub mod update;\n\n\n\n/// Unified 32-byte identifier with convenience tooling for handling\n\n/// 20-byte ids (e.g ethereum addresses)\n\npub mod identifiers;\n\n\n\npub use failure::*;\n\npub use messages::*;\n\npub use update::*;\n", "file_path": "rust/optics-core/src/types/mod.rs", "rank": 21, "score": 90974.99130561866 }, { "content": "use crate::{utils::home_domain_hash, OpticsError, OpticsIdentifier, SignerExt};\n\nuse ethers::{\n\n prelude::{Address, Signature},\n\n types::H256,\n\n utils::hash_message,\n\n};\n\nuse ethers_signers::Signer;\n\nuse sha3::{Digest, Keccak256};\n\n\n\n/// Failure notification produced by watcher\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub struct FailureNotification {\n\n /// Domain of failed home\n\n pub home_domain: u32,\n\n /// Failed home's updater\n\n pub updater: 
OpticsIdentifier,\n\n}\n\n\n\nimpl FailureNotification {\n\n fn signing_hash(&self) -> H256 {\n", "file_path": "rust/optics-core/src/types/failure.rs", "rank": 22, "score": 90972.84286737742 }, { "content": "use crate::{utils::home_domain_hash, Decode, Encode, OpticsError, SignerExt};\n\nuse ethers::{\n\n prelude::{Address, Signature},\n\n types::H256,\n\n utils::hash_message,\n\n};\n\nuse ethers_signers::Signer;\n\nuse serde::{Deserialize, Serialize};\n\nuse sha3::{Digest, Keccak256};\n\n\n\n/// An Optics update message\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]\n\npub struct Update {\n\n /// The home chain\n\n pub home_domain: u32,\n\n /// The previous root\n\n pub previous_root: H256,\n\n /// The new root\n\n pub new_root: H256,\n\n}\n", "file_path": "rust/optics-core/src/types/update.rs", "rank": 23, "score": 90972.48024711736 }, { "content": " fn prepended_hash(&self) -> H256 {\n\n hash_message(self.signing_hash())\n\n }\n\n\n\n /// Sign an update using the specified signer\n\n pub async fn sign_with<S: Signer>(self, signer: &S) -> Result<SignedUpdate, S::Error> {\n\n let signature = signer\n\n .sign_message_without_eip_155(self.signing_hash())\n\n .await?;\n\n Ok(SignedUpdate {\n\n update: self,\n\n signature,\n\n })\n\n }\n\n}\n\n\n\n/// A Signed Optics Update\n\n#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]\n\npub struct SignedUpdate {\n\n /// The update\n", "file_path": "rust/optics-core/src/types/update.rs", "rank": 24, "score": 90971.91419884561 }, { "content": " R: std::io::Read,\n\n Self: Sized,\n\n {\n\n let update = Update::read_from(reader)?;\n\n let signature = Signature::read_from(reader)?;\n\n Ok(Self { update, signature })\n\n }\n\n}\n\n\n\nimpl SignedUpdate {\n\n /// Recover the Ethereum address of the signer\n\n pub fn recover(&self) -> Result<Address, OpticsError> {\n\n Ok(self.signature.recover(self.update.prepended_hash())?)\n\n }\n\n\n\n /// Check whether a message was signed by a specific 
address\n\n pub fn verify(&self, signer: Address) -> Result<(), OpticsError> {\n\n Ok(self\n\n .signature\n\n .verify(self.update.prepended_hash(), signer)?)\n\n }\n\n}\n", "file_path": "rust/optics-core/src/types/update.rs", "rank": 25, "score": 90971.63510377283 }, { "content": " H256::from_slice(\n\n Keccak256::new()\n\n .chain(home_domain_hash(self.home_domain))\n\n .chain(self.home_domain.to_be_bytes())\n\n .chain(self.updater.as_ref())\n\n .finalize()\n\n .as_slice(),\n\n )\n\n }\n\n\n\n fn prepended_hash(&self) -> H256 {\n\n hash_message(self.signing_hash())\n\n }\n\n\n\n /// Sign an `FailureNotification` using the specified signer\n\n pub async fn sign_with<S>(self, signer: &S) -> Result<SignedFailureNotification, S::Error>\n\n where\n\n S: Signer,\n\n {\n\n let signature = signer\n", "file_path": "rust/optics-core/src/types/failure.rs", "rank": 26, "score": 90970.29144573437 }, { "content": " pub update: Update,\n\n /// The signature\n\n pub signature: Signature,\n\n}\n\n\n\nimpl Encode for SignedUpdate {\n\n fn write_to<W>(&self, writer: &mut W) -> std::io::Result<usize>\n\n where\n\n W: std::io::Write,\n\n {\n\n let mut written = 0;\n\n written += self.update.write_to(writer)?;\n\n written += self.signature.write_to(writer)?;\n\n Ok(written)\n\n }\n\n}\n\n\n\nimpl Decode for SignedUpdate {\n\n fn read_from<R>(reader: &mut R) -> Result<Self, OpticsError>\n\n where\n", "file_path": "rust/optics-core/src/types/update.rs", "rank": 27, "score": 90970.17779147273 }, { "content": "use ethers::prelude::{H160, H256};\n\n\n\nuse crate::{Decode, Encode};\n\n\n\n/// Identifier type.\n\n///\n\n/// Normally these will map to address types for different networks. For Optics,\n\n/// we choose to _always_ serialize as 32 bytes\n\n#[derive(Debug, Default, Copy, Clone, PartialEq, serde::Serialize, serde::Deserialize)]\n\npub struct OpticsIdentifier(H256);\n\n\n\nimpl OpticsIdentifier {\n\n /// Check if the identifier is an ethereum address. 
This checks\n\n /// that the first 12 bytes are all 0.\n\n pub fn is_ethereum_address(&self) -> bool {\n\n self.0.as_bytes()[0..12].iter().all(|b| *b == 0)\n\n }\n\n\n\n /// Cast to an ethereum address by truncating.\n\n pub fn as_ethereum_address(&self) -> H160 {\n", "file_path": "rust/optics-core/src/types/identifiers.rs", "rank": 28, "score": 90969.96829047649 }, { "content": " .sign_message_without_eip_155(self.signing_hash())\n\n .await?;\n\n Ok(SignedFailureNotification {\n\n notification: self,\n\n signature,\n\n })\n\n }\n\n}\n\n\n\n/// Signed failure notification produced by watcher\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub struct SignedFailureNotification {\n\n /// Failure notification\n\n pub notification: FailureNotification,\n\n /// Signature\n\n pub signature: Signature,\n\n}\n\n\n\nimpl SignedFailureNotification {\n\n /// Recover the Ethereum address of the signer\n", "file_path": "rust/optics-core/src/types/failure.rs", "rank": 29, "score": 90969.91777083016 }, { "content": " self.0.as_ref()\n\n }\n\n}\n\n\n\nimpl AsMut<[u8]> for OpticsIdentifier {\n\n fn as_mut(&mut self) -> &mut [u8] {\n\n self.0.as_mut()\n\n }\n\n}\n\n\n\nimpl From<OpticsIdentifier> for H256 {\n\n fn from(addr: OpticsIdentifier) -> Self {\n\n addr.0\n\n }\n\n}\n\n\n\nimpl From<OpticsIdentifier> for [u8; 32] {\n\n fn from(addr: OpticsIdentifier) -> Self {\n\n addr.0.into()\n\n }\n", "file_path": "rust/optics-core/src/types/identifiers.rs", "rank": 30, "score": 90969.88674085264 }, { "content": " H160::from_slice(&self.0.as_ref()[12..])\n\n }\n\n}\n\n\n\nimpl From<H256> for OpticsIdentifier {\n\n fn from(address: H256) -> Self {\n\n OpticsIdentifier(address)\n\n }\n\n}\n\n\n\nimpl From<H160> for OpticsIdentifier {\n\n fn from(address: H160) -> Self {\n\n let mut id = OpticsIdentifier::default();\n\n id.as_mut()[12..].copy_from_slice(address.as_ref());\n\n id\n\n }\n\n}\n\n\n\nimpl AsRef<[u8]> for OpticsIdentifier {\n\n fn as_ref(&self) -> &[u8] {\n", "file_path": 
"rust/optics-core/src/types/identifiers.rs", "rank": 31, "score": 90969.21518380078 }, { "content": "}\n\n\n\nimpl Encode for OpticsIdentifier {\n\n fn write_to<W>(&self, writer: &mut W) -> std::io::Result<usize>\n\n where\n\n W: std::io::Write,\n\n {\n\n self.0.write_to(writer)\n\n }\n\n}\n\n\n\nimpl Decode for OpticsIdentifier {\n\n fn read_from<R>(reader: &mut R) -> Result<Self, crate::OpticsError>\n\n where\n\n R: std::io::Read,\n\n Self: Sized,\n\n {\n\n Ok(OpticsIdentifier(H256::read_from(reader)?))\n\n }\n\n}\n", "file_path": "rust/optics-core/src/types/identifiers.rs", "rank": 32, "score": 90968.23917203586 }, { "content": " pub fn recover(&self) -> Result<Address, OpticsError> {\n\n Ok(self.signature.recover(self.notification.prepended_hash())?)\n\n }\n\n\n\n /// Check whether a message was signed by a specific address\n\n pub fn verify(&self, signer: Address) -> Result<(), OpticsError> {\n\n Ok(self\n\n .signature\n\n .verify(self.notification.prepended_hash(), signer)?)\n\n }\n\n}\n", "file_path": "rust/optics-core/src/types/failure.rs", "rank": 33, "score": 90967.87896622355 }, { "content": " }\n\n}\n\n\n\nimpl Decode for Update {\n\n fn read_from<R>(reader: &mut R) -> Result<Self, OpticsError>\n\n where\n\n R: std::io::Read,\n\n Self: Sized,\n\n {\n\n let mut home_domain = [0u8; 4];\n\n reader.read_exact(&mut home_domain)?;\n\n\n\n let mut previous_root = H256::zero();\n\n reader.read_exact(previous_root.as_mut())?;\n\n\n\n let mut new_root = H256::zero();\n\n reader.read_exact(new_root.as_mut())?;\n\n\n\n Ok(Self {\n\n home_domain: u32::from_be_bytes(home_domain),\n", "file_path": "rust/optics-core/src/types/update.rs", "rank": 34, "score": 90967.155509806 }, { "content": "\n\nimpl std::fmt::Display for Update {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(\n\n f,\n\n \"Update(domain {} moved from {} to {})\",\n\n self.home_domain, self.previous_root, self.new_root\n\n )\n\n }\n\n}\n\n\n\nimpl Encode for Update 
{\n\n fn write_to<W>(&self, writer: &mut W) -> std::io::Result<usize>\n\n where\n\n W: std::io::Write,\n\n {\n\n writer.write_all(&self.home_domain.to_be_bytes())?;\n\n writer.write_all(self.previous_root.as_ref())?;\n\n writer.write_all(self.new_root.as_ref())?;\n\n Ok(4 + 32 + 32)\n", "file_path": "rust/optics-core/src/types/update.rs", "rank": 35, "score": 90964.52563586953 }, { "content": " previous_root,\n\n new_root,\n\n })\n\n }\n\n}\n\n\n\nimpl Update {\n\n fn signing_hash(&self) -> H256 {\n\n // sign:\n\n // domain(home_domain) || previous_root || new_root\n\n H256::from_slice(\n\n Keccak256::new()\n\n .chain(home_domain_hash(self.home_domain))\n\n .chain(self.previous_root)\n\n .chain(self.new_root)\n\n .finalize()\n\n .as_slice(),\n\n )\n\n }\n\n\n", "file_path": "rust/optics-core/src/types/update.rs", "rank": 36, "score": 90964.01942477417 }, { "content": "#[async_trait]\n\npub trait ConnectionManager: Send + Sync + std::fmt::Debug {\n\n /// Return the contract's local domain ID\n\n fn local_domain(&self) -> u32;\n\n\n\n /// Returns true if provided address is enrolled replica\n\n async fn is_replica(&self, address: OpticsIdentifier) -> Result<bool, ChainCommunicationError>;\n\n\n\n /// Returns permission for address at given domain\n\n async fn watcher_permission(\n\n &self,\n\n address: OpticsIdentifier,\n\n domain: u32,\n\n ) -> Result<bool, ChainCommunicationError>;\n\n\n\n /// onlyOwner function. 
Enrolls replica at given domain chain.\n\n async fn owner_enroll_replica(\n\n &self,\n\n replica: OpticsIdentifier,\n\n domain: u32,\n\n ) -> Result<TxOutcome, ChainCommunicationError>;\n", "file_path": "rust/optics-core/src/traits/xapp.rs", "rank": 37, "score": 90440.5835158432 }, { "content": "type Result<T> = std::result::Result<T, DbError>;\n\n\n\nimpl DB {\n\n /// Opens db at `db_path` and creates if missing\n\n #[tracing::instrument(err)]\n\n pub fn from_path(db_path: &str) -> color_eyre::Result<DB> {\n\n // Canonicalize ensures existence, so we have to do that, then extend\n\n let mut path = Path::new(\".\").canonicalize()?;\n\n path.extend(&[db_path]);\n\n\n\n match path.is_dir() {\n\n true => info!(\n\n \"Opening existing db at {path}\",\n\n path = path.to_str().unwrap()\n\n ),\n\n false => info!(\"Creating db at {path}\", path = path.to_str().unwrap()),\n\n }\n\n\n\n let mut opts = Options::default();\n\n opts.create_if_missing(true);\n", "file_path": "rust/optics-core/src/db/mod.rs", "rank": 38, "score": 90326.04909979491 }, { "content": "#[async_trait]\n\npub trait Replica: Common + Send + Sync + std::fmt::Debug {\n\n /// Return the replica domain ID\n\n fn local_domain(&self) -> u32;\n\n\n\n /// Return the domain of the replica's linked home\n\n async fn remote_domain(&self) -> Result<u32, ChainCommunicationError>;\n\n\n\n /// Dispatch a transaction to prove inclusion of some leaf in the replica.\n\n async fn prove(&self, proof: &Proof) -> Result<TxOutcome, ChainCommunicationError>;\n\n\n\n /// Trigger processing of a message\n\n async fn process(&self, message: &OpticsMessage) -> Result<TxOutcome, ChainCommunicationError>;\n\n\n\n /// Prove a leaf in the replica and then process its message\n\n async fn prove_and_process(\n\n &self,\n\n message: &OpticsMessage,\n\n proof: &Proof,\n\n ) -> Result<TxOutcome, ChainCommunicationError> {\n\n self.prove(proof).await?;\n", "file_path": "rust/optics-core/src/traits/replica.rs", "rank": 39, "score": 
89313.1163834462 }, { "content": "#[async_trait]\n\npub trait Home: Common + Send + Sync + std::fmt::Debug {\n\n /// Return the domain ID\n\n fn local_domain(&self) -> u32;\n\n\n\n /// Run a task indexing the chain (if necessary)\n\n fn index(\n\n &self,\n\n from_height: u32,\n\n chunk_size: u32,\n\n indexed_height: prometheus::IntGauge,\n\n ) -> Instrumented<JoinHandle<Result<()>>>;\n\n\n\n /// Return the domain hash\n\n fn home_domain_hash(&self) -> H256 {\n\n home_domain_hash(self.local_domain())\n\n }\n\n\n\n /// Fetch the message to destination at the nonce (or error).\n\n /// This should fetch events from the chain API.\n\n ///\n", "file_path": "rust/optics-core/src/traits/home.rs", "rank": 40, "score": 89313.1163834462 }, { "content": "use crate::db::{DbError, DB};\n\nuse crate::{Decode, Encode};\n\nuse color_eyre::Result;\n\n\n\n/// DB handle for storing data tied to a specific type/entity.\n\n///\n\n/// Key structure: ```<type_prefix>_<additional_prefix(es)>_<key>```\n\n#[derive(Debug, Clone)]\n\npub struct TypedDB {\n\n db: DB,\n\n type_prefix: Vec<u8>,\n\n}\n\n\n\nimpl TypedDB {\n\n /// Instantiate new `TypedDB`\n\n pub fn new(db: DB, type_prefix: impl Into<Vec<u8>>) -> Self {\n\n Self {\n\n db,\n\n type_prefix: type_prefix.into(),\n\n }\n", "file_path": "rust/optics-core/src/db/typed_db.rs", "rank": 41, "score": 89011.26351667148 }, { "content": " }\n\n\n\n /// Return reference to raw db\n\n pub fn db(&self) -> &DB {\n\n &self.db\n\n }\n\n\n\n fn full_prefix(&self, prefix: impl AsRef<[u8]>) -> Vec<u8> {\n\n let mut full_prefix = vec![];\n\n full_prefix.extend(self.type_prefix.as_ref() as &[u8]);\n\n full_prefix.extend(prefix.as_ref());\n\n full_prefix\n\n }\n\n\n\n /// Store encodable value\n\n pub fn store_encodable<V: Encode>(\n\n &self,\n\n prefix: impl AsRef<[u8]>,\n\n key: impl AsRef<[u8]>,\n\n value: &V,\n", "file_path": "rust/optics-core/src/db/typed_db.rs", "rank": 42, "score": 89001.77406819057 }, { "content": " ) -> Result<(), DbError> {\n\n 
self.db\n\n .store_encodable(&self.full_prefix(prefix), key, value)\n\n }\n\n\n\n /// Retrieve decodable value\n\n pub fn retrieve_decodable<V: Decode>(\n\n &self,\n\n prefix: impl AsRef<[u8]>,\n\n key: impl AsRef<[u8]>,\n\n ) -> Result<Option<V>, DbError> {\n\n self.db.retrieve_decodable(&self.full_prefix(prefix), key)\n\n }\n\n\n\n /// Store encodable kv pair\n\n pub fn store_keyed_encodable<K: Encode, V: Encode>(\n\n &self,\n\n prefix: impl AsRef<[u8]>,\n\n key: &K,\n\n value: &V,\n", "file_path": "rust/optics-core/src/db/typed_db.rs", "rank": 43, "score": 89001.39045265756 }, { "content": " ) -> Result<(), DbError> {\n\n self.store_encodable(prefix, key.to_vec(), value)\n\n }\n\n\n\n /// Retrieve decodable value given encodable key\n\n pub fn retrieve_keyed_decodable<K: Encode, V: Decode>(\n\n &self,\n\n prefix: impl AsRef<[u8]>,\n\n key: &K,\n\n ) -> Result<Option<V>, DbError> {\n\n self.retrieve_decodable(prefix, key.to_vec())\n\n }\n\n}\n", "file_path": "rust/optics-core/src/db/typed_db.rs", "rank": 44, "score": 88999.67558742034 }, { "content": "export class TestMessage__factory extends ContractFactory {\n\n constructor(signer?: Signer) {\n\n super(_abi, _bytecode, signer);\n\n }\n\n\n\n deploy(\n\n overrides?: Overrides & { from?: string | Promise<string> }\n\n ): Promise<TestMessage> {\n\n return super.deploy(overrides || {}) as Promise<TestMessage>;\n\n }\n\n getDeployTransaction(\n\n overrides?: Overrides & { from?: string | Promise<string> }\n\n ): TransactionRequest {\n\n return super.getDeployTransaction(overrides || {});\n\n }\n\n attach(address: string): TestMessage {\n\n return super.attach(address) as TestMessage;\n\n }\n\n connect(signer: Signer): TestMessage__factory {\n\n return super.connect(signer) as TestMessage__factory;\n\n }\n\n static readonly bytecode = _bytecode;\n\n static readonly abi = _abi;\n\n static createInterface(): TestMessageInterface {\n\n return new utils.Interface(_abi) as TestMessageInterface;\n\n }\n\n static 
connect(\n\n address: string,\n\n signerOrProvider: Signer | Provider\n\n ): TestMessage {\n\n return new Contract(address, _abi, signerOrProvider) as TestMessage;\n\n }\n", "file_path": "typescript/typechain/optics-core/factories/TestMessage__factory.ts", "rank": 45, "score": 87757.86136508414 }, { "content": "export class IMessageRecipient__factory {\n\n static readonly abi = _abi;\n\n static createInterface(): IMessageRecipientInterface {\n\n return new utils.Interface(_abi) as IMessageRecipientInterface;\n\n }\n\n static connect(\n\n address: string,\n\n signerOrProvider: Signer | Provider\n\n ): IMessageRecipient {\n\n return new Contract(address, _abi, signerOrProvider) as IMessageRecipient;\n\n }\n", "file_path": "typescript/typechain/optics-core/factories/IMessageRecipient__factory.ts", "rank": 46, "score": 87757.86136508414 }, { "content": " attach(address: string): TestMessage {\n\n return super.attach(address) as TestMessage;\n", "file_path": "typescript/typechain/optics-core/factories/TestMessage__factory.ts", "rank": 47, "score": 86484.56352671408 }, { "content": " static connect(\n\n address: string,\n\n signerOrProvider: Signer | Provider\n\n ): TestMessage {\n\n return new Contract(address, _abi, signerOrProvider) as TestMessage;\n", "file_path": "typescript/typechain/optics-core/factories/TestMessage__factory.ts", "rank": 48, "score": 86484.56352671408 }, { "content": " constructor(signer?: Signer) {\n\n super(_abi, _bytecode, signer);\n", "file_path": "typescript/typechain/optics-core/factories/TestMessage__factory.ts", "rank": 49, "score": 86484.56352671408 }, { "content": " static connect(\n\n address: string,\n\n signerOrProvider: Signer | Provider\n\n ): IMessageRecipient {\n\n return new Contract(address, _abi, signerOrProvider) as IMessageRecipient;\n", "file_path": "typescript/typechain/optics-core/factories/IMessageRecipient__factory.ts", "rank": 50, "score": 86484.56352671408 }, { "content": " deploy(\n\n overrides?: Overrides & { from?: 
string | Promise<string> }\n\n ): Promise<TestMessage> {\n\n return super.deploy(overrides || {}) as Promise<TestMessage>;\n", "file_path": "typescript/typechain/optics-core/factories/TestMessage__factory.ts", "rank": 51, "score": 86484.56352671408 }, { "content": "#[async_trait]\n\npub trait OpticsAgent: Send + Sync + std::fmt::Debug + AsRef<AgentCore> {\n\n /// The agent's name\n\n const AGENT_NAME: &'static str;\n\n\n\n /// The settings object for this agent\n\n type Settings: AsRef<Settings>;\n\n\n\n /// Instantiate the agent from the standard settings object\n\n async fn from_settings(settings: Self::Settings) -> Result<Self>\n\n where\n\n Self: Sized;\n\n\n\n /// Return a handle to the metrics registry\n\n fn metrics(&self) -> Arc<CoreMetrics> {\n\n self.as_ref().metrics.clone()\n\n }\n\n\n\n /// Return a handle to the DB\n\n fn db(&self) -> DB {\n\n self.as_ref().db.clone()\n", "file_path": "rust/optics-base/src/agent.rs", "rank": 52, "score": 85562.71289262315 }, { "content": "export class TypedMemView__factory extends ContractFactory {\n\n constructor(signer?: Signer) {\n\n super(_abi, _bytecode, signer);\n\n }\n\n\n\n deploy(\n\n overrides?: Overrides & { from?: string | Promise<string> }\n\n ): Promise<TypedMemView> {\n\n return super.deploy(overrides || {}) as Promise<TypedMemView>;\n\n }\n\n getDeployTransaction(\n\n overrides?: Overrides & { from?: string | Promise<string> }\n\n ): TransactionRequest {\n\n return super.getDeployTransaction(overrides || {});\n\n }\n\n attach(address: string): TypedMemView {\n\n return super.attach(address) as TypedMemView;\n\n }\n\n connect(signer: Signer): TypedMemView__factory {\n\n return super.connect(signer) as TypedMemView__factory;\n\n }\n\n static readonly bytecode = _bytecode;\n\n static readonly abi = _abi;\n\n static createInterface(): TypedMemViewInterface {\n\n return new utils.Interface(_abi) as TypedMemViewInterface;\n\n }\n\n static connect(\n\n address: string,\n\n signerOrProvider: Signer | 
Provider\n\n ): TypedMemView {\n\n return new Contract(address, _abi, signerOrProvider) as TypedMemView;\n\n }\n", "file_path": "typescript/typechain/optics-core/factories/TypedMemView__factory.ts", "rank": 53, "score": 85424.91790403146 }, { "content": " static createInterface(): TestMessageInterface {\n\n return new utils.Interface(_abi) as TestMessageInterface;\n", "file_path": "typescript/typechain/optics-core/factories/TestMessage__factory.ts", "rank": 54, "score": 85249.24842337258 }, { "content": " static createInterface(): IMessageRecipientInterface {\n\n return new utils.Interface(_abi) as IMessageRecipientInterface;\n", "file_path": "typescript/typechain/optics-core/factories/IMessageRecipient__factory.ts", "rank": 55, "score": 85249.24842337258 }, { "content": " attach(address: string): TypedMemView {\n\n return super.attach(address) as TypedMemView;\n", "file_path": "typescript/typechain/optics-core/factories/TypedMemView__factory.ts", "rank": 56, "score": 84223.80041861488 }, { "content": " constructor(signer?: Signer) {\n\n super(_abi, _bytecode, signer);\n", "file_path": "typescript/typechain/optics-core/factories/TypedMemView__factory.ts", "rank": 57, "score": 84223.80041861488 }, { "content": " deploy(\n\n overrides?: Overrides & { from?: string | Promise<string> }\n\n ): Promise<TypedMemView> {\n\n return super.deploy(overrides || {}) as Promise<TypedMemView>;\n", "file_path": "typescript/typechain/optics-core/factories/TypedMemView__factory.ts", "rank": 58, "score": 84223.80041861488 }, { "content": " static connect(\n\n address: string,\n\n signerOrProvider: Signer | Provider\n\n ): TypedMemView {\n\n return new Contract(address, _abi, signerOrProvider) as TypedMemView;\n", "file_path": "typescript/typechain/optics-core/factories/TypedMemView__factory.ts", "rank": 59, "score": 84223.80041861488 }, { "content": " getDeployTransaction(\n\n overrides?: Overrides & { from?: string | Promise<string> }\n\n ): TransactionRequest {\n\n return 
super.getDeployTransaction(overrides || {});\n", "file_path": "typescript/typechain/optics-core/factories/TestMessage__factory.ts", "rank": 60, "score": 84050.17583873971 }, { "content": " static createInterface(): TypedMemViewInterface {\n\n return new utils.Interface(_abi) as TypedMemViewInterface;\n", "file_path": "typescript/typechain/optics-core/factories/TypedMemView__factory.ts", "rank": 61, "score": 83057.34047909435 }, { "content": " getDeployTransaction(\n\n overrides?: Overrides & { from?: string | Promise<string> }\n\n ): TransactionRequest {\n\n return super.getDeployTransaction(overrides || {});\n", "file_path": "typescript/typechain/optics-core/factories/TypedMemView__factory.ts", "rank": 62, "score": 81924.00413357398 }, { "content": "export interface TypedEvent<EventArgs extends Result> extends Event {\n\n args: EventArgs;\n", "file_path": "typescript/typechain/optics-core/commons.ts", "rank": 63, "score": 81910.7256211112 }, { "content": " message(oldRoot: types.HexString, newRoot: types.HexString) {\n\n return ethers.utils.concat([this.domainHash(), oldRoot, newRoot]);\n", "file_path": "typescript/optics-tests/lib/core.ts", "rank": 64, "score": 81762.31733765772 }, { "content": "const formatMessage = (\n\n localDomain: types.Domain,\n\n senderAddr: types.Address,\n\n sequence: number,\n\n destinationDomain: types.Domain,\n\n recipientAddr: types.Address,\n\n body: types.HexString,\n\n): string => {\n\n senderAddr = ethersAddressToBytes32(senderAddr);\n\n recipientAddr = ethersAddressToBytes32(recipientAddr);\n\n\n\n return ethers.utils.solidityPack(\n\n ['uint32', 'bytes32', 'uint32', 'uint32', 'bytes32', 'bytes'],\n\n [localDomain, senderAddr, sequence, destinationDomain, recipientAddr, body],\n\n );\n", "file_path": "typescript/optics-tests/lib/core.ts", "rank": 65, "score": 81762.31733765772 }, { "content": "function messageHash(message: types.HexString): string {\n\n return ethers.utils.solidityKeccak256(['bytes'], [message]);\n", 
"file_path": "typescript/optics-tests/lib/core.ts", "rank": 66, "score": 81762.31733765772 }, { "content": "export interface TypedEventFilter<_EventArgsArray, _EventArgsObject>\n", "file_path": "typescript/typechain/optics-core/commons.ts", "rank": 67, "score": 80312.50754065125 }, { "content": "function formatSetRouter(domain: types.Domain, address: types.Address): string {\n\n return ethers.utils.solidityPack(\n\n ['bytes1', 'uint32', 'bytes32'],\n\n [GovernanceMessage.SETROUTER, domain, address],\n\n );\n", "file_path": "typescript/optics-tests/lib/core.ts", "rank": 68, "score": 80288.51572802491 }, { "content": "const _abi = [\n\n {\n\n inputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"body\",\n\n outputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n stateMutability: \"view\",\n\n type: \"function\",\n\n },\n\n {\n\n inputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"destination\",\n\n outputs: [\n\n {\n\n internalType: \"uint32\",\n\n name: \"\",\n\n type: \"uint32\",\n\n },\n\n ],\n\n stateMutability: \"pure\",\n\n type: \"function\",\n\n },\n\n {\n\n inputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"leaf\",\n\n outputs: [\n\n {\n\n internalType: \"bytes32\",\n\n name: \"\",\n\n type: \"bytes32\",\n\n },\n\n ],\n\n stateMutability: \"view\",\n\n type: \"function\",\n\n },\n\n {\n\n inputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"nonce\",\n\n outputs: [\n\n {\n\n internalType: \"uint32\",\n\n name: \"\",\n\n type: \"uint32\",\n\n },\n\n ],\n\n stateMutability: \"pure\",\n\n type: \"function\",\n\n },\n\n {\n\n inputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"origin\",\n\n outputs: [\n\n 
{\n\n internalType: \"uint32\",\n\n name: \"\",\n\n type: \"uint32\",\n\n },\n\n ],\n\n stateMutability: \"pure\",\n\n type: \"function\",\n\n },\n\n {\n\n inputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"recipient\",\n\n outputs: [\n\n {\n\n internalType: \"bytes32\",\n\n name: \"\",\n\n type: \"bytes32\",\n\n },\n\n ],\n\n stateMutability: \"pure\",\n\n type: \"function\",\n\n },\n\n {\n\n inputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"recipientAddress\",\n\n outputs: [\n\n {\n\n internalType: \"address\",\n\n name: \"\",\n\n type: \"address\",\n\n },\n\n ],\n\n stateMutability: \"pure\",\n\n type: \"function\",\n\n },\n\n {\n\n inputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"sender\",\n\n outputs: [\n\n {\n\n internalType: \"bytes32\",\n\n name: \"\",\n\n type: \"bytes32\",\n\n },\n\n ],\n\n stateMutability: \"pure\",\n\n type: \"function\",\n\n },\n", "file_path": "typescript/typechain/optics-core/factories/TestMessage__factory.ts", "rank": 69, "score": 78632.73604786611 }, { "content": "const _bytecode =\n", "file_path": "typescript/typechain/optics-core/factories/TestMessage__factory.ts", "rank": 70, "score": 78632.73604786611 }, { "content": "const _abi = [\n\n {\n\n inputs: [\n\n {\n\n internalType: \"uint32\",\n\n name: \"_origin\",\n\n type: \"uint32\",\n\n },\n\n {\n\n internalType: \"bytes32\",\n\n name: \"_sender\",\n\n type: \"bytes32\",\n\n },\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"handle\",\n\n outputs: [],\n\n stateMutability: \"nonpayable\",\n\n type: \"function\",\n\n },\n", "file_path": "typescript/typechain/optics-core/factories/IMessageRecipient__factory.ts", "rank": 71, "score": 78632.73604786611 }, { "content": "const _bytecode =\n", "file_path": 
"typescript/typechain/optics-core/factories/TypedMemView__factory.ts", "rank": 72, "score": 77296.1479546676 }, { "content": "const _abi = [\n\n {\n\n inputs: [],\n\n name: \"NULL\",\n\n outputs: [\n\n {\n\n internalType: \"bytes29\",\n\n name: \"\",\n\n type: \"bytes29\",\n\n },\n\n ],\n\n stateMutability: \"view\",\n\n type: \"function\",\n\n },\n", "file_path": "typescript/typechain/optics-core/factories/TypedMemView__factory.ts", "rank": 73, "score": 77296.1479546676 }, { "content": "fn main() -> Result<()> {\n\n tokio::runtime::Builder::new_current_thread()\n\n .enable_all()\n\n .build()\n\n .unwrap()\n\n .block_on(_main())\n\n}\n", "file_path": "rust/agents/relayer/src/main.rs", "rank": 74, "score": 70736.22345364737 }, { "content": "fn main() -> Result<()> {\n\n color_eyre::install()?;\n\n\n\n tokio::runtime::Builder::new_current_thread()\n\n .enable_all()\n\n .build()\n\n .unwrap()\n\n .block_on(_main())\n\n}\n", "file_path": "rust/kms-cli/src/main.rs", "rank": 75, "score": 70736.22345364737 }, { "content": "fn main() -> Result<()> {\n\n tokio::runtime::Builder::new_current_thread()\n\n .enable_all()\n\n .build()\n\n .unwrap()\n\n .block_on(_main())\n\n}\n", "file_path": "rust/agents/processor/src/main.rs", "rank": 76, "score": 70736.22345364737 }, { "content": "fn main() -> Result<()> {\n\n tokio::runtime::Builder::new_current_thread()\n\n .enable_all()\n\n .build()\n\n .unwrap()\n\n .block_on(_main())\n\n}\n", "file_path": "rust/agents/updater/src/main.rs", "rank": 77, "score": 70736.22345364737 }, { "content": "fn main() -> Result<()> {\n\n tokio::runtime::Builder::new_current_thread()\n\n .enable_all()\n\n .build()\n\n .unwrap()\n\n .block_on(_main())\n\n}\n", "file_path": "rust/agents/watcher/src/main.rs", "rank": 78, "score": 70736.22345364737 }, { "content": "/* Autogenerated file. Do not edit manually. 
*/\n\n/* tslint:disable */\n\n/* eslint-disable */\n\n\n\nimport { Signer, utils, Contract, ContractFactory, Overrides } from \"ethers\";\n\nimport { Provider, TransactionRequest } from \"@ethersproject/providers\";\n\nimport type { TestMessage, TestMessageInterface } from \"../TestMessage\";\n\n\n\nconst _abi = [\n\n {\n\n inputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"body\",\n\n outputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n stateMutability: \"view\",\n\n type: \"function\",\n\n },\n\n {\n\n inputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"destination\",\n\n outputs: [\n\n {\n\n internalType: \"uint32\",\n\n name: \"\",\n\n type: \"uint32\",\n\n },\n\n ],\n\n stateMutability: \"pure\",\n\n type: \"function\",\n\n },\n\n {\n\n inputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"leaf\",\n\n outputs: [\n\n {\n\n internalType: \"bytes32\",\n\n name: \"\",\n\n type: \"bytes32\",\n\n },\n\n ],\n\n stateMutability: \"view\",\n\n type: \"function\",\n\n },\n\n {\n\n inputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"nonce\",\n\n outputs: [\n\n {\n\n internalType: \"uint32\",\n\n name: \"\",\n\n type: \"uint32\",\n\n },\n\n ],\n\n stateMutability: \"pure\",\n\n type: \"function\",\n\n },\n\n {\n\n inputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"origin\",\n\n outputs: [\n\n {\n\n internalType: \"uint32\",\n\n name: \"\",\n\n type: \"uint32\",\n\n },\n\n ],\n\n stateMutability: \"pure\",\n\n type: \"function\",\n\n },\n\n {\n\n inputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"recipient\",\n\n outputs: [\n\n {\n\n internalType: 
\"bytes32\",\n\n name: \"\",\n\n type: \"bytes32\",\n\n },\n\n ],\n\n stateMutability: \"pure\",\n\n type: \"function\",\n\n },\n\n {\n\n inputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"recipientAddress\",\n\n outputs: [\n\n {\n\n internalType: \"address\",\n\n name: \"\",\n\n type: \"address\",\n\n },\n\n ],\n\n stateMutability: \"pure\",\n\n type: \"function\",\n\n },\n\n {\n\n inputs: [\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"sender\",\n\n outputs: [\n\n {\n\n internalType: \"bytes32\",\n\n name: \"\",\n\n type: \"bytes32\",\n\n },\n\n ],\n\n stateMutability: \"pure\",\n\n type: \"function\",\n\n },\n\n];\n\n\n\nconst _bytecode =\n\n \"0x608060405234801561001057600080fd5b50611605806100206000396000f3fe608060405234801561001057600080fd5b50600436106100885760003560e01c8063c97c703a1161005b578063c97c703a14610350578063cb3eb0e11461046b578063d7a7a72c14610511578063f45387ba146105b757610088565b80634e7650041461008d5780636dc3c4f71461014c578063985a5c3114610204578063c81aa9c8146102aa575b600080fd5b610133600480360360208110156100a357600080fd5b8101906020810181356401000000008111156100be57600080fd5b8201836020820111156100d057600080fd5b803590602001918460018302840111640100000000831117156100f257600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600092019190915250929550610686945050505050565b6040805163ffffffff9092168252519081900360200190f35b6101f26004803603602081101561016257600080fd5b81019060208101813564010000000081111561017d57600080fd5b82018360208201111561018f57600080fd5b803590602001918460018302840111640100000000831117156101b157600080fd5b91908080601f0160208091040260200160405190810160405280939291908181526020018383808284376000920191909152509295506106c4945050505050565b60408051918252519081900360200190f35b6101f26004803603602081101561021a57600080fd5b81019060208101813564010000000081111561023557600080fd5b820183602082011115610
24757600080fd5b8035906020019184600183028401116401000000008311171561026957600080fd5b91908080601f0160208091040260200160405190810160405280939291908181526020018383808284376000920191909152509295506106fa945050505050565b610133600480360360208110156102c057600080fd5b8101906020810181356401000000008111156102db57600080fd5b8201836020820111156102ed57600080fd5b8035906020019184600183028401116401000000008311171561030f57600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600092019190915250929550610730945050505050565b6103f66004803603602081101561036657600080fd5b81019060208101813564010000000081111561038157600080fd5b82018360208201111561039357600080fd5b803590602001918460018302840111640100000000831117156103b557600080fd5b91908080601f016020809104026020016040519081016040528093929190818152602001838380828437600092019190915250929550610766945050505050565b6040805160208082528351818301528351919283929083019185019080838360005b83811015610430578181015183820152602001610418565b50505050905090810190601f16801561045d5780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b6101336004803603602081101561048157600080fd5b81019060208101813564010000000081111561049c57600080fd5b8201836020820111156104ae57600080fd5b803590602001918460018302840111640100000000831117156104d057600080fd5b91908080601f0160208091040260200160405190810160405280939291908181526020018383808284376000920191909152509295506107c7945050505050565b6101f26004803603602081101561052757600080fd5b81019060208101813564010000000081111561054257600080fd5b82018360208201111561055457600080fd5b8035906020019184600183028401116401000000008311171561057657600080fd5b91908080601f0160208091040260200160405190810160405280939291908181526020018383808284376000920191909152509295506107fd945050505050565b61065d600480360360208110156105cd57600080fd5b8101906020810181356401000000008111156105e857600080fd5b8201836020820111156105fa57600080fd5b8035906020019184600183028401116401000000008311171561061c57600080fd5b91908080601f01602080910
4026020016040519081016040528093929190818152602001838380828437600092019190915250929550610833945050505050565b6040805173ffffffffffffffffffffffffffffffffffffffff9092168252519081900360200190f35b60006106bc6106958383610869565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0000001661088f565b90505b919050565b60006106bc6106d38383610869565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff000000166108c0565b60006106bc6107098383610869565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff000000166108f1565b60006106bc61073f8383610869565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00000016610922565b60606106bc6107a0610779846000610869565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00000016610953565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff000000166109c4565b60006106bc6107d68383610869565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00000016610a08565b60006106bc61080c8383610869565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00000016610a38565b60006106bc6108428383610869565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff00000016610a80565b81516000906020840161088464ffffffffff85168284610a93565b925050505b92915050565b60006106bc7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff000000831660246004610af4565b60006106bc7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff000000831660046020610b15565b60006106bc7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0000008316602c6020610b15565b60006106bc7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff000000831660286004610af4565b60006106bc604c806109867fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0000008616610cc0565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff000000861692916bffffffffffffffffffffffff91909116036000610cd4565b60606000806109d284610cc0565b6bffffffffffffffffffffffff16905060405191508192506109f78483602001610d66565b508181016020016040529052919050565b60006106bc7ffff
fffffffffffffffffffffffffffffffffffffffffffffffffffffff0000008316826004610af4565b60006106bc610a4683610a08565b610a4f846108c0565b610a588561088f565b610a6186610922565b610a6a876108f1565b610a7b610a7689610953565b6109c4565b610e9c565b60006106bc610a8e836108f1565b610ebf565b600080610aa08484610ec2565b9050604051811115610ab0575060005b80610ade577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff000000915050610aed565b610ae9858585610f34565b9150505b9392505050565b60008160200360080260ff16610b0b858585610b15565b901c949350505050565b600060ff8216610b2757506000610aed565b610b3084610cc0565b6bffffffffffffffffffffffff16610b4b8460ff8516610ec2565b1115610c2a57610b8c610b5d85610f47565b6bffffffffffffffffffffffff16610b7486610cc0565b6bffffffffffffffffffffffff16858560ff16610f5b565b6040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825283818151815260200191508051906020019080838360005b83811015610bef578181015183820152602001610bd7565b50505050905090810190601f168015610c1c5780820380516001836020036101000a031916815260200191505b509250505060405180910390fd5b60208260ff161115610c87576040517f08c379a000000000000000000000000000000000000000000000000000000000815260040180806020018281038252603a81526020018061150e603a913960400191505060405180910390fd5b600882026000610c9686610f47565b6bffffffffffffffffffffffff1690506000610cb1836110b6565b91909501511695945050505050565b60181c6bffffffffffffffffffffffff1690565b600080610ce086610f47565b6bffffffffffffffffffffffff169050610cf9866110ff565b610d0d85610d078489610ec2565b90610ec2565b1115610d3c577fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff000000915050610d5e565b610d468186610ec2565b9050610d5a8364ffffffffff168286610a93565b9150505b949350505050565b6000610d7183611129565b610dc6576040517f08c379a00000000000000000000000000000000000000000000000000000000081526004018080602001828103825260288152602001806115486028913960400191505060405180910390fd5b610dcf8361113b565b610e24576040517f08c379a0000000000000000000000000000000000000000000000000000000008
15260040180806020018281038252602b815260200180611570602b913960400191505060405180910390fd5b6000610e2f84610cc0565b6bffffffffffffffffffffffff1690506000610e4a85610f47565b6bffffffffffffffffffffffff1690506000604051905084811115610e6f5760206060fd5b8285848460045afa50610e92610e8487611178565b64ffffffffff168685610f34565b9695505050505050565b6000610eac87878787878761117e565b8051906020012090509695505050505050565b90565b8181018281101561088957604080517f08c379a000000000000000000000000000000000000000000000000000000000815260206004820152601960248201527f4f766572666c6f7720647572696e67206164646974696f6e2e00000000000000604482015290519081900360640190fd5b606092831b9190911790911b1760181b90565b60781c6bffffffffffffffffffffffff1690565b60606000610f6886611254565b9150506000610f7686611254565b9150506000610f8486611254565b9150506000610f9286611254565b91505083838383604051602001808061159b603591397fffffffffffff000000000000000000000000000000000000000000000000000060d087811b821660358401527f2077697468206c656e6774682030780000000000000000000000000000000000603b84015286901b16604a82015260500160216114ed82397fffffffffffff000000000000000000000000000000000000000000000000000060d094851b811660218301527f2077697468206c656e677468203078000000000000000000000000000000000060278301529290931b9091166036830152507f2e00000000000000000000000000000000000000000000000000000000000000603c82015260408051601d818403018152603d90920190529b9a5050505050505050505050565b7f80000000000000000000000000000000000000000000000000000000000000007fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff9091011d90565b600061110a82610cc0565b61111383610f47565b016bffffffffffffffffffffffff169050919050565b600061113482611328565b1592915050565b600061114682611178565b64ffffffffff1664ffffffffff1415611161575060006106bf565b600061116c836110ff565b60405110199392505050565b60d81c90565b6060868686868686604051602001808763ffffffff1660e01b81526004018681526020018563ffffffff1660e01b81526004018463ffffffff1660e01b815260040183815260200182805190602001908083835b6020831061120f57805182527ff
fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffe090920191602091820191016111d2565b6001836020036101000a038019825116818451168082178552505050505050905001965050505050505060405160208183030381529060405290509695505050505050565b600080601f5b600f8160ff1611156112bc5760ff600882021684901c61127981611350565b61ffff16841793508160ff1660101461129457601084901b93505b507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0161125a565b50600f5b60ff8160ff1610156113225760ff600882021684901c6112df81611350565b61ffff16831792508160ff166000146112fa57601083901b92505b507fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff016112c0565b50915091565b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0000009081161490565b600061136260048360ff16901c611380565b60ff161760081b62ffff001661137782611380565b60ff1617919050565b600060f08083179060ff8216141561139c5760309150506106bf565b8060ff1660f114156113b25760319150506106bf565b8060ff1660f214156113c85760329150506106bf565b8060ff1660f314156113de5760339150506106bf565b8060ff1660f414156113f45760349150506106bf565b8060ff1660f5141561140a5760359150506106bf565b8060ff1660f614156114205760369150506106bf565b8060ff1660f714156114365760379150506106bf565b8060ff1660f8141561144c5760389150506106bf565b8060ff1660f914156114625760399150506106bf565b8060ff1660fa14156114785760619150506106bf565b8060ff1660fb141561148e5760629150506106bf565b8060ff1660fc14156114a45760639150506106bf565b8060ff1660fd14156114ba5760649150506106bf565b8060ff1660fe14156114d05760659150506106bf565b8060ff1660ff14156114e65760669150506106bf565b5091905056fe2e20417474656d7074656420746f20696e646578206174206f666673657420307854797065644d656d566965772f696e646578202d20417474656d7074656420746f20696e646578206d6f7265207468616e20333220627974657354797065644d656d566965772f636f7079546f202d204e756c6c20706f696e74657220646572656654797065644d656d566965772f636f7079546f202d20496e76616c696420706f696e74657220646572656654797065644d656d566965772f696e646578202d204f76657272616e2074686520766965772e20536c696365206
973206174203078a26469706673582212204cc30bdc44d2a7f6df9a10985fa9646d585b464e2755e5d8c36597ff946f514464736f6c63430007060033\";\n\n\n\nexport class TestMessage__factory extends ContractFactory {\n\n constructor(signer?: Signer) {\n\n super(_abi, _bytecode, signer);\n\n }\n\n\n\n deploy(\n\n overrides?: Overrides & { from?: string | Promise<string> }\n\n ): Promise<TestMessage> {\n\n return super.deploy(overrides || {}) as Promise<TestMessage>;\n\n }\n\n getDeployTransaction(\n\n overrides?: Overrides & { from?: string | Promise<string> }\n\n ): TransactionRequest {\n\n return super.getDeployTransaction(overrides || {});\n\n }\n\n attach(address: string): TestMessage {\n\n return super.attach(address) as TestMessage;\n\n }\n\n connect(signer: Signer): TestMessage__factory {\n\n return super.connect(signer) as TestMessage__factory;\n\n }\n\n static readonly bytecode = _bytecode;\n\n static readonly abi = _abi;\n\n static createInterface(): TestMessageInterface {\n\n return new utils.Interface(_abi) as TestMessageInterface;\n\n }\n\n static connect(\n\n address: string,\n\n signerOrProvider: Signer | Provider\n\n ): TestMessage {\n\n return new Contract(address, _abi, signerOrProvider) as TestMessage;\n\n }\n\n}\n", "file_path": "typescript/typechain/optics-core/factories/TestMessage__factory.ts", "rank": 79, "score": 69342.99355995395 }, { "content": "/* Autogenerated file. Do not edit manually. 
*/\n\n/* tslint:disable */\n\n/* eslint-disable */\n\n\n\nimport { Contract, Signer, utils } from \"ethers\";\n\nimport { Provider } from \"@ethersproject/providers\";\n\nimport type {\n\n IMessageRecipient,\n\n IMessageRecipientInterface,\n\n} from \"../IMessageRecipient\";\n\n\n\nconst _abi = [\n\n {\n\n inputs: [\n\n {\n\n internalType: \"uint32\",\n\n name: \"_origin\",\n\n type: \"uint32\",\n\n },\n\n {\n\n internalType: \"bytes32\",\n\n name: \"_sender\",\n\n type: \"bytes32\",\n\n },\n\n {\n\n internalType: \"bytes\",\n\n name: \"_message\",\n\n type: \"bytes\",\n\n },\n\n ],\n\n name: \"handle\",\n\n outputs: [],\n\n stateMutability: \"nonpayable\",\n\n type: \"function\",\n\n },\n\n];\n\n\n\nexport class IMessageRecipient__factory {\n\n static readonly abi = _abi;\n\n static createInterface(): IMessageRecipientInterface {\n\n return new utils.Interface(_abi) as IMessageRecipientInterface;\n\n }\n\n static connect(\n\n address: string,\n\n signerOrProvider: Signer | Provider\n\n ): IMessageRecipient {\n\n return new Contract(address, _abi, signerOrProvider) as IMessageRecipient;\n\n }\n\n}\n", "file_path": "typescript/typechain/optics-core/factories/IMessageRecipient__factory.ts", "rank": 80, "score": 69342.99355995395 }, { "content": "/* Autogenerated file. Do not edit manually. 
*/\n\n/* tslint:disable */\n\n/* eslint-disable */\n\n\n\nimport { Signer, utils, Contract, ContractFactory, Overrides } from \"ethers\";\n\nimport { Provider, TransactionRequest } from \"@ethersproject/providers\";\n\nimport type { TypedMemView, TypedMemViewInterface } from \"../TypedMemView\";\n\n\n\nconst _abi = [\n\n {\n\n inputs: [],\n\n name: \"NULL\",\n\n outputs: [\n\n {\n\n internalType: \"bytes29\",\n\n name: \"\",\n\n type: \"bytes29\",\n\n },\n\n ],\n\n stateMutability: \"view\",\n\n type: \"function\",\n\n },\n\n];\n\n\n\nconst _bytecode =\n\n \"0x60cd610025600b82828239805160001a60731461001857fe5b30600052607381538281f3fe730000000000000000000000000000000000000000301460806040526004361060335760003560e01c8063f26be3fc146038575b600080fd5b603e6073565b604080517fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0000009092168252519081900360200190f35b7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0000008156fea2646970667358221220b07daa2cfa732a0712e14f333f7fb26a0736db4a4fce155c515a5dd1fc062de564736f6c63430007060033\";\n\n\n\nexport class TypedMemView__factory extends ContractFactory {\n\n constructor(signer?: Signer) {\n\n super(_abi, _bytecode, signer);\n\n }\n\n\n\n deploy(\n\n overrides?: Overrides & { from?: string | Promise<string> }\n\n ): Promise<TypedMemView> {\n\n return super.deploy(overrides || {}) as Promise<TypedMemView>;\n\n }\n\n getDeployTransaction(\n\n overrides?: Overrides & { from?: string | Promise<string> }\n\n ): TransactionRequest {\n\n return super.getDeployTransaction(overrides || {});\n\n }\n\n attach(address: string): TypedMemView {\n\n return super.attach(address) as TypedMemView;\n\n }\n\n connect(signer: Signer): TypedMemView__factory {\n\n return super.connect(signer) as TypedMemView__factory;\n\n }\n\n static readonly bytecode = _bytecode;\n\n static readonly abi = _abi;\n\n static createInterface(): TypedMemViewInterface {\n\n return new utils.Interface(_abi) as TypedMemViewInterface;\n\n }\n\n static 
connect(\n\n address: string,\n\n signerOrProvider: Signer | Provider\n\n ): TypedMemView {\n\n return new Contract(address, _abi, signerOrProvider) as TypedMemView;\n\n }\n\n}\n", "file_path": "typescript/typechain/optics-core/factories/TypedMemView__factory.ts", "rank": 81, "score": 68315.87372212684 }, { "content": "fn init_kms(region: String) {\n\n // setup KMS\n\n let client =\n\n rusoto_core::Client::new_with(EnvironmentProvider::default(), HttpClient::new().unwrap());\n\n if KMS_CLIENT\n\n .set(KmsClient::new_with_client(\n\n client,\n\n region.parse().expect(\"invalid region\"),\n\n ))\n\n .is_err()\n\n {\n\n panic!(\"couldn't set cell\")\n\n }\n\n}\n\n\n\n#[derive(Clap)]\n\npub struct Tx {\n\n // TX\n\n /// The TX value (in wei)\n\n #[clap(short, long)]\n", "file_path": "rust/kms-cli/src/main.rs", "rank": 82, "score": 65043.70279394431 }, { "content": "fn prep_tx_request(opts: &Tx) -> TransactionRequest {\n\n let tx_req = TransactionRequest::default().to(opts.to);\n\n\n\n // These swallow parse errors\n\n let tx_req = apply_if!(\n\n tx_req,\n\n data,\n\n opts.data.clone().and_then(|data| hex::decode(&data).ok())\n\n );\n\n let tx_req = apply_if!(\n\n tx_req,\n\n value,\n\n opts.value\n\n .clone()\n\n .and_then(|value| U256::from_dec_str(&value).ok())\n\n );\n\n\n\n let tx_req = apply_if!(tx_req, opts.nonce);\n\n let tx_req = apply_if!(tx_req, opts.gas);\n\n\n", "file_path": "rust/kms-cli/src/main.rs", "rank": 83, "score": 59681.874603834745 }, { "content": "export class DetailsMessage extends BridgeMessage {\n\n action: Details;\n\n\n\n constructor(\n\n receipt: ContractReceipt,\n\n parsed: ParsedDetailsMessage,\n\n context: OpticsContext,\n\n ) {\n\n super(receipt, parsed.token, context, true);\n\n this.action = parsed.action;\n\n }\n\n\n\n get name(): string {\n\n return this.action.name;\n\n }\n\n\n\n get symbol(): string {\n\n return this.action.symbol;\n\n }\n\n\n\n get decimals(): number {\n\n return this.action.decimals;\n\n }\n", "file_path": 
"typescript/optics-provider/src/optics/messages/BridgeMessage.ts", "rank": 93, "score": 57330.25147082432 }, { "content": "export class OpticsMessage {\n\n readonly receipt: ContractReceipt;\n\n readonly event: DispatchEvent;\n\n readonly messageHash: string;\n\n readonly leafIndex: BigNumber;\n\n readonly destinationAndNonce: BigNumber;\n\n readonly committedRoot: string;\n\n readonly message: ParsedMessage;\n\n\n\n protected context: OpticsContext;\n\n\n\n constructor(receipt: ContractReceipt, context: OpticsContext) {\n\n this.receipt = receipt;\n\n\n\n // find the first dispatch log by attempting to parse them\n\n let event;\n\n const iface = new core.Home__factory().interface;\n\n for (const log of receipt.logs) {\n\n let parsed: LogDescription;\n\n try {\n\n parsed = iface.parseLog(log);\n\n } catch (e) {\n\n continue;\n\n }\n\n if (parsed.name === 'Dispatch') {\n\n event = parsed as unknown as DispatchEvent;\n\n }\n\n }\n\n\n\n if (!event) {\n\n throw new Error('No matching event found');\n\n }\n\n\n\n this.event = event;\n\n\n\n this.messageHash = event.args.messageHash;\n\n this.leafIndex = event.args.leafIndex;\n\n this.destinationAndNonce = event.args.destinationAndNonce;\n\n this.committedRoot = event.args.committedRoot;\n\n this.message = parseMessage(event.args.message);\n\n\n\n this.context = context;\n\n }\n\n\n\n async status(): Promise<MessageStatus> {\n\n const replica = this.context.getReplicaFor(this.from, this.destination);\n\n if (!replica) {\n\n throw new Error(\n\n `No replica on ${this.destination} for home ${this.from}`,\n\n );\n\n }\n\n\n\n return await replica.messages(this.messageHash);\n\n }\n\n\n\n /// Returns true when the message is delivered\n\n async delivered(): Promise<boolean> {\n\n const status = await this.status();\n\n return status === MessageStatus.Processed;\n\n }\n\n\n\n /// Resolves when the message has been delivered.\n\n /// May never resolve. 
May take hours to resolve.\n\n async wait(opts?: { pollTime?: number }): Promise<void> {\n\n const interval = opts?.pollTime ?? 5000;\n\n while (true) {\n\n if (await this.delivered()) {\n\n return;\n\n }\n\n await delay(interval);\n\n }\n\n }\n\n\n\n get from(): number {\n\n return this.message.from;\n\n }\n\n\n\n get origin(): number {\n\n return this.from;\n\n }\n\n\n\n get sender(): string {\n\n return this.message.sender;\n\n }\n\n\n\n get nonce(): number {\n\n return this.message.nonce;\n\n }\n\n\n\n get destination(): number {\n\n return this.message.destination;\n\n }\n\n\n\n get recipient(): string {\n\n return this.message.recipient;\n\n }\n\n\n\n get body(): string {\n\n return this.message.body;\n\n }\n\n\n\n get transactionHash(): string {\n\n return this.receipt.transactionHash;\n\n }\n", "file_path": "typescript/optics-provider/src/optics/messages/OpticsMessage.ts", "rank": 94, "score": 57330.25147082432 }, { "content": " get to(): string {\n\n return this.action.to;\n", "file_path": "typescript/optics-provider/src/optics/messages/BridgeMessage.ts", "rank": 95, "score": 57330.25147082432 }, { "content": "export class TransferMessage extends BridgeMessage {\n\n action: Transfer;\n\n\n\n constructor(\n\n receipt: ContractReceipt,\n\n parsed: ParsedTransferMessage,\n\n context: OpticsContext,\n\n ) {\n\n super(receipt, parsed.token, context, true);\n\n this.action = parsed.action;\n\n }\n\n\n\n async currentlyPrefilled(): Promise<boolean> {\n\n const bridge = this.context.mustGetBridge(this.destination);\n\n const lpAddress = await bridge.bridgeRouter.liquidityProvider(\n\n this.messageHash,\n\n );\n\n if (lpAddress !== ethers.constants.AddressZero) {\n\n return true;\n\n }\n\n return false;\n\n }\n\n\n\n get amount(): BigNumber {\n\n return this.action.amount;\n\n }\n\n\n\n get to(): string {\n\n return this.action.to;\n\n }\n", "file_path": "typescript/optics-provider/src/optics/messages/BridgeMessage.ts", "rank": 96, "score": 57330.25147082432 }, { 
"content": "class BridgeMessage extends OpticsMessage {\n\n readonly token: TokenIdentifier;\n\n\n\n readonly fromBridge: BridgeContracts;\n\n readonly toBridge: BridgeContracts;\n\n\n\n constructor(\n\n receipt: ContractReceipt,\n\n token: TokenIdentifier,\n\n context: OpticsContext,\n\n callerKnowsWhatTheyAreDoing: boolean,\n\n ) {\n\n if (!callerKnowsWhatTheyAreDoing) {\n\n throw new Error('Use `fromReceipt` to instantiate');\n\n }\n\n super(receipt, context);\n\n\n\n const fromBridge = context.mustGetBridge(this.message.from);\n\n const toBridge = context.mustGetBridge(this.message.destination);\n\n\n\n this.fromBridge = fromBridge;\n\n this.toBridge = toBridge;\n\n this.token = token;\n\n }\n\n\n\n static fromReceipt(\n\n receipt: ethers.ContractReceipt,\n\n context: OpticsContext,\n\n ): TransferMessage | DetailsMessage | RequestDetailsMessage {\n\n // kinda hate this but ok\n\n const oMessage = new OpticsMessage(receipt, context);\n\n\n\n let event = oMessage.event;\n\n\n\n const parsedEvent = parseMessage(event.args.message);\n\n const parsed = parseBody(parsedEvent.body);\n\n\n\n switch (parsed.action.action) {\n\n case 'transfer':\n\n return new TransferMessage(\n\n receipt,\n\n parsed as ParsedTransferMessage,\n\n context,\n\n );\n\n case 'details':\n\n return new DetailsMessage(\n\n receipt,\n\n parsed as ParsedDetailsMessage,\n\n context,\n\n );\n\n case 'requestDetails':\n\n return new RequestDetailsMessage(\n\n receipt,\n\n parsed as ParsedRequestDetailsMesasage,\n\n context,\n\n );\n\n }\n\n }\n\n\n\n async asset(): Promise<ResolvedTokenInfo> {\n\n return await this.context.tokenRepresentations(this.token);\n\n }\n\n\n\n // Get the asset at the orgin\n\n async assetAtOrigin(): Promise<xapps.ERC20 | undefined> {\n\n return (await this.asset()).tokens.get(this.origin);\n\n }\n\n\n\n // Get the asset at the destination\n\n async assetAtDestination(): Promise<xapps.ERC20 | undefined> {\n\n return (await this.asset()).tokens.get(this.destination);\n\n 
}\n", "file_path": "typescript/optics-provider/src/optics/messages/BridgeMessage.ts", "rank": 97, "score": 57330.25147082432 }, { "content": "pub struct Relayer {\n\n duration: u64,\n\n core: AgentCore,\n\n}\n\n\n\nimpl AsRef<AgentCore> for Relayer {\n\n fn as_ref(&self) -> &AgentCore {\n\n &self.core\n\n }\n\n}\n\n\n\n#[allow(clippy::unit_arg)]\n\nimpl Relayer {\n\n /// Instantiate a new relayer\n\n pub fn new(duration: u64, core: AgentCore) -> Self {\n\n Self { duration, core }\n\n }\n\n}\n\n\n\n#[async_trait]\n", "file_path": "rust/agents/relayer/src/relayer.rs", "rank": 98, "score": 26.781761949042014 }, { "content": "#[allow(clippy::unit_arg)]\n\nimpl OpticsAgent for Relayer {\n\n const AGENT_NAME: &'static str = \"relayer\";\n\n\n\n type Settings = Settings;\n\n\n\n async fn from_settings(settings: Self::Settings) -> Result<Self>\n\n where\n\n Self: Sized,\n\n {\n\n Ok(Self::new(\n\n settings.interval.parse().expect(\"invalid uint\"),\n\n settings.as_ref().try_into_core(\"relayer\").await?,\n\n ))\n\n }\n\n\n\n #[tracing::instrument]\n\n fn run(&self, name: &str) -> Instrumented<JoinHandle<Result<()>>> {\n\n let replica_opt = self.replica_by_name(name);\n\n let home = self.home();\n", "file_path": "rust/agents/relayer/src/relayer.rs", "rank": 99, "score": 24.157415858014588 } ]
Rust
pallets/template/src/lib.rs
Vietdung113/substrateTemplateNode
a5a9edd4280149de5ad24b4fd5a2b572d647a278
#![cfg_attr(not(feature = "std"), no_std)] pub use pallet::*; #[cfg(test)] mod mock; #[cfg(test)] mod tests; #[cfg(feature = "runtime-benchmarks")] mod benchmarking; #[frame_support::pallet] pub mod pallet { use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; #[pallet::config] pub trait Config: frame_system::Config { type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>; } #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] pub struct Pallet<T>(_); #[pallet::storage] #[pallet::getter(fn current_state)] pub type AccountToBalance<T: Config> = StorageMap<_, Blake2_128Concat, T::AccountId, u32, ValueQuery>; #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event<T: Config> { AccountCreated(T::AccountId), AccountExists(T::AccountId), TransferredCash(T::AccountId, u32, T::AccountId, u32, u32), } #[pallet::error] pub enum Error<T> { NoneValue, StorageOverflow, } #[pallet::call] impl<T: Config> Pallet<T> { #[pallet::weight(10_000 + T::DbWeight::get().writes(1))] pub fn register(origin: OriginFor<T>) -> DispatchResult { let who = ensure_signed(origin)?; if <AccountToBalance<T>>::try_get(&who).is_err() { <AccountToBalance<T>>::insert(&who, 10); Self::deposit_event(Event::AccountCreated(who)); } else { Self::deposit_event(Event::AccountExists(who)); } Ok(()) } #[pallet::weight(10_000 + T::DbWeight::get().writes(1))] pub fn transfer_money(origin: OriginFor<T>, to: T::AccountId, amount: u32) -> DispatchResult { let who = ensure_signed(origin)?; let whoBalance: Result<u32, ()> = <AccountToBalance<T>>::try_get(&who); assert!(whoBalance.is_ok(), "Sender does not have account"); assert!(whoBalance.ok().unwrap() >= amount, "Sender does not have enough money"); let toBalance: Result<u32, ()> = <AccountToBalance<T>>::try_get(&who); assert!(toBalance.is_ok(), "Receiver does not have account"); <AccountToBalance<T>>::try_mutate(&who, |maybe_value| { *maybe_value = *maybe_value - amount; let 
x: Result<u32, ()> = Ok(*maybe_value); x }); <AccountToBalance<T>>::try_mutate(&to, |maybe_value| { *maybe_value = *maybe_value + amount; let x: Result<u32, ()> = Ok(*maybe_value); x }); let finalWhoBalance = <AccountToBalance<T>>::try_get(&who).ok().unwrap(); let finalToBalance = <AccountToBalance<T>>::try_get(&to).ok().unwrap(); Self::deposit_event(Event::TransferredCash( who, finalWhoBalance, to, finalToBalance, amount )); Ok(()) } } }
#![cfg_attr(not(feature = "std"), no_std)] pub use pallet::*; #[cfg(test)] mod mock; #[cfg(test)] mod tests; #[cfg(feature = "runtime-benchmarks")] mod benchmarking; #[frame_support::pallet] pub mod pallet { use frame_support::pallet_prelude::*; use frame_system::pallet_prelude::*; #[pallet::config] pub trait Config: frame_system::Config { type Event: From<Event<Self>> + IsType<<Self as frame_system::Config>::Event>; } #[pallet::pallet] #[pallet::generate_store(pub(super) trait Store)] pub struct Pallet<T>(_); #[pallet::storage] #[pallet::getter(fn current_state)] pub type AccountToBalance<T: Config> = StorageMap<_, Blake2_128Concat, T::AccountId, u32, ValueQuery>; #[pallet::event] #[pallet::generate_deposit(pub(super) fn deposit_event)] pub enum Event<T: Config> { AccountCreated(T::AccountId), AccountExists(T::AccountId), TransferredCash(T::AccountId, u32, T::AccountId, u32, u32), } #[pallet::error] pub enum Error<T> { NoneValue, StorageOverflow, } #[pallet::call] impl<T: Config> Pallet<T> { #[pallet::weight(10_000 + T::DbWeight::get().writes(1))] pub fn register(origin: OriginFor<T>) -> DispatchResult { let who = ensure_signed(origin)?;
#[pallet::weight(10_000 + T::DbWeight::get().writes(1))] pub fn transfer_money(origin: OriginFor<T>, to: T::AccountId, amount: u32) -> DispatchResult { let who = ensure_signed(origin)?; let whoBalance: Result<u32, ()> = <AccountToBalance<T>>::try_get(&who); assert!(whoBalance.is_ok(), "Sender does not have account"); assert!(whoBalance.ok().unwrap() >= amount, "Sender does not have enough money"); let toBalance: Result<u32, ()> = <AccountToBalance<T>>::try_get(&who); assert!(toBalance.is_ok(), "Receiver does not have account"); <AccountToBalance<T>>::try_mutate(&who, |maybe_value| { *maybe_value = *maybe_value - amount; let x: Result<u32, ()> = Ok(*maybe_value); x }); <AccountToBalance<T>>::try_mutate(&to, |maybe_value| { *maybe_value = *maybe_value + amount; let x: Result<u32, ()> = Ok(*maybe_value); x }); let finalWhoBalance = <AccountToBalance<T>>::try_get(&who).ok().unwrap(); let finalToBalance = <AccountToBalance<T>>::try_get(&to).ok().unwrap(); Self::deposit_event(Event::TransferredCash( who, finalWhoBalance, to, finalToBalance, amount )); Ok(()) } } }
if <AccountToBalance<T>>::try_get(&who).is_err() { <AccountToBalance<T>>::insert(&who, 10); Self::deposit_event(Event::AccountCreated(who)); } else { Self::deposit_event(Event::AccountExists(who)); } Ok(()) }
function_block-function_prefix_line
[ { "content": "// Build genesis storage according to the mock runtime.\n\npub fn new_test_ext() -> sp_io::TestExternalities {\n\n\tsystem::GenesisConfig::default().build_storage::<Test>().unwrap().into()\n\n}\n", "file_path": "pallets/template/src/mock.rs", "rank": 1, "score": 118374.5519331848 }, { "content": "type Block = frame_system::mocking::MockBlock<Test>;\n\n\n\n// Configure a mock runtime to test the pallet.\n\nframe_support::construct_runtime!(\n\n\tpub enum Test where\n\n\t\tBlock = Block,\n\n\t\tNodeBlock = Block,\n\n\t\tUncheckedExtrinsic = UncheckedExtrinsic,\n\n\t{\n\n\t\tSystem: frame_system::{Pallet, Call, Config, Storage, Event<T>},\n\n\t\tTemplateModule: pallet_template::{Pallet, Call, Storage, Event<T>},\n\n\t}\n\n);\n\n\n\nimpl system::Config for Test {\n\n\ttype BaseCallFilter = frame_support::traits::Everything;\n\n\ttype BlockWeights = ();\n\n\ttype BlockLength = ();\n\n\ttype DbWeight = ();\n\n\ttype Origin = Origin;\n", "file_path": "pallets/template/src/mock.rs", "rank": 2, "score": 106938.79528032307 }, { "content": "type UncheckedExtrinsic = frame_system::mocking::MockUncheckedExtrinsic<Test>;\n", "file_path": "pallets/template/src/mock.rs", "rank": 3, "score": 101978.18825271257 }, { "content": "#[test]\n\nfn it_works_for_default_value() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\t// Dispatch a signed extrinsic.\n\n\t\tassert_ok!(TemplateModule::do_something(Origin::signed(1), 42));\n\n\t\t// Read pallet storage and assert an expected result.\n\n\t\tassert_eq!(TemplateModule::something(), Some(42));\n\n\t});\n\n}\n\n\n", "file_path": "pallets/template/src/tests.rs", "rank": 4, "score": 81579.28976007564 }, { "content": "#[test]\n\nfn correct_error_for_none_value() {\n\n\tnew_test_ext().execute_with(|| {\n\n\t\t// Ensure the expected error is thrown when no value is present.\n\n\t\tassert_noop!(TemplateModule::cause_error(Origin::signed(1)), Error::<Test>::NoneValue);\n\n\t});\n\n}\n", "file_path": "pallets/template/src/tests.rs", 
"rank": 5, "score": 78945.5801525347 }, { "content": "pub fn development_config() -> Result<ChainSpec, String> {\n\n\tlet wasm_binary = WASM_BINARY.ok_or_else(|| \"Development wasm not available\".to_string())?;\n\n\n\n\tOk(ChainSpec::from_genesis(\n\n\t\t// Name\n\n\t\t\"Development\",\n\n\t\t// ID\n\n\t\t\"dev\",\n\n\t\tChainType::Development,\n\n\t\tmove || {\n\n\t\t\ttestnet_genesis(\n\n\t\t\t\twasm_binary,\n\n\t\t\t\t// Initial PoA authorities\n\n\t\t\t\tvec![authority_keys_from_seed(\"Alice\")],\n\n\t\t\t\t// Sudo account\n\n\t\t\t\tget_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n\t\t\t\t// Pre-funded accounts\n\n\t\t\t\tvec![\n\n\t\t\t\t\tget_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n\t\t\t\t\tget_account_id_from_seed::<sr25519::Public>(\"Bob\"),\n", "file_path": "node/src/chain_spec.rs", "rank": 6, "score": 75080.90123384366 }, { "content": "pub fn new_partial(\n\n\tconfig: &Configuration,\n\n) -> Result<\n\n\tsc_service::PartialComponents<\n\n\t\tFullClient,\n\n\t\tFullBackend,\n\n\t\tFullSelectChain,\n\n\t\tsc_consensus::DefaultImportQueue<Block, FullClient>,\n\n\t\tsc_transaction_pool::FullPool<Block, FullClient>,\n\n\t\t(\n\n\t\t\tsc_finality_grandpa::GrandpaBlockImport<\n\n\t\t\t\tFullBackend,\n\n\t\t\t\tBlock,\n\n\t\t\t\tFullClient,\n\n\t\t\t\tFullSelectChain,\n\n\t\t\t>,\n\n\t\t\tsc_finality_grandpa::LinkHalf<Block, FullClient, FullSelectChain>,\n\n\t\t\tOption<Telemetry>,\n\n\t\t),\n\n\t>,\n", "file_path": "node/src/service.rs", "rank": 7, "score": 75014.6993038513 }, { "content": "pub fn local_testnet_config() -> Result<ChainSpec, String> {\n\n\tlet wasm_binary = WASM_BINARY.ok_or_else(|| \"Development wasm not available\".to_string())?;\n\n\n\n\tOk(ChainSpec::from_genesis(\n\n\t\t// Name\n\n\t\t\"Local Testnet\",\n\n\t\t// ID\n\n\t\t\"local_testnet\",\n\n\t\tChainType::Local,\n\n\t\tmove || {\n\n\t\t\ttestnet_genesis(\n\n\t\t\t\twasm_binary,\n\n\t\t\t\t// Initial PoA 
authorities\n\n\t\t\t\tvec![authority_keys_from_seed(\"Alice\"), authority_keys_from_seed(\"Bob\")],\n\n\t\t\t\t// Sudo account\n\n\t\t\t\tget_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n\t\t\t\t// Pre-funded accounts\n\n\t\t\t\tvec![\n\n\t\t\t\t\tget_account_id_from_seed::<sr25519::Public>(\"Alice\"),\n\n\t\t\t\t\tget_account_id_from_seed::<sr25519::Public>(\"Bob\"),\n", "file_path": "node/src/chain_spec.rs", "rank": 8, "score": 73146.07738687436 }, { "content": "/// Builds a new service for a full client.\n\npub fn new_full(mut config: Configuration) -> Result<TaskManager, ServiceError> {\n\n\tlet sc_service::PartialComponents {\n\n\t\tclient,\n\n\t\tbackend,\n\n\t\tmut task_manager,\n\n\t\timport_queue,\n\n\t\tmut keystore_container,\n\n\t\tselect_chain,\n\n\t\ttransaction_pool,\n\n\t\tother: (block_import, grandpa_link, mut telemetry),\n\n\t} = new_partial(&config)?;\n\n\n\n\tif let Some(url) = &config.keystore_remote {\n\n\t\tmatch remote_keystore(url) {\n\n\t\t\tOk(k) => keystore_container.set_remote_keystore(k),\n\n\t\t\tErr(e) =>\n\n\t\t\t\treturn Err(ServiceError::Other(format!(\n\n\t\t\t\t\t\"Error hooking up remote keystore for {}: {}\",\n\n\t\t\t\t\turl, e\n\n\t\t\t\t))),\n", "file_path": "node/src/service.rs", "rank": 9, "score": 72801.51361958295 }, { "content": "#[cfg(feature = \"std\")]\n\npub fn native_version() -> NativeVersion {\n\n\tNativeVersion { runtime_version: VERSION, can_author_with: Default::default() }\n\n}\n\n\n\nconst NORMAL_DISPATCH_RATIO: Perbill = Perbill::from_percent(75);\n\n\n\nparameter_types! 
{\n\n\tpub const Version: RuntimeVersion = VERSION;\n\n\tpub const BlockHashCount: BlockNumber = 2400;\n\n\t/// We allow for 2 seconds of compute with a 6 second average block time.\n\n\tpub BlockWeights: frame_system::limits::BlockWeights = frame_system::limits::BlockWeights\n\n\t\t::with_sensible_defaults(2 * WEIGHT_PER_SECOND, NORMAL_DISPATCH_RATIO);\n\n\tpub BlockLength: frame_system::limits::BlockLength = frame_system::limits::BlockLength\n\n\t\t::max_with_normal_ratio(5 * 1024 * 1024, NORMAL_DISPATCH_RATIO);\n\n\tpub const SS58Prefix: u8 = 42;\n\n}\n\n\n\n// Configure FRAME pallets to include in runtime.\n\n\n\nimpl frame_system::Config for Runtime {\n", "file_path": "runtime/src/lib.rs", "rank": 10, "score": 67006.17788289771 }, { "content": "/// Parse and run command line arguments\n\npub fn run() -> sc_cli::Result<()> {\n\n\tlet cli = Cli::from_args();\n\n\n\n\tmatch &cli.subcommand {\n\n\t\tSome(Subcommand::Key(cmd)) => cmd.run(&cli),\n\n\t\tSome(Subcommand::BuildSpec(cmd)) => {\n\n\t\t\tlet runner = cli.create_runner(cmd)?;\n\n\t\t\trunner.sync_run(|config| cmd.run(config.chain_spec, config.network))\n\n\t\t},\n\n\t\tSome(Subcommand::CheckBlock(cmd)) => {\n\n\t\t\tlet runner = cli.create_runner(cmd)?;\n\n\t\t\trunner.async_run(|config| {\n\n\t\t\t\tlet PartialComponents { client, task_manager, import_queue, .. } =\n\n\t\t\t\t\tservice::new_partial(&config)?;\n\n\t\t\t\tOk((cmd.run(client, import_queue), task_manager))\n\n\t\t\t})\n\n\t\t},\n\n\t\tSome(Subcommand::ExportBlocks(cmd)) => {\n\n\t\t\tlet runner = cli.create_runner(cmd)?;\n\n\t\t\trunner.async_run(|config| {\n", "file_path": "node/src/command.rs", "rank": 11, "score": 63721.44210459557 }, { "content": "/// Generate an Aura authority key.\n\npub fn authority_keys_from_seed(s: &str) -> (AuraId, GrandpaId) {\n\n\t(get_from_seed::<AuraId>(s), get_from_seed::<GrandpaId>(s))\n\n}\n\n\n", "file_path": "node/src/chain_spec.rs", "rank": 12, "score": 52158.516161829146 }, { "content": "//! 
Benchmarking setup for pallet-template\n\n\n\nuse super::*;\n\n\n\n#[allow(unused)]\n\nuse crate::Pallet as Template;\n\nuse frame_benchmarking::{benchmarks, whitelisted_caller};\n\nuse frame_system::RawOrigin;\n\n\n\nbenchmarks! {\n\n\tdo_something {\n\n\t\tlet s in 0 .. 100;\n\n\t\tlet caller: T::AccountId = whitelisted_caller();\n\n\t}: _(RawOrigin::Signed(caller), s)\n\n\tverify {\n\n\t\tassert_eq!(Something::<T>::get(), Some(s));\n\n\t}\n\n\n\n\timpl_benchmark_test_suite!(Template, crate::mock::new_test_ext(), crate::mock::Test);\n\n}\n", "file_path": "pallets/template/src/benchmarking.rs", "rank": 13, "score": 49077.35569447441 }, { "content": "\ttype Call = Call;\n\n\ttype Index = u64;\n\n\ttype BlockNumber = u64;\n\n\ttype Hash = H256;\n\n\ttype Hashing = BlakeTwo256;\n\n\ttype AccountId = u64;\n\n\ttype Lookup = IdentityLookup<Self::AccountId>;\n\n\ttype Header = Header;\n\n\ttype Event = Event;\n\n\ttype BlockHashCount = ConstU64<250>;\n\n\ttype Version = ();\n\n\ttype PalletInfo = PalletInfo;\n\n\ttype AccountData = ();\n\n\ttype OnNewAccount = ();\n\n\ttype OnKilledAccount = ();\n\n\ttype SystemWeightInfo = ();\n\n\ttype SS58Prefix = ConstU16<42>;\n\n\ttype OnSetCode = ();\n\n\ttype MaxConsumers = frame_support::traits::ConstU32<16>;\n\n}\n\n\n\nimpl pallet_template::Config for Test {\n\n\ttype Event = Event;\n\n}\n\n\n", "file_path": "pallets/template/src/mock.rs", "rank": 14, "score": 48849.60195536802 }, { "content": "use crate as pallet_template;\n\nuse frame_support::traits::{ConstU16, ConstU64};\n\nuse frame_system as system;\n\nuse sp_core::H256;\n\nuse sp_runtime::{\n\n\ttesting::Header,\n\n\ttraits::{BlakeTwo256, IdentityLookup},\n\n};\n\n\n", "file_path": "pallets/template/src/mock.rs", "rank": 15, "score": 48847.67504532963 }, { "content": "use crate::{mock::*, Error};\n\nuse frame_support::{assert_noop, assert_ok};\n\n\n\n#[test]\n", "file_path": "pallets/template/src/tests.rs", "rank": 16, "score": 48790.022979682944 }, { "content": "/// 
Generate an account ID from seed.\n\npub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> AccountId\n\nwhere\n\n\tAccountPublic: From<<TPublic::Pair as Pair>::Public>,\n\n{\n\n\tAccountPublic::from(get_from_seed::<TPublic>(seed)).into_account()\n\n}\n\n\n", "file_path": "node/src/chain_spec.rs", "rank": 17, "score": 47345.56582318802 }, { "content": "fn main() {\n\n\tWasmBuilder::new()\n\n\t\t.with_current_project()\n\n\t\t.export_heap_base()\n\n\t\t.import_memory()\n\n\t\t.build()\n\n}\n", "file_path": "runtime/build.rs", "rank": 18, "score": 43534.42592224185 }, { "content": "fn main() {\n\n\tgenerate_cargo_keys();\n\n\n\n\trerun_if_git_head_changed();\n\n}\n", "file_path": "node/build.rs", "rank": 19, "score": 43534.42592224185 }, { "content": "type FullClient =\n\n\tsc_service::TFullClient<Block, RuntimeApi, NativeElseWasmExecutor<ExecutorDispatch>>;\n", "file_path": "node/src/service.rs", "rank": 20, "score": 42836.6042965207 }, { "content": "/// Generate a crypto pair from seed.\n\npub fn get_from_seed<TPublic: Public>(seed: &str) -> <TPublic::Pair as Pair>::Public {\n\n\tTPublic::Pair::from_string(&format!(\"//{}\", seed), None)\n\n\t\t.expect(\"static values are valid; qed\")\n\n\t\t.public()\n\n}\n\n\n", "file_path": "node/src/chain_spec.rs", "rank": 21, "score": 41911.69773671992 }, { "content": "/// Configure initial storage state for FRAME modules.\n\nfn testnet_genesis(\n\n\twasm_binary: &[u8],\n\n\tinitial_authorities: Vec<(AuraId, GrandpaId)>,\n\n\troot_key: AccountId,\n\n\tendowed_accounts: Vec<AccountId>,\n\n\t_enable_println: bool,\n\n) -> GenesisConfig {\n\n\tGenesisConfig {\n\n\t\tsystem: SystemConfig {\n\n\t\t\t// Add Wasm runtime to storage.\n\n\t\t\tcode: wasm_binary.to_vec(),\n\n\t\t},\n\n\t\tbalances: BalancesConfig {\n\n\t\t\t// Configure endowed accounts with initial balance of 1 << 60.\n\n\t\t\tbalances: endowed_accounts.iter().cloned().map(|k| (k, 1 << 60)).collect(),\n\n\t\t},\n\n\t\taura: AuraConfig {\n\n\t\t\tauthorities: 
initial_authorities.iter().map(|x| (x.0.clone())).collect(),\n\n\t\t},\n\n\t\tgrandpa: GrandpaConfig {\n\n\t\t\tauthorities: initial_authorities.iter().map(|x| (x.1.clone(), 1)).collect(),\n\n\t\t},\n\n\t\tsudo: SudoConfig {\n\n\t\t\t// Assign network admin rights.\n\n\t\t\tkey: Some(root_key),\n\n\t\t},\n\n\t\ttransaction_payment: Default::default(),\n\n\t}\n\n}\n", "file_path": "node/src/chain_spec.rs", "rank": 22, "score": 40094.83200076279 }, { "content": "/// Instantiate all full RPC extensions.\n\npub fn create_full<C, P>(deps: FullDeps<C, P>) -> jsonrpc_core::IoHandler<sc_rpc::Metadata>\n\nwhere\n\n\tC: ProvideRuntimeApi<Block>,\n\n\tC: HeaderBackend<Block> + HeaderMetadata<Block, Error = BlockChainError> + 'static,\n\n\tC: Send + Sync + 'static,\n\n\tC::Api: substrate_frame_rpc_system::AccountNonceApi<Block, AccountId, Index>,\n\n\tC::Api: pallet_transaction_payment_rpc::TransactionPaymentRuntimeApi<Block, Balance>,\n\n\tC::Api: BlockBuilder<Block>,\n\n\tP: TransactionPool + 'static,\n\n{\n\n\tuse pallet_transaction_payment_rpc::{TransactionPayment, TransactionPaymentApi};\n\n\tuse substrate_frame_rpc_system::{FullSystem, SystemApi};\n\n\n\n\tlet mut io = jsonrpc_core::IoHandler::default();\n\n\tlet FullDeps { client, pool, deny_unsafe } = deps;\n\n\n\n\tio.extend_with(SystemApi::to_delegate(FullSystem::new(client.clone(), pool, deny_unsafe)));\n\n\n\n\tio.extend_with(TransactionPaymentApi::to_delegate(TransactionPayment::new(client.clone())));\n\n\n\n\t// Extend this RPC with a custom API by using the following syntax.\n\n\t// `YourRpcStruct` should have a reference to a client, which is needed\n\n\t// to call into the runtime.\n\n\t// `io.extend_with(YourRpcTrait::to_delegate(YourRpcStruct::new(ReferenceToClient, ...)));`\n\n\n\n\tio\n\n}\n", "file_path": "node/src/rpc.rs", "rank": 23, "score": 38689.829524506415 }, { "content": "fn main() -> sc_cli::Result<()> {\n\n\tcommand::run()\n\n}\n", "file_path": "node/src/main.rs", "rank": 24, "score": 
34022.995492032234 }, { "content": "type AccountPublic = <Signature as Verify>::Signer;\n\n\n", "file_path": "node/src/chain_spec.rs", "rank": 25, "score": 30620.76991963465 }, { "content": "type FullBackend = sc_service::TFullBackend<Block>;\n", "file_path": "node/src/service.rs", "rank": 26, "score": 30491.6288691304 }, { "content": "type FullSelectChain = sc_consensus::LongestChain<FullBackend, Block>;\n\n\n", "file_path": "node/src/service.rs", "rank": 27, "score": 27896.39767371067 }, { "content": "fn remote_keystore(_url: &String) -> Result<Arc<LocalKeystore>, &'static str> {\n\n\t// FIXME: here would the concrete keystore be built,\n\n\t// must return a concrete type (NOT `LocalKeystore`) that\n\n\t// implements `CryptoStore` and `SyncCryptoStore`\n\n\tErr(\"Remote Keystore not supported.\")\n\n}\n\n\n", "file_path": "node/src/service.rs", "rank": 28, "score": 24290.87321550098 }, { "content": "License: Unlicense", "file_path": "pallets/template/README.md", "rank": 35, "score": 15053.37791992208 }, { "content": "use sp_std::prelude::*;\n\n#[cfg(feature = \"std\")]\n\nuse sp_version::NativeVersion;\n\nuse sp_version::RuntimeVersion;\n\n\n\n// A few exports that help ease life for downstream crates.\n\npub use frame_support::{\n\n\tconstruct_runtime, parameter_types,\n\n\ttraits::{ConstU128, ConstU32, ConstU8, KeyOwnerProofSystem, Randomness, StorageInfo},\n\n\tweights::{\n\n\t\tconstants::{BlockExecutionWeight, ExtrinsicBaseWeight, RocksDbWeight, WEIGHT_PER_SECOND},\n\n\t\tIdentityFee, Weight,\n\n\t},\n\n\tStorageValue,\n\n};\n\npub use pallet_balances::Call as BalancesCall;\n\npub use pallet_timestamp::Call as TimestampCall;\n\nuse pallet_transaction_payment::CurrencyAdapter;\n\n#[cfg(any(feature = \"std\", test))]\n\npub use sp_runtime::BuildStorage;\n", "file_path": "runtime/src/lib.rs", "rank": 36, "score": 17.54129142192193 }, { "content": "\t/// The data to be stored in an account.\n\n\ttype AccountData = pallet_balances::AccountData<Balance>;\n\n\t/// 
Weight information for the extrinsics of this pallet.\n\n\ttype SystemWeightInfo = ();\n\n\t/// This is used as an identifier of the chain. 42 is the generic substrate prefix.\n\n\ttype SS58Prefix = SS58Prefix;\n\n\t/// The set code logic, just the default since we're not a parachain.\n\n\ttype OnSetCode = ();\n\n\ttype MaxConsumers = frame_support::traits::ConstU32<16>;\n\n}\n\n\n\nimpl pallet_randomness_collective_flip::Config for Runtime {}\n\n\n\nimpl pallet_aura::Config for Runtime {\n\n\ttype AuthorityId = AuraId;\n\n\ttype DisabledValidators = ();\n\n\ttype MaxAuthorities = ConstU32<32>;\n\n}\n\n\n\nimpl pallet_grandpa::Config for Runtime {\n", "file_path": "runtime/src/lib.rs", "rank": 37, "score": 13.614168311310445 }, { "content": "\ttype ExistentialDeposit = ConstU128<500>;\n\n\ttype AccountStore = System;\n\n\ttype WeightInfo = pallet_balances::weights::SubstrateWeight<Runtime>;\n\n}\n\n\n\nparameter_types! {\n\n\tpub const TransactionByteFee: Balance = 1;\n\n}\n\n\n\nimpl pallet_transaction_payment::Config for Runtime {\n\n\ttype OnChargeTransaction = CurrencyAdapter<Balances, ()>;\n\n\ttype TransactionByteFee = TransactionByteFee;\n\n\ttype OperationalFeeMultiplier = ConstU8<5>;\n\n\ttype WeightToFee = IdentityFee<Balance>;\n\n\ttype FeeMultiplierUpdate = ();\n\n}\n\n\n\nimpl pallet_sudo::Config for Runtime {\n\n\ttype Event = Event;\n\n\ttype Call = Call;\n", "file_path": "runtime/src/lib.rs", "rank": 38, "score": 13.60054194390754 }, { "content": "\t\tfn dispatch_benchmark(\n\n\t\t\tconfig: frame_benchmarking::BenchmarkConfig\n\n\t\t) -> Result<Vec<frame_benchmarking::BenchmarkBatch>, sp_runtime::RuntimeString> {\n\n\t\t\tuse frame_benchmarking::{baseline, Benchmarking, BenchmarkBatch, TrackedStorageKey};\n\n\n\n\t\t\tuse frame_system_benchmarking::Pallet as SystemBench;\n\n\t\t\tuse baseline::Pallet as BaselineBench;\n\n\n\n\t\t\timpl frame_system_benchmarking::Config for Runtime {}\n\n\t\t\timpl baseline::Config for Runtime {}\n\n\n\n\t\t\tlet 
whitelist: Vec<TrackedStorageKey> = vec![\n\n\t\t\t\t// Block Number\n\n\t\t\t\thex_literal::hex!(\"26aa394eea5630e07c48ae0c9558cef702a5c1b19ab7a04f536c519aca4983ac\").to_vec().into(),\n\n\t\t\t\t// Total Issuance\n\n\t\t\t\thex_literal::hex!(\"c2261276cc9d1f8598ea4b6a74b15c2f57c875e4cff74148e4628f264b974c80\").to_vec().into(),\n\n\t\t\t\t// Execution Phase\n\n\t\t\t\thex_literal::hex!(\"26aa394eea5630e07c48ae0c9558cef7ff553b5a9862a516939d82b3d3d8661a\").to_vec().into(),\n\n\t\t\t\t// Event Count\n\n\t\t\t\thex_literal::hex!(\"26aa394eea5630e07c48ae0c9558cef70a98fdbe9ce6c55837576c60c7af3850\").to_vec().into(),\n", "file_path": "runtime/src/lib.rs", "rank": 39, "score": 13.101152536555128 }, { "content": "\n\n\t#[cfg(feature = \"runtime-benchmarks\")]\n\n\timpl frame_benchmarking::Benchmark<Block> for Runtime {\n\n\t\tfn benchmark_metadata(extra: bool) -> (\n\n\t\t\tVec<frame_benchmarking::BenchmarkList>,\n\n\t\t\tVec<frame_support::traits::StorageInfo>,\n\n\t\t) {\n\n\t\t\tuse frame_benchmarking::{baseline, Benchmarking, BenchmarkList};\n\n\t\t\tuse frame_support::traits::StorageInfoTrait;\n\n\t\t\tuse frame_system_benchmarking::Pallet as SystemBench;\n\n\t\t\tuse baseline::Pallet as BaselineBench;\n\n\n\n\t\t\tlet mut list = Vec::<BenchmarkList>::new();\n\n\t\t\tlist_benchmarks!(list, extra);\n\n\n\n\t\t\tlet storage_info = AllPalletsWithSystem::storage_info();\n\n\n\n\t\t\treturn (list, storage_info)\n\n\t\t}\n\n\n", "file_path": "runtime/src/lib.rs", "rank": 40, "score": 12.647697812664044 }, { "content": "\tpub const MinimumPeriod: u64 = SLOT_DURATION / 2;\n\n}\n\n\n\nimpl pallet_timestamp::Config for Runtime {\n\n\t/// A timestamp: milliseconds since the unix epoch.\n\n\ttype Moment = u64;\n\n\ttype OnTimestampSet = Aura;\n\n\ttype MinimumPeriod = MinimumPeriod;\n\n\ttype WeightInfo = ();\n\n}\n\n\n\nimpl pallet_balances::Config for Runtime {\n\n\ttype MaxLocks = ConstU32<50>;\n\n\ttype MaxReserves = ();\n\n\ttype ReserveIdentifier = [u8; 8];\n\n\t/// The type 
for recording an account's balance.\n\n\ttype Balance = Balance;\n\n\t/// The ubiquitous event type.\n\n\ttype Event = Event;\n\n\ttype DustRemoval = ();\n", "file_path": "runtime/src/lib.rs", "rank": 41, "score": 12.331130151543242 }, { "content": "pub use sp_runtime::{Perbill, Permill};\n\n\n\n/// Import the template pallet.\n\npub use pallet_template;\n\n\n\n/// An index to a block.\n\npub type BlockNumber = u32;\n\n\n\n/// Alias to 512-bit hash when used in the context of a transaction signature on the chain.\n\npub type Signature = MultiSignature;\n\n\n\n/// Some way of identifying an account on the chain. We intentionally make it equivalent\n\n/// to the public key of our transaction signing scheme.\n\npub type AccountId = <<Signature as Verify>::Signer as IdentifyAccount>::AccountId;\n\n\n\n/// Balance of an account.\n\npub type Balance = u128;\n\n\n\n/// Index of a transaction in the chain.\n\npub type Index = u32;\n", "file_path": "runtime/src/lib.rs", "rank": 42, "score": 11.51283454073197 }, { "content": "use node_template_runtime::{\n\n\tAccountId, AuraConfig, BalancesConfig, GenesisConfig, GrandpaConfig, Signature, SudoConfig,\n\n\tSystemConfig, WASM_BINARY,\n\n};\n\nuse sc_service::ChainType;\n\nuse sp_consensus_aura::sr25519::AuthorityId as AuraId;\n\nuse sp_core::{sr25519, Pair, Public};\n\nuse sp_finality_grandpa::AuthorityId as GrandpaId;\n\nuse sp_runtime::traits::{IdentifyAccount, Verify};\n\n\n\n// The URL for the telemetry server.\n\n// const STAGING_TELEMETRY_URL: &str = \"wss://telemetry.polkadot.io/submit/\";\n\n\n\n/// Specialized `ChainSpec`. 
This is a specialization of the general Substrate ChainSpec type.\n\npub type ChainSpec = sc_service::GenericChainSpec<GenesisConfig>;\n\n\n\n/// Generate a crypto pair from seed.\n", "file_path": "node/src/chain_spec.rs", "rank": 43, "score": 11.01101721661497 }, { "content": "}\n\n\n\n/// Configure the pallet-template in pallets/template.\n\nimpl pallet_template::Config for Runtime {\n\n\ttype Event = Event;\n\n}\n\n\n\n// Create the runtime by composing the FRAME pallets that were previously configured.\n\nconstruct_runtime!(\n\n\tpub enum Runtime where\n\n\t\tBlock = Block,\n\n\t\tNodeBlock = opaque::Block,\n\n\t\tUncheckedExtrinsic = UncheckedExtrinsic\n\n\t{\n\n\t\tSystem: frame_system,\n\n\t\tRandomnessCollectiveFlip: pallet_randomness_collective_flip,\n\n\t\tTimestamp: pallet_timestamp,\n\n\t\tAura: pallet_aura,\n\n\t\tGrandpa: pallet_grandpa,\n\n\t\tBalances: pallet_balances,\n", "file_path": "runtime/src/lib.rs", "rank": 44, "score": 10.1580623833524 }, { "content": "#![cfg_attr(not(feature = \"std\"), no_std)]\n\n// `construct_runtime!` does a lot of recursion and requires us to increase the limit to 256.\n\n#![recursion_limit = \"256\"]\n\n\n\n// Make the WASM binary available.\n\n#[cfg(feature = \"std\")]\n\ninclude!(concat!(env!(\"OUT_DIR\"), \"/wasm_binary.rs\"));\n\n\n\nuse pallet_grandpa::{\n\n\tfg_primitives, AuthorityId as GrandpaId, AuthorityList as GrandpaAuthorityList,\n\n};\n\nuse sp_api::impl_runtime_apis;\n\nuse sp_consensus_aura::sr25519::AuthorityId as AuraId;\n\nuse sp_core::{crypto::KeyTypeId, OpaqueMetadata};\n\nuse sp_runtime::{\n\n\tcreate_runtime_str, generic, impl_opaque_keys,\n\n\ttraits::{AccountIdLookup, BlakeTwo256, Block as BlockT, IdentifyAccount, NumberFor, Verify},\n\n\ttransaction_validity::{TransactionSource, TransactionValidity},\n\n\tApplyExtrinsicResult, MultiSignature,\n\n};\n", "file_path": "runtime/src/lib.rs", "rank": 45, "score": 9.754868056129919 }, { "content": "### Pallets\n\n\n\nThe runtime in this project is 
constructed using many FRAME pallets that ship with the\n\n[core Substrate repository](https://github.com/paritytech/substrate/tree/master/frame) and a\n\ntemplate pallet that is [defined in the `pallets`](./pallets/template/src/lib.rs) directory.\n\n\n\nA FRAME pallet is compromised of a number of blockchain primitives:\n\n\n\n- Storage: FRAME defines a rich set of powerful\n\n [storage abstractions](https://docs.substrate.io/v3/runtime/storage) that makes\n\n it easy to use Substrate's efficient key-value database to manage the evolving state of a\n\n blockchain.\n\n- Dispatchables: FRAME pallets define special types of functions that can be invoked (dispatched)\n\n from outside of the runtime in order to update its state.\n\n- Events: Substrate uses [events and errors](https://docs.substrate.io/v3/runtime/events-and-errors)\n\n to notify users of important changes in the runtime.\n\n- Errors: When a dispatchable fails, it returns an error.\n\n- Config: The `Config` configuration interface is used to define the types and parameters upon\n\n which a FRAME pallet depends.\n\n\n\n### Run in Docker\n\n\n\nFirst, install [Docker](https://docs.docker.com/get-docker/) and\n\n[Docker Compose](https://docs.docker.com/compose/install/).\n\n\n\nThen run the following command to start a single node development chain.\n\n\n\n```bash\n\n./scripts/docker_run.sh\n\n```\n\n\n\nThis command will firstly compile your code, and then start a local development network. You can\n\nalso replace the default command\n\n(`cargo build --release && ./target/release/node-template --dev --ws-external`)\n\nby appending your own. 
A few useful ones are as follow.\n\n\n\n```bash\n\n# Run Substrate node without re-compiling\n\n./scripts/docker_run.sh ./target/release/node-template --dev --ws-external\n\n\n\n# Purge the local dev chain\n\n./scripts/docker_run.sh ./target/release/node-template purge-chain --dev\n\n\n\n# Check whether the code is compilable\n\n./scripts/docker_run.sh cargo check\n\n```\n", "file_path": "README.md", "rank": 46, "score": 9.729288311957081 }, { "content": "\tframe_system::CheckNonce<Runtime>,\n\n\tframe_system::CheckWeight<Runtime>,\n\n\tpallet_transaction_payment::ChargeTransactionPayment<Runtime>,\n\n);\n\n/// Unchecked extrinsic type as expected by this runtime.\n\npub type UncheckedExtrinsic = generic::UncheckedExtrinsic<Address, Call, Signature, SignedExtra>;\n\n/// Executive: handles dispatch to the various modules.\n\npub type Executive = frame_executive::Executive<\n\n\tRuntime,\n\n\tBlock,\n\n\tframe_system::ChainContext<Runtime>,\n\n\tRuntime,\n\n\tAllPalletsWithSystem,\n\n>;\n\n\n\n#[cfg(feature = \"runtime-benchmarks\")]\n\n#[macro_use]\n\nextern crate frame_benchmarking;\n\n\n\n#[cfg(feature = \"runtime-benchmarks\")]\n", "file_path": "runtime/src/lib.rs", "rank": 47, "score": 9.623708933771375 }, { "content": "//! Service and ServiceFactory implementation. 
Specialized wrapper over substrate service.\n\n\n\nuse node_template_runtime::{self, opaque::Block, RuntimeApi};\n\nuse sc_client_api::{BlockBackend, ExecutorProvider};\n\nuse sc_consensus_aura::{ImportQueueParams, SlotProportion, StartAuraParams};\n\npub use sc_executor::NativeElseWasmExecutor;\n\nuse sc_finality_grandpa::SharedVoterState;\n\nuse sc_keystore::LocalKeystore;\n\nuse sc_service::{error::Error as ServiceError, Configuration, TaskManager};\n\nuse sc_telemetry::{Telemetry, TelemetryWorker};\n\nuse sp_consensus::SlotData;\n\nuse sp_consensus_aura::sr25519::AuthorityPair as AuraPair;\n\nuse std::{sync::Arc, time::Duration};\n\n\n\n// Our native executor instance.\n\npub struct ExecutorDispatch;\n\n\n\nimpl sc_executor::NativeExecutionDispatch for ExecutorDispatch {\n\n\t/// Only enable the benchmarking host functions when we actually want to benchmark.\n\n\t#[cfg(feature = \"runtime-benchmarks\")]\n", "file_path": "node/src/service.rs", "rank": 48, "score": 9.153483931975149 }, { "content": "\t\t\t\t// System Events\n\n\t\t\t\thex_literal::hex!(\"26aa394eea5630e07c48ae0c9558cef780d41e5e16056765bc8461851072c9d7\").to_vec().into(),\n\n\t\t\t];\n\n\n\n\t\t\tlet mut batches = Vec::<BenchmarkBatch>::new();\n\n\t\t\tlet params = (&config, &whitelist);\n\n\t\t\tadd_benchmarks!(params, batches);\n\n\n\n\t\t\tOk(batches)\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "runtime/src/lib.rs", "rank": 49, "score": 8.405505312665426 }, { "content": "mod benches {\n\n\tdefine_benchmarks!(\n\n\t\t[frame_benchmarking, BaselineBench::<Runtime>]\n\n\t\t[frame_system, SystemBench::<Runtime>]\n\n\t\t[pallet_balances, Balances]\n\n\t\t[pallet_timestamp, Timestamp]\n\n\t\t[pallet_template, TemplateModule]\n\n\t);\n\n}\n\n\n\nimpl_runtime_apis! 
{\n\n\timpl sp_api::Core<Block> for Runtime {\n\n\t\tfn version() -> RuntimeVersion {\n\n\t\t\tVERSION\n\n\t\t}\n\n\n\n\t\tfn execute_block(block: Block) {\n\n\t\t\tExecutive::execute_block(block);\n\n\t\t}\n\n\n", "file_path": "runtime/src/lib.rs", "rank": 50, "score": 8.118150412162013 }, { "content": "\n\n/// A hash of some data used by the chain.\n\npub type Hash = sp_core::H256;\n\n\n\n/// Opaque types. These are used by the CLI to instantiate machinery that don't need to know\n\n/// the specifics of the runtime. They can then be made to be agnostic over specific formats\n\n/// of data like extrinsics, allowing for them to continue syncing the network through upgrades\n\n/// to even the core data structures.\n\npub mod opaque {\n\n\tuse super::*;\n\n\n\n\tpub use sp_runtime::OpaqueExtrinsic as UncheckedExtrinsic;\n\n\n\n\t/// Opaque block header type.\n\n\tpub type Header = generic::Header<BlockNumber, BlakeTwo256>;\n\n\t/// Opaque block type.\n\n\tpub type Block = generic::Block<Header, UncheckedExtrinsic>;\n\n\t/// Opaque block identifier type.\n\n\tpub type BlockId = generic::BlockId<Block>;\n\n\n", "file_path": "runtime/src/lib.rs", "rank": 51, "score": 7.944981371261919 }, { "content": "\tspec_version: 100,\n\n\timpl_version: 1,\n\n\tapis: RUNTIME_API_VERSIONS,\n\n\ttransaction_version: 1,\n\n\tstate_version: 1,\n\n};\n\n\n\n/// This determines the average expected block time that we are targeting.\n\n/// Blocks will be produced at a minimum duration defined by `SLOT_DURATION`.\n\n/// `SLOT_DURATION` is picked up by `pallet_timestamp` which is in turn picked\n\n/// up by `pallet_aura` to implement `fn slot_duration()`.\n\n///\n\n/// Change this to adjust the block time.\n\npub const MILLISECS_PER_BLOCK: u64 = 6000;\n\n\n\n// NOTE: Currently it is not possible to change the slot duration after the chain has started.\n\n// Attempting to do so will brick block production.\n\npub const SLOT_DURATION: u64 = MILLISECS_PER_BLOCK;\n\n\n\n// Time is measured by 
number of blocks.\n\npub const MINUTES: BlockNumber = 60_000 / (MILLISECS_PER_BLOCK as BlockNumber);\n\npub const HOURS: BlockNumber = MINUTES * 60;\n\npub const DAYS: BlockNumber = HOURS * 24;\n\n\n\n/// The version information used to identify this runtime when compiled natively.\n\n#[cfg(feature = \"std\")]\n", "file_path": "runtime/src/lib.rs", "rank": 52, "score": 7.928975039230457 }, { "content": "\ttype Event = Event;\n\n\ttype Call = Call;\n\n\n\n\ttype KeyOwnerProofSystem = ();\n\n\n\n\ttype KeyOwnerProof =\n\n\t\t<Self::KeyOwnerProofSystem as KeyOwnerProofSystem<(KeyTypeId, GrandpaId)>>::Proof;\n\n\n\n\ttype KeyOwnerIdentification = <Self::KeyOwnerProofSystem as KeyOwnerProofSystem<(\n\n\t\tKeyTypeId,\n\n\t\tGrandpaId,\n\n\t)>>::IdentificationTuple;\n\n\n\n\ttype HandleEquivocation = ();\n\n\n\n\ttype WeightInfo = ();\n\n\ttype MaxAuthorities = ConstU32<32>;\n\n}\n\n\n\nparameter_types! {\n", "file_path": "runtime/src/lib.rs", "rank": 53, "score": 7.8059379544888206 }, { "content": "pub mod chain_spec;\n\npub mod rpc;\n\npub mod service;\n", "file_path": "node/src/lib.rs", "rank": 54, "score": 7.628930894863727 }, { "content": "//! A collection of node-specific RPC methods.\n\n//! Substrate provides the `sc-rpc` crate, which defines the core RPC layer\n\n//! used by Substrate nodes. This file extends those RPC definitions with\n\n//! 
capabilities that are specific to this project's runtime configuration.\n\n\n\n#![warn(missing_docs)]\n\n\n\nuse std::sync::Arc;\n\n\n\nuse node_template_runtime::{opaque::Block, AccountId, Balance, Index};\n\npub use sc_rpc_api::DenyUnsafe;\n\nuse sc_transaction_pool_api::TransactionPool;\n\nuse sp_api::ProvideRuntimeApi;\n\nuse sp_block_builder::BlockBuilder;\n\nuse sp_blockchain::{Error as BlockChainError, HeaderBackend, HeaderMetadata};\n\n\n\n/// Full client dependencies.\n\npub struct FullDeps<C, P> {\n\n\t/// The client instance to use.\n\n\tpub client: Arc<C>,\n\n\t/// Transaction pool instance.\n\n\tpub pool: Arc<P>,\n\n\t/// Whether to deny unsafe calls\n\n\tpub deny_unsafe: DenyUnsafe,\n\n}\n\n\n\n/// Instantiate all full RPC extensions.\n", "file_path": "node/src/rpc.rs", "rank": 55, "score": 7.540758309501529 }, { "content": "\t\t\tlet runner = cli.create_runner(cmd)?;\n\n\t\t\trunner.sync_run(|config| cmd.run(config.database))\n\n\t\t},\n\n\t\tSome(Subcommand::Revert(cmd)) => {\n\n\t\t\tlet runner = cli.create_runner(cmd)?;\n\n\t\t\trunner.async_run(|config| {\n\n\t\t\t\tlet PartialComponents { client, task_manager, backend, .. } =\n\n\t\t\t\t\tservice::new_partial(&config)?;\n\n\t\t\t\tOk((cmd.run(client, backend), task_manager))\n\n\t\t\t})\n\n\t\t},\n\n\t\tSome(Subcommand::Benchmark(cmd)) =>\n\n\t\t\tif cfg!(feature = \"runtime-benchmarks\") {\n\n\t\t\t\tlet runner = cli.create_runner(cmd)?;\n\n\n\n\t\t\t\trunner.sync_run(|config| cmd.run::<Block, service::ExecutorDispatch>(config))\n\n\t\t\t} else {\n\n\t\t\t\tErr(\"Benchmarking wasn't enabled when building the node. You can enable it with \\\n\n\t\t\t\t `--features runtime-benchmarks`.\"\n\n\t\t\t\t\t.into())\n", "file_path": "node/src/command.rs", "rank": 56, "score": 6.445443464272664 }, { "content": "//! 
Substrate Node Template CLI library.\n\n#![warn(missing_docs)]\n\n\n\nmod chain_spec;\n\n#[macro_use]\n\nmod service;\n\nmod cli;\n\nmod command;\n\nmod rpc;\n\n\n", "file_path": "node/src/main.rs", "rank": 57, "score": 6.425109895740297 }, { "content": "\t/// Validate blocks.\n\n\tCheckBlock(sc_cli::CheckBlockCmd),\n\n\n\n\t/// Export blocks.\n\n\tExportBlocks(sc_cli::ExportBlocksCmd),\n\n\n\n\t/// Export the state of a given block into a chain spec.\n\n\tExportState(sc_cli::ExportStateCmd),\n\n\n\n\t/// Import blocks.\n\n\tImportBlocks(sc_cli::ImportBlocksCmd),\n\n\n\n\t/// Remove the whole chain.\n\n\tPurgeChain(sc_cli::PurgeChainCmd),\n\n\n\n\t/// Revert the chain to a previous state.\n\n\tRevert(sc_cli::RevertCmd),\n\n\n\n\t/// The custom benchmark subcommand benchmarking runtime pallets.\n\n\t#[clap(name = \"benchmark\", about = \"Benchmark runtime pallets.\")]\n\n\tBenchmark(frame_benchmarking_cli::BenchmarkCmd),\n\n}\n", "file_path": "node/src/cli.rs", "rank": 58, "score": 5.8991950513903255 }, { "content": "\t\tTransactionPayment: pallet_transaction_payment,\n\n\t\tSudo: pallet_sudo,\n\n\t\t// Include the custom logic from the pallet-template in the runtime.\n\n\t\tTemplateModule: pallet_template,\n\n\t}\n\n);\n\n\n\n/// The address format for describing accounts.\n\npub type Address = sp_runtime::MultiAddress<AccountId, ()>;\n\n/// Block header type as expected by this runtime.\n\npub type Header = generic::Header<BlockNumber, BlakeTwo256>;\n\n/// Block type as expected by this runtime.\n\npub type Block = generic::Block<Header, UncheckedExtrinsic>;\n\n/// The SignedExtension to the basic transaction logic.\n\npub type SignedExtra = (\n\n\tframe_system::CheckNonZeroSender<Runtime>,\n\n\tframe_system::CheckSpecVersion<Runtime>,\n\n\tframe_system::CheckTxVersion<Runtime>,\n\n\tframe_system::CheckGenesis<Runtime>,\n\n\tframe_system::CheckEra<Runtime>,\n", "file_path": "runtime/src/lib.rs", "rank": 59, "score": 5.828073559180677 }, { "content": "\t/// The 
basic call filter to use in dispatchable.\n\n\ttype BaseCallFilter = frame_support::traits::Everything;\n\n\t/// Block & extrinsics weights: base values and limits.\n\n\ttype BlockWeights = BlockWeights;\n\n\t/// The maximum length of a block (in bytes).\n\n\ttype BlockLength = BlockLength;\n\n\t/// The identifier used to distinguish between accounts.\n\n\ttype AccountId = AccountId;\n\n\t/// The aggregated dispatch type that is available for extrinsics.\n\n\ttype Call = Call;\n\n\t/// The lookup mechanism to get account ID from whatever is passed in dispatchers.\n\n\ttype Lookup = AccountIdLookup<AccountId, ()>;\n\n\t/// The index type for storing how many extrinsics an account has signed.\n\n\ttype Index = Index;\n\n\t/// The index type for blocks.\n\n\ttype BlockNumber = BlockNumber;\n\n\t/// The type for hashing blocks and tries.\n\n\ttype Hash = Hash;\n\n\t/// The hashing algorithm used.\n\n\ttype Hashing = BlakeTwo256;\n", "file_path": "runtime/src/lib.rs", "rank": 60, "score": 5.820719805103979 }, { "content": "\t/// The header type.\n\n\ttype Header = generic::Header<BlockNumber, BlakeTwo256>;\n\n\t/// The ubiquitous event type.\n\n\ttype Event = Event;\n\n\t/// The ubiquitous origin type.\n\n\ttype Origin = Origin;\n\n\t/// Maximum number of block number to block hash mappings to keep (oldest pruned first).\n\n\ttype BlockHashCount = BlockHashCount;\n\n\t/// The weight of database operations that the runtime can invoke.\n\n\ttype DbWeight = RocksDbWeight;\n\n\t/// Version of the runtime.\n\n\ttype Version = Version;\n\n\t/// Converts a module to the index of the module in `construct_runtime!`.\n\n\t///\n\n\t/// This type is being generated by `construct_runtime!`.\n\n\ttype PalletInfo = PalletInfo;\n\n\t/// What to do if a new account is created.\n\n\ttype OnNewAccount = ();\n\n\t/// What to do if an account is fully reaped from the system.\n\n\ttype OnKilledAccount = ();\n", "file_path": "runtime/src/lib.rs", "rank": 61, "score": 5.310289944038234 }, { 
"content": "use sc_cli::RunCmd;\n\n\n\n#[derive(Debug, clap::Parser)]\n\npub struct Cli {\n\n\t#[clap(subcommand)]\n\n\tpub subcommand: Option<Subcommand>,\n\n\n\n\t#[clap(flatten)]\n\n\tpub run: RunCmd,\n\n}\n\n\n\n#[derive(Debug, clap::Subcommand)]\n\npub enum Subcommand {\n\n\t/// Key management cli utilities\n\n\t#[clap(subcommand)]\n\n\tKey(sc_cli::KeySubcommand),\n\n\n\n\t/// Build a chain specification.\n\n\tBuildSpec(sc_cli::BuildSpecCmd),\n\n\n", "file_path": "node/src/cli.rs", "rank": 62, "score": 5.212268467770487 }, { "content": "\ttype ExtendHostFunctions = frame_benchmarking::benchmarking::HostFunctions;\n\n\t/// Otherwise we only use the default Substrate host functions.\n\n\t#[cfg(not(feature = \"runtime-benchmarks\"))]\n\n\ttype ExtendHostFunctions = ();\n\n\n\n\tfn dispatch(method: &str, data: &[u8]) -> Option<Vec<u8>> {\n\n\t\tnode_template_runtime::api::dispatch(method, data)\n\n\t}\n\n\n\n\tfn native_version() -> sc_executor::NativeVersion {\n\n\t\tnode_template_runtime::native_version()\n\n\t}\n\n}\n\n\n", "file_path": "node/src/service.rs", "rank": 63, "score": 5.143568250491974 }, { "content": "\timpl_opaque_keys! {\n\n\t\tpub struct SessionKeys {\n\n\t\t\tpub aura: Aura,\n\n\t\t\tpub grandpa: Grandpa,\n\n\t\t}\n\n\t}\n\n}\n\n\n\n// To learn more about runtime versioning and what each of the following value means:\n\n// https://docs.substrate.io/v3/runtime/upgrades#runtime-versioning\n\n#[sp_version::runtime_version]\n\npub const VERSION: RuntimeVersion = RuntimeVersion {\n\n\tspec_name: create_runtime_str!(\"node-template\"),\n\n\timpl_name: create_runtime_str!(\"node-template\"),\n\n\tauthoring_version: 1,\n\n\t// The version of the runtime specification. 
A full node will not attempt to use its native\n\n\t// runtime in substitute for the on-chain Wasm runtime unless all of `spec_name`,\n\n\t// `spec_version`, and `authoring_version` are the same between Wasm and native.\n\n\t// This value is set to 100 to notify Polkadot-JS App (https://polkadot.js.org/apps) to use\n\n\t// the compatible custom types.\n", "file_path": "runtime/src/lib.rs", "rank": 64, "score": 5.039745897446597 }, { "content": "\timpl frame_system_rpc_runtime_api::AccountNonceApi<Block, AccountId, Index> for Runtime {\n\n\t\tfn account_nonce(account: AccountId) -> Index {\n\n\t\t\tSystem::account_nonce(account)\n\n\t\t}\n\n\t}\n\n\n\n\timpl pallet_transaction_payment_rpc_runtime_api::TransactionPaymentApi<Block, Balance> for Runtime {\n\n\t\tfn query_info(\n\n\t\t\tuxt: <Block as BlockT>::Extrinsic,\n\n\t\t\tlen: u32,\n\n\t\t) -> pallet_transaction_payment_rpc_runtime_api::RuntimeDispatchInfo<Balance> {\n\n\t\t\tTransactionPayment::query_info(uxt, len)\n\n\t\t}\n\n\t\tfn query_fee_details(\n\n\t\t\tuxt: <Block as BlockT>::Extrinsic,\n\n\t\t\tlen: u32,\n\n\t\t) -> pallet_transaction_payment::FeeDetails<Balance> {\n\n\t\t\tTransactionPayment::query_fee_details(uxt, len)\n\n\t\t}\n\n\t}\n", "file_path": "runtime/src/lib.rs", "rank": 65, "score": 4.982092608029516 }, { "content": "### Runtime\n\n\n\nIn Substrate, the terms\n\n\"[runtime](https://docs.substrate.io/v3/getting-started/glossary#runtime)\" and\n\n\"[state transition function](https://docs.substrate.io/v3/getting-started/glossary#state-transition-function-stf)\"\n\nare analogous - they refer to the core logic of the blockchain that is responsible for validating\n\nblocks and executing the state changes they define. The Substrate project in this repository uses\n\nthe [FRAME](https://docs.substrate.io/v3/runtime/frame) framework to construct a\n\nblockchain runtime. FRAME allows runtime developers to declare domain-specific logic in modules\n\ncalled \"pallets\". 
At the heart of FRAME is a helpful\n\n[macro language](https://docs.substrate.io/v3/runtime/macros) that makes it easy to\n\ncreate pallets and flexibly compose them to create blockchains that can address\n\n[a variety of needs](https://www.substrate.io/substrate-users/).\n\n\n\nReview the [FRAME runtime implementation](./runtime/src/lib.rs) included in this template and note\n\nthe following:\n\n\n\n- This file configures several pallets to include in the runtime. Each pallet configuration is\n\n defined by a code block that begins with `impl $PALLET_NAME::Config for Runtime`.\n\n- The pallets are composed into a single runtime by way of the\n\n [`construct_runtime!`](https://crates.parity.io/frame_support/macro.construct_runtime.html)\n\n macro, which is part of the core\n\n [FRAME Support](https://docs.substrate.io/v3/runtime/frame#support-crate)\n\n library.\n\n\n", "file_path": "README.md", "rank": 66, "score": 4.814007097376857 }, { "content": "There are several files in the `node` directory - take special note of the following:\n\n\n\n- [`chain_spec.rs`](./node/src/chain_spec.rs): A\n\n [chain specification](https://docs.substrate.io/v3/runtime/chain-specs) is a\n\n source code file that defines a Substrate chain's initial (genesis) state. Chain specifications\n\n are useful for development and testing, and critical when architecting the launch of a\n\n production chain. Take note of the `development_config` and `testnet_genesis` functions, which\n\n are used to define the genesis state for the local development chain configuration. These\n\n functions identify some\n\n [well-known accounts](https://docs.substrate.io/v3/tools/subkey#well-known-keys)\n\n and use them to configure the blockchain's initial state.\n\n- [`service.rs`](./node/src/service.rs): This file defines the node implementation. Take note of\n\n the libraries that this file imports and the names of the functions it invokes. 
In particular,\n\n there are references to consensus-related topics, such as the\n\n [longest chain rule](https://docs.substrate.io/v3/advanced/consensus#longest-chain-rule),\n\n the [Aura](https://docs.substrate.io/v3/advanced/consensus#aura) block authoring\n\n mechanism and the\n\n [GRANDPA](https://docs.substrate.io/v3/advanced/consensus#grandpa) finality\n\n gadget.\n\n\n\nAfter the node has been [built](#build), refer to the embedded documentation to learn more about the\n\ncapabilities and configuration parameters that it exposes:\n\n\n\n```shell\n\n./target/release/node-template --help\n\n```\n\n\n", "file_path": "README.md", "rank": 67, "score": 4.276257538727968 }, { "content": "\n\n\t// if the node isn't actively participating in consensus then it doesn't\n\n\t// need a keystore, regardless of which protocol we use below.\n\n\tlet keystore =\n\n\t\tif role.is_authority() { Some(keystore_container.sync_keystore()) } else { None };\n\n\n\n\tlet grandpa_config = sc_finality_grandpa::Config {\n\n\t\t// FIXME #1578 make this available through chainspec\n\n\t\tgossip_duration: Duration::from_millis(333),\n\n\t\tjustification_period: 512,\n\n\t\tname: Some(name),\n\n\t\tobserver_enabled: false,\n\n\t\tkeystore,\n\n\t\tlocal_role: role,\n\n\t\ttelemetry: telemetry.as_ref().map(|x| x.handle()),\n\n\t\tprotocol_name: grandpa_protocol_name,\n\n\t};\n\n\n\n\tif enable_grandpa {\n\n\t\t// start the full GRANDPA voter\n", "file_path": "node/src/service.rs", "rank": 68, "score": 4.257896413645435 }, { "content": "\t\t// NOTE: non-authorities could run the GRANDPA observer protocol, but at\n\n\t\t// this point the full voter should provide better guarantees of block\n\n\t\t// and vote data availability than the observer. 
The observer has not\n\n\t\t// been tested extensively yet and having most nodes in a network run it\n\n\t\t// could lead to finality stalls.\n\n\t\tlet grandpa_config = sc_finality_grandpa::GrandpaParams {\n\n\t\t\tconfig: grandpa_config,\n\n\t\t\tlink: grandpa_link,\n\n\t\t\tnetwork,\n\n\t\t\tvoting_rule: sc_finality_grandpa::VotingRulesBuilder::default().build(),\n\n\t\t\tprometheus_registry,\n\n\t\t\tshared_voter_state: SharedVoterState::empty(),\n\n\t\t\ttelemetry: telemetry.as_ref().map(|x| x.handle()),\n\n\t\t};\n\n\n\n\t\t// the GRANDPA voter task is considered infallible, i.e.\n\n\t\t// if it fails we take down the service with it.\n\n\t\ttask_manager.spawn_essential_handle().spawn_blocking(\n\n\t\t\t\"grandpa-voter\",\n\n\t\t\tNone,\n\n\t\t\tsc_finality_grandpa::run_grandpa_voter(grandpa_config)?,\n\n\t\t);\n\n\t}\n\n\n\n\tnetwork_starter.start_network();\n\n\tOk(task_manager)\n\n}\n", "file_path": "node/src/service.rs", "rank": 69, "score": 4.214675905984897 }, { "content": "\t}\n\n\n\n\tfn author() -> String {\n\n\t\tenv!(\"CARGO_PKG_AUTHORS\").into()\n\n\t}\n\n\n\n\tfn support_url() -> String {\n\n\t\t\"support.anonymous.an\".into()\n\n\t}\n\n\n\n\tfn copyright_start_year() -> i32 {\n\n\t\t2017\n\n\t}\n\n\n\n\tfn load_spec(&self, id: &str) -> Result<Box<dyn sc_service::ChainSpec>, String> {\n\n\t\tOk(match id {\n\n\t\t\t\"dev\" => Box::new(chain_spec::development_config()?),\n\n\t\t\t\"\" | \"local\" => Box::new(chain_spec::local_testnet_config()?),\n\n\t\t\tpath =>\n\n\t\t\t\tBox::new(chain_spec::ChainSpec::from_json_file(std::path::PathBuf::from(path))?),\n\n\t\t})\n\n\t}\n\n\n\n\tfn native_runtime_version(_: &Box<dyn ChainSpec>) -> &'static RuntimeVersion {\n\n\t\t&node_template_runtime::VERSION\n\n\t}\n\n}\n\n\n\n/// Parse and run command line arguments\n", "file_path": "node/src/command.rs", "rank": 70, "score": 3.951706629324199 }, { "content": "### Single-Node Development Chain\n\n\n\nThis command will start the single-node development chain 
with non-persistent state:\n\n\n\n```bash\n\n./target/release/node-template --dev\n\n```\n\n\n\nPurge the development chain's state:\n\n\n\n```bash\n\n./target/release/node-template purge-chain --dev\n\n```\n\n\n\nStart the development chain with detailed logging:\n\n\n\n```bash\n\nRUST_BACKTRACE=1 ./target/release/node-template -ldebug --dev\n\n```\n\n\n\n> Development chain means that the state of our chain will be in a tmp folder while the nodes are\n\n> running. Also, **alice** account will be authority and sudo account as declared in the\n\n> [genesis state](https://github.com/substrate-developer-hub/substrate-node-template/blob/main/node/src/chain_spec.rs#L49).\n\n> At the same time the following accounts will be pre-funded:\n\n> - Alice\n\n> - Bob\n\n> - Alice//stash\n\n> - Bob//stash\n\n\n\nIn case of being interested in maintaining the chain' state between runs a base path must be added\n\nso the db can be stored in the provided folder instead of a temporal one. We could use this folder\n\nto store different chain databases, as a different folder will be created per different chain that\n\nis ran. The following commands shows how to use a newly created folder as our db base path.\n\n\n\n```bash\n\n// Create a folder to use as the db base path\n\n$ mkdir my-chain-state\n\n\n\n// Use of that folder to store the chain state\n\n$ ./target/release/node-template --dev --base-path ./my-chain-state/\n\n\n\n// Check the folder structure created inside the base path after running the chain\n\n$ ls ./my-chain-state\n\nchains\n\n$ ls ./my-chain-state/chains/\n\ndev\n\n$ ls ./my-chain-state/chains/dev\n\ndb keystore network\n\n```\n\n\n\n\n\n### Connect with Polkadot-JS Apps Front-end\n\n\n\nOnce the node template is running locally, you can connect it with **Polkadot-JS Apps** front-end\n\nto interact with your chain. 
[Click\n\nhere](https://polkadot.js.org/apps/#/explorer?rpc=ws://localhost:9944) connecting the Apps to your\n\nlocal node template.\n\n\n", "file_path": "README.md", "rank": 71, "score": 3.871739193309488 }, { "content": "\t\t\t\tlet PartialComponents { client, task_manager, .. } = service::new_partial(&config)?;\n\n\t\t\t\tOk((cmd.run(client, config.database), task_manager))\n\n\t\t\t})\n\n\t\t},\n\n\t\tSome(Subcommand::ExportState(cmd)) => {\n\n\t\t\tlet runner = cli.create_runner(cmd)?;\n\n\t\t\trunner.async_run(|config| {\n\n\t\t\t\tlet PartialComponents { client, task_manager, .. } = service::new_partial(&config)?;\n\n\t\t\t\tOk((cmd.run(client, config.chain_spec), task_manager))\n\n\t\t\t})\n\n\t\t},\n\n\t\tSome(Subcommand::ImportBlocks(cmd)) => {\n\n\t\t\tlet runner = cli.create_runner(cmd)?;\n\n\t\t\trunner.async_run(|config| {\n\n\t\t\t\tlet PartialComponents { client, task_manager, import_queue, .. } =\n\n\t\t\t\t\tservice::new_partial(&config)?;\n\n\t\t\t\tOk((cmd.run(client, import_queue), task_manager))\n\n\t\t\t})\n\n\t\t},\n\n\t\tSome(Subcommand::PurgeChain(cmd)) => {\n", "file_path": "node/src/command.rs", "rank": 72, "score": 3.348632533716253 }, { "content": "\tServiceError,\n\n> {\n\n\tif config.keystore_remote.is_some() {\n\n\t\treturn Err(ServiceError::Other(\"Remote Keystores are not supported.\".into()))\n\n\t}\n\n\n\n\tlet telemetry = config\n\n\t\t.telemetry_endpoints\n\n\t\t.clone()\n\n\t\t.filter(|x| !x.is_empty())\n\n\t\t.map(|endpoints| -> Result<_, sc_telemetry::Error> {\n\n\t\t\tlet worker = TelemetryWorker::new(16)?;\n\n\t\t\tlet telemetry = worker.handle().new_telemetry(endpoints);\n\n\t\t\tOk((worker, telemetry))\n\n\t\t})\n\n\t\t.transpose()?;\n\n\n\n\tlet executor = NativeElseWasmExecutor::<ExecutorDispatch>::new(\n\n\t\tconfig.wasm_method,\n\n\t\tconfig.default_heap_pages,\n", "file_path": "node/src/service.rs", "rank": 73, "score": 3.209793146329723 }, { "content": "\t\t\tclient: client.clone(),\n\n\t\t\ttransaction_pool: 
transaction_pool.clone(),\n\n\t\t\tspawn_handle: task_manager.spawn_handle(),\n\n\t\t\timport_queue,\n\n\t\t\tblock_announce_validator_builder: None,\n\n\t\t\twarp_sync: Some(warp_sync),\n\n\t\t})?;\n\n\n\n\tif config.offchain_worker.enabled {\n\n\t\tsc_service::build_offchain_workers(\n\n\t\t\t&config,\n\n\t\t\ttask_manager.spawn_handle(),\n\n\t\t\tclient.clone(),\n\n\t\t\tnetwork.clone(),\n\n\t\t);\n\n\t}\n\n\n\n\tlet role = config.role.clone();\n\n\tlet force_authoring = config.force_authoring;\n\n\tlet backoff_authoring_blocks: Option<()> = None;\n", "file_path": "node/src/service.rs", "rank": 74, "score": 3.1669114784828993 }, { "content": "use crate::{\n\n\tchain_spec,\n\n\tcli::{Cli, Subcommand},\n\n\tservice,\n\n};\n\nuse node_template_runtime::Block;\n\nuse sc_cli::{ChainSpec, RuntimeVersion, SubstrateCli};\n\nuse sc_service::PartialComponents;\n\n\n\nimpl SubstrateCli for Cli {\n\n\tfn impl_name() -> String {\n\n\t\t\"Substrate Node\".into()\n\n\t}\n\n\n\n\tfn impl_version() -> String {\n\n\t\tenv!(\"SUBSTRATE_CLI_IMPL_VERSION\").into()\n\n\t}\n\n\n\n\tfn description() -> String {\n\n\t\tenv!(\"CARGO_PKG_DESCRIPTION\").into()\n", "file_path": "node/src/command.rs", "rank": 75, "score": 3.1576082750174383 }, { "content": "\t\t};\n\n\t}\n\n\tlet grandpa_protocol_name = sc_finality_grandpa::protocol_standard_name(\n\n\t\t&client.block_hash(0).ok().flatten().expect(\"Genesis block exists; qed\"),\n\n\t\t&config.chain_spec,\n\n\t);\n\n\n\n\tconfig\n\n\t\t.network\n\n\t\t.extra_sets\n\n\t\t.push(sc_finality_grandpa::grandpa_peers_set_config(grandpa_protocol_name.clone()));\n\n\tlet warp_sync = Arc::new(sc_finality_grandpa::warp_proof::NetworkProvider::new(\n\n\t\tbackend.clone(),\n\n\t\tgrandpa_link.shared_authority_set().clone(),\n\n\t\tVec::default(),\n\n\t));\n\n\n\n\tlet (network, system_rpc_tx, network_starter) =\n\n\t\tsc_service::build_network(sc_service::BuildNetworkParams {\n\n\t\t\tconfig: &config,\n", "file_path": "node/src/service.rs", "rank": 76, 
"score": 3.1540854352473184 }, { "content": "\t\t\t},\n\n\t\tNone => {\n\n\t\t\tlet runner = cli.create_runner(&cli.run)?;\n\n\t\t\trunner.run_node_until_exit(|config| async move {\n\n\t\t\t\tservice::new_full(config).map_err(sc_cli::Error::Service)\n\n\t\t\t})\n\n\t\t},\n\n\t}\n\n}\n", "file_path": "node/src/command.rs", "rank": 77, "score": 3.085376404073031 }, { "content": "use substrate_wasm_builder::WasmBuilder;\n\n\n", "file_path": "runtime/build.rs", "rank": 78, "score": 2.9113204190777022 }, { "content": "\t\tconfig.max_runtime_instances,\n\n\t\tconfig.runtime_cache_size,\n\n\t);\n\n\n\n\tlet (client, backend, keystore_container, task_manager) =\n\n\t\tsc_service::new_full_parts::<Block, RuntimeApi, _>(\n\n\t\t\t&config,\n\n\t\t\ttelemetry.as_ref().map(|(_, telemetry)| telemetry.handle()),\n\n\t\t\texecutor,\n\n\t\t)?;\n\n\tlet client = Arc::new(client);\n\n\n\n\tlet telemetry = telemetry.map(|(worker, telemetry)| {\n\n\t\ttask_manager.spawn_handle().spawn(\"telemetry\", None, worker.run());\n\n\t\ttelemetry\n\n\t});\n\n\n\n\tlet select_chain = sc_consensus::LongestChain::new(backend.clone());\n\n\n\n\tlet transaction_pool = sc_transaction_pool::BasicPool::new_full(\n", "file_path": "node/src/service.rs", "rank": 79, "score": 2.8104884695771672 }, { "content": "\tlet name = config.network.node_name.clone();\n\n\tlet enable_grandpa = !config.disable_grandpa;\n\n\tlet prometheus_registry = config.prometheus_registry().cloned();\n\n\n\n\tlet rpc_extensions_builder = {\n\n\t\tlet client = client.clone();\n\n\t\tlet pool = transaction_pool.clone();\n\n\n\n\t\tBox::new(move |deny_unsafe, _| {\n\n\t\t\tlet deps =\n\n\t\t\t\tcrate::rpc::FullDeps { client: client.clone(), pool: pool.clone(), deny_unsafe };\n\n\n\n\t\t\tOk(crate::rpc::create_full(deps))\n\n\t\t})\n\n\t};\n\n\n\n\tlet _rpc_handlers = sc_service::spawn_tasks(sc_service::SpawnTasksParams {\n\n\t\tnetwork: network.clone(),\n\n\t\tclient: client.clone(),\n\n\t\tkeystore: 
keystore_container.sync_keystore(),\n", "file_path": "node/src/service.rs", "rank": 80, "score": 2.7453894477962404 }, { "content": "\t\tconfig.transaction_pool.clone(),\n\n\t\tconfig.role.is_authority().into(),\n\n\t\tconfig.prometheus_registry(),\n\n\t\ttask_manager.spawn_essential_handle(),\n\n\t\tclient.clone(),\n\n\t);\n\n\n\n\tlet (grandpa_block_import, grandpa_link) = sc_finality_grandpa::block_import(\n\n\t\tclient.clone(),\n\n\t\t&(client.clone() as Arc<_>),\n\n\t\tselect_chain.clone(),\n\n\t\ttelemetry.as_ref().map(|x| x.handle()),\n\n\t)?;\n\n\n\n\tlet slot_duration = sc_consensus_aura::slot_duration(&*client)?.slot_duration();\n\n\n\n\tlet import_queue =\n\n\t\tsc_consensus_aura::import_queue::<AuraPair, _, _, _, _, _, _>(ImportQueueParams {\n\n\t\t\tblock_import: grandpa_block_import.clone(),\n\n\t\t\tjustification_import: Some(Box::new(grandpa_block_import.clone())),\n", "file_path": "node/src/service.rs", "rank": 81, "score": 2.7243548085698963 }, { "content": "use substrate_build_script_utils::{generate_cargo_keys, rerun_if_git_head_changed};\n\n\n", "file_path": "node/build.rs", "rank": 82, "score": 2.706505913456904 }, { "content": "### Specifying nightly version\n\n\n\nUse the `WASM_BUILD_TOOLCHAIN` environment variable to specify the Rust nightly version a Substrate\n\nproject should use for Wasm compilation:\n\n\n\n```bash\n\nWASM_BUILD_TOOLCHAIN=nightly-<yyyy-MM-dd> cargo build --release\n\n```\n\n\n\n> Note that this only builds _the runtime_ with the specified nightly. The rest of project will be\n\n> compiled with **your default toolchain**, i.e. 
the latest installed stable toolchain.\n\n\n\n### Downgrading Rust nightly\n\n\n\nIf your computer is configured to use the latest Rust nightly and you would like to downgrade to a\n\nspecific nightly version, follow these steps:\n\n\n\n```bash\n\nrustup uninstall nightly\n\nrustup install nightly-<yyyy-MM-dd>\n\nrustup target add wasm32-unknown-unknown --toolchain nightly-<yyyy-MM-dd>\n\n```\n", "file_path": "docs/rust-setup.md", "rank": 83, "score": 2.5932384364927064 }, { "content": "installed targets for active toolchain\n\n--------------------------------------\n\n\n\nwasm32-unknown-unknown\n\nx86_64-unknown-linux-gnu\n\n\n\nactive toolchain\n\n----------------\n\n\n\nstable-x86_64-unknown-linux-gnu (default)\n\nrustc 1.50.0 (cb75ad5db 2021-02-10)\n\n```\n\n\n\nAs you can see above, the default toolchain is stable, and the\n\n`nightly-x86_64-unknown-linux-gnu` toolchain as well as its `wasm32-unknown-unknown` target is installed.\n\nYou also see that `nightly-2020-10-06-x86_64-unknown-linux-gnu` is installed, but is not used unless explicitly defined as illustrated in the [specify your nightly version](#specifying-nightly-version)\n\nsection.\n\n\n\n### WebAssembly compilation\n\n\n\nSubstrate uses [WebAssembly](https://webassembly.org) (Wasm) to produce portable blockchain\n\nruntimes. 
You will need to configure your Rust compiler to use\n\n[`nightly` builds](https://doc.rust-lang.org/book/appendix-07-nightly-rust.html) to allow you to\n\ncompile Substrate runtime code to the Wasm target.\n\n\n\n> There are upstream issues in Rust that need to be resolved before all of Substrate can use the stable Rust toolchain.\n\n> [This is our tracking issue](https://github.com/paritytech/substrate/issues/1252) if you're curious as to why and how this will be resolved.\n\n\n", "file_path": "docs/rust-setup.md", "rank": 84, "score": 2.5407001426938183 }, { "content": "# Install Homebrew if necessary https://brew.sh/\n\n/bin/bash -c \"$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install.sh)\"\n\n\n\n# Make sure Homebrew is up-to-date, install openssl\n\nbrew update\n\nbrew install openssl\n\n```\n\n\n\n### Windows\n\n\n\n**_PLEASE NOTE:_** Native development of Substrate is _not_ very well supported! It is _highly_\n\nrecommend to use [Windows Subsystem Linux](https://docs.microsoft.com/en-us/windows/wsl/install-win10)\n\n(WSL) and follow the instructions for [Ubuntu/Debian](#ubuntudebian).\n\nPlease refer to the separate\n\n[guide for native Windows development](https://docs.substrate.io/v3/getting-started/windows-users/).\n\n\n\n## Rust developer environment\n\n\n\nThis guide uses <https://rustup.rs> installer and the `rustup` tool to manage the Rust toolchain.\n\nFirst install and configure `rustup`:\n\n\n\n```bash\n\n# Install\n\ncurl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh\n\n# Configure\n\nsource ~/.cargo/env\n\n```\n\n\n\nConfigure the Rust toolchain to default to the latest stable version, add nightly and the nightly wasm target:\n\n\n\n```bash\n\nrustup default stable\n\nrustup update\n\nrustup update nightly\n\nrustup target add wasm32-unknown-unknown --toolchain nightly\n\n```\n\n\n\n## Test your set-up\n\n\n\nNow the best way to ensure that you have successfully prepared a computer for 
Substrate\n\ndevelopment is to follow the steps in [our first Substrate tutorial](https://docs.substrate.io/tutorials/v3/create-your-first-substrate-chain/).\n\n\n\n## Troubleshooting Substrate builds\n\n\n\nSometimes you can't get the Substrate node template\n\nto compile out of the box. Here are some tips to help you work through that.\n\n\n\n### Rust configuration check\n\n\n\nTo see what Rust toolchain you are presently using, run:\n\n\n\n```bash\n\nrustup show\n\n```\n\n\n\nThis will show something like this (Ubuntu example) output:\n\n\n\n```text\n\nDefault host: x86_64-unknown-linux-gnu\n\nrustup home: /home/user/.rustup\n\n\n\ninstalled toolchains\n\n--------------------\n\n\n\nstable-x86_64-unknown-linux-gnu (default)\n\nnightly-2020-10-06-x86_64-unknown-linux-gnu\n\nnightly-x86_64-unknown-linux-gnu\n\n\n", "file_path": "docs/rust-setup.md", "rank": 85, "score": 2.4971169577625245 }, { "content": "# Substrate Node Template\n\n\n\n[![Try on playground](https://img.shields.io/badge/Playground-Node_Template-brightgreen?logo=Parity%20Substrate)](https://docs.substrate.io/playground/) [![Matrix](https://img.shields.io/matrix/substrate-technical:matrix.org)](https://matrix.to/#/#substrate-technical:matrix.org)\n\n\n\nA fresh FRAME-based [Substrate](https://www.substrate.io/) node, ready for hacking :rocket:\n\n\n\n## Getting Started\n\n\n\nFollow the steps below to get started with the Node Template, or get it up and running right from\n\nyour browser in just a few clicks using\n\nthe [Substrate Playground](https://docs.substrate.io/playground/) :hammer_and_wrench:\n\n\n\n### Using Nix\n\n\n\nInstall [nix](https://nixos.org/) and optionally [direnv](https://github.com/direnv/direnv) and\n\n[lorri](https://github.com/target/lorri) for a fully plug and play experience for setting up the\n\ndevelopment environment. 
To get all the correct dependencies activate direnv `direnv allow` and\n\nlorri `lorri shell`.\n\n\n\n### Rust Setup\n\n\n\nFirst, complete the [basic Rust setup instructions](./docs/rust-setup.md).\n\n\n\n### Run\n\n\n\nUse Rust's native `cargo` command to build and launch the template node:\n\n\n\n```sh\n\ncargo run --release -- --dev\n\n```\n\n\n\n### Build\n\n\n\nThe `cargo run` command will perform an initial build. Use the following command to build the node\n\nwithout launching it:\n\n\n\n```sh\n\ncargo build --release\n\n```\n\n\n\n### Embedded Docs\n\n\n\nOnce the project has been built, the following command can be used to explore all parameters and\n\nsubcommands:\n\n\n\n```sh\n\n./target/release/node-template -h\n\n```\n\n\n\n## Run\n\n\n\nThe provided `cargo run` command will launch a temporary node and its state will be discarded after\n\nyou terminate the process. After the project has been built, there are other ways to launch the\n\nnode.\n\n\n", "file_path": "README.md", "rank": 86, "score": 2.351632562905455 }, { "content": "#### Latest nightly for Substrate `master`\n\n\n\nDevelopers who are building Substrate _itself_ should always use the latest bug-free versions of\n\nRust stable and nightly. This is because the Substrate codebase follows the tip of Rust nightly,\n\nwhich means that changes in Substrate often depend on upstream changes in the Rust nightly compiler.\n\nTo ensure your Rust compiler is always up to date, you should run:\n\n\n\n```bash\n\nrustup update\n\nrustup update nightly\n\nrustup target add wasm32-unknown-unknown --toolchain nightly\n\n```\n\n\n\n> NOTE: It may be necessary to occasionally rerun `rustup update` if a change in the upstream Substrate\n\n> codebase depends on a new feature of the Rust compiler. 
When you do this, both your nightly\n\n> and stable toolchains will be pulled to the most recent release, and for nightly, it is\n\n> generally _not_ expected to compile WASM without error (although it very often does).\n\n> Be sure to [specify your nightly version](#specifying-nightly-version) if you get WASM build errors\n\n> from `rustup` and [downgrade nightly as needed](#downgrading-rust-nightly).\n\n\n\n#### Rust nightly toolchain\n\n\n\nIf you want to guarantee that your build works on your computer as you update Rust and other\n\ndependencies, you should use a specific Rust nightly version that is known to be\n\ncompatible with the version of Substrate they are using; this version will vary from project to\n\nproject and different projects may use different mechanisms to communicate this version to\n\ndevelopers. For instance, the Polkadot client specifies this information in its\n\n[release notes](https://github.com/paritytech/polkadot/releases).\n\n\n\n```bash\n\n# Specify the specific nightly toolchain in the date below:\n\nrustup install nightly-<yyyy-MM-dd>\n\n```\n\n\n\n#### Wasm toolchain\n\n\n\nNow, configure the nightly version to work with the Wasm compilation target:\n\n\n\n```bash\n\nrustup target add wasm32-unknown-unknown --toolchain nightly-<yyyy-MM-dd>\n\n```\n\n\n", "file_path": "docs/rust-setup.md", "rank": 87, "score": 2.165323508796724 }, { "content": "\t\t\topaque::SessionKeys::generate(seed)\n\n\t\t}\n\n\n\n\t\tfn decode_session_keys(\n\n\t\t\tencoded: Vec<u8>,\n\n\t\t) -> Option<Vec<(Vec<u8>, KeyTypeId)>> {\n\n\t\t\topaque::SessionKeys::decode_into_raw_public_keys(&encoded)\n\n\t\t}\n\n\t}\n\n\n\n\timpl fg_primitives::GrandpaApi<Block> for Runtime {\n\n\t\tfn grandpa_authorities() -> GrandpaAuthorityList {\n\n\t\t\tGrandpa::grandpa_authorities()\n\n\t\t}\n\n\n\n\t\tfn current_set_id() -> fg_primitives::SetId {\n\n\t\t\tGrandpa::current_set_id()\n\n\t\t}\n\n\n\n\t\tfn submit_report_equivocation_unsigned_extrinsic(\n", "file_path": 
"runtime/src/lib.rs", "rank": 88, "score": 1.9425321098283512 }, { "content": "\t\ttask_manager: &mut task_manager,\n\n\t\ttransaction_pool: transaction_pool.clone(),\n\n\t\trpc_extensions_builder,\n\n\t\tbackend,\n\n\t\tsystem_rpc_tx,\n\n\t\tconfig,\n\n\t\ttelemetry: telemetry.as_mut(),\n\n\t})?;\n\n\n\n\tif role.is_authority() {\n\n\t\tlet proposer_factory = sc_basic_authorship::ProposerFactory::new(\n\n\t\t\ttask_manager.spawn_handle(),\n\n\t\t\tclient.clone(),\n\n\t\t\ttransaction_pool,\n\n\t\t\tprometheus_registry.as_ref(),\n\n\t\t\ttelemetry.as_ref().map(|x| x.handle()),\n\n\t\t);\n\n\n\n\t\tlet can_author_with =\n\n\t\t\tsp_consensus::CanAuthorWithNativeVersion::new(client.executor().clone());\n", "file_path": "node/src/service.rs", "rank": 89, "score": 1.805598004307861 }, { "content": "### Multi-Node Local Testnet\n\n\n\nIf you want to see the multi-node consensus algorithm in action, refer to our\n\n[Start a Private Network tutorial](https://docs.substrate.io/tutorials/v3/private-network).\n\n\n\n## Template Structure\n\n\n\nA Substrate project such as this consists of a number of components that are spread across a few\n\ndirectories.\n\n\n\n### Node\n\n\n\nA blockchain node is an application that allows users to participate in a blockchain network.\n\nSubstrate-based blockchain nodes expose a number of capabilities:\n\n\n\n- Networking: Substrate nodes use the [`libp2p`](https://libp2p.io/) networking stack to allow the\n\n nodes in the network to communicate with one another.\n\n- Consensus: Blockchains must have a way to come to\n\n [consensus](https://docs.substrate.io/v3/advanced/consensus) on the state of the\n\n network. 
Substrate makes it possible to supply custom consensus engines and also ships with\n\n several consensus mechanisms that have been built on top of\n\n [Web3 Foundation research](https://research.web3.foundation/en/latest/polkadot/NPoS/index.html).\n\n- RPC Server: A remote procedure call (RPC) server is used to interact with Substrate nodes.\n\n\n", "file_path": "README.md", "rank": 90, "score": 1.7847971893855208 }, { "content": "---\n\ntitle: Installation\n\n---\n\n\n\nThis guide is for reference only, please check the latest information on getting starting with Substrate \n\n[here](https://docs.substrate.io/v3/getting-started/installation/).\n\n\n\nThis page will guide you through the **2 steps** needed to prepare a computer for **Substrate** development.\n\nSince Substrate is built with [the Rust programming language](https://www.rust-lang.org/), the first\n\nthing you will need to do is prepare the computer for Rust development - these steps will vary based\n\non the computer's operating system. Once Rust is configured, you will use its toolchains to interact\n\nwith Rust projects; the commands for Rust's toolchains will be the same for all supported,\n\nUnix-based operating systems.\n\n\n\n## Build dependencies\n\n\n\nSubstrate development is easiest on Unix-based operating systems like macOS or Linux. 
The examples\n\nin the [Substrate Docs](https://docs.substrate.io) use Unix-style terminals to demonstrate how to\n\ninteract with Substrate from the command line.\n\n\n\n### Ubuntu/Debian\n\n\n\nUse a terminal shell to execute the following commands:\n\n\n\n```bash\n\nsudo apt update\n\n# May prompt for location information\n\nsudo apt install -y git clang curl libssl-dev llvm libudev-dev\n\n```\n\n\n\n### Arch Linux\n\n\n\nRun these commands from a terminal:\n\n\n\n```bash\n\npacman -Syu --needed --noconfirm curl git clang\n\n```\n\n\n\n### Fedora\n\n\n\nRun these commands from a terminal:\n\n\n\n```bash\n\nsudo dnf update\n\nsudo dnf install clang curl git openssl-devel\n\n```\n\n\n\n### OpenSUSE\n\n\n\nRun these commands from a terminal:\n\n\n\n```bash\n\nsudo zypper install clang curl git openssl-devel llvm-devel libudev-devel\n\n```\n\n\n\n### macOS\n\n\n\n> **Apple M1 ARM**\n\n> If you have an Apple M1 ARM system on a chip, make sure that you have Apple Rosetta 2\n\n> installed through `softwareupdate --install-rosetta`. This is only needed to run the\n\n> `protoc` tool during the build. 
The build itself and the target binaries would remain native.\n\n\n\nOpen the Terminal application and execute the following commands:\n\n\n\n```bash\n", "file_path": "docs/rust-setup.md", "rank": 91, "score": 1.752972520400386 }, { "content": "\t\t\tclient: client.clone(),\n\n\t\t\tcreate_inherent_data_providers: move |_, ()| async move {\n\n\t\t\t\tlet timestamp = sp_timestamp::InherentDataProvider::from_system_time();\n\n\n\n\t\t\t\tlet slot =\n\n\t\t\t\t\tsp_consensus_aura::inherents::InherentDataProvider::from_timestamp_and_duration(\n\n\t\t\t\t\t\t*timestamp,\n\n\t\t\t\t\t\tslot_duration,\n\n\t\t\t\t\t);\n\n\n\n\t\t\t\tOk((timestamp, slot))\n\n\t\t\t},\n\n\t\t\tspawner: &task_manager.spawn_essential_handle(),\n\n\t\t\tcan_author_with: sp_consensus::CanAuthorWithNativeVersion::new(\n\n\t\t\t\tclient.executor().clone(),\n\n\t\t\t),\n\n\t\t\tregistry: config.prometheus_registry(),\n\n\t\t\tcheck_for_equivocation: Default::default(),\n\n\t\t\ttelemetry: telemetry.as_ref().map(|x| x.handle()),\n\n\t\t})?;\n", "file_path": "node/src/service.rs", "rank": 92, "score": 1.714199483939252 } ]
Rust
src/grouped_rects_to_place.rs
chinedufn/rectangle-pack
648774b13ff9af85019eddeb1856e1607191fb8c
use crate::RectToInsert; #[cfg(not(std))] use alloc::collections::BTreeMap as KeyValMap; #[cfg(std)] use std::collections::HashMap as KeyValMap; use alloc::{ collections::{btree_map::Entry, BTreeMap}, vec::Vec, }; use core::{fmt::Debug, hash::Hash}; #[derive(Debug)] pub struct GroupedRectsToPlace<RectToPlaceId, GroupId = ()> where RectToPlaceId: Debug + Hash + Eq + Ord + PartialOrd, GroupId: Debug + Hash + Eq + Ord + PartialOrd, { pub(crate) inbound_id_to_group_ids: KeyValMap<RectToPlaceId, Vec<Group<GroupId, RectToPlaceId>>>, pub(crate) group_id_to_inbound_ids: BTreeMap<Group<GroupId, RectToPlaceId>, Vec<RectToPlaceId>>, pub(crate) rects: KeyValMap<RectToPlaceId, RectToInsert>, } #[derive(Debug, Hash, Eq, PartialEq, Ord, PartialOrd)] pub enum Group<GroupId, RectToPlaceId> where GroupId: Debug + Hash + Eq + PartialEq + Ord + PartialOrd, RectToPlaceId: Debug + Ord + PartialOrd, { Ungrouped(RectToPlaceId), Grouped(GroupId), } impl<RectToPlaceId, GroupId> GroupedRectsToPlace<RectToPlaceId, GroupId> where RectToPlaceId: Debug + Hash + Clone + Eq + Ord + PartialOrd, GroupId: Debug + Hash + Clone + Eq + Ord + PartialOrd, { pub fn new() -> Self { Self { inbound_id_to_group_ids: Default::default(), group_id_to_inbound_ids: Default::default(), rects: Default::default(), } } pub fn push_rect( &mut self, inbound_id: RectToPlaceId, group_ids: Option<Vec<GroupId>>, inbound: RectToInsert, ) { self.rects.insert(inbound_id.clone(), inbound); match group_ids { None => { self.group_id_to_inbound_ids.insert( Group::Ungrouped(inbound_id.clone()), vec![inbound_id.clone()], ); self.inbound_id_to_group_ids .insert(inbound_id.clone(), vec![Group::Ungrouped(inbound_id)]); } Some(group_ids) => { self.inbound_id_to_group_ids.insert( inbound_id.clone(), group_ids .clone() .into_iter() .map(|gid| Group::Grouped(gid)) .collect(), ); for group_id in group_ids { match self.group_id_to_inbound_ids.entry(Group::Grouped(group_id)) { Entry::Occupied(mut o) => { o.get_mut().push(inbound_id.clone()); } 
Entry::Vacant(v) => { v.insert(vec![inbound_id.clone()]); } }; } } }; } } #[cfg(test)] mod tests { use super::*; use crate::RectToInsert; #[test] fn ungrouped_rectangles_use_their_inbound_id_as_their_group_id() { let mut lrg: GroupedRectsToPlace<_, ()> = GroupedRectsToPlace::new(); lrg.push_rect(RectToPlaceId::One, None, RectToInsert::new(10, 10, 1)); assert_eq!( lrg.group_id_to_inbound_ids[&Group::Ungrouped(RectToPlaceId::One)], vec![RectToPlaceId::One] ); } #[test] fn group_id_to_inbound_ids() { let mut lrg = GroupedRectsToPlace::new(); lrg.push_rect( RectToPlaceId::One, Some(vec![0]), RectToInsert::new(10, 10, 1), ); lrg.push_rect( RectToPlaceId::Two, Some(vec![0]), RectToInsert::new(10, 10, 1), ); assert_eq!( lrg.group_id_to_inbound_ids.get(&Group::Grouped(0)).unwrap(), &vec![RectToPlaceId::One, RectToPlaceId::Two] ); } #[test] fn inbound_id_to_group_ids() { let mut lrg = GroupedRectsToPlace::new(); lrg.push_rect( RectToPlaceId::One, Some(vec![0, 1]), RectToInsert::new(10, 10, 1), ); lrg.push_rect(RectToPlaceId::Two, None, RectToInsert::new(10, 10, 1)); assert_eq!( lrg.inbound_id_to_group_ids[&RectToPlaceId::One], vec![Group::Grouped(0), Group::Grouped(1)] ); assert_eq!( lrg.inbound_id_to_group_ids[&RectToPlaceId::Two], vec![Group::Ungrouped(RectToPlaceId::Two)] ); } #[test] fn store_the_inbound_rectangle() { let mut lrg = GroupedRectsToPlace::new(); lrg.push_rect( RectToPlaceId::One, Some(vec![0, 1]), RectToInsert::new(10, 10, 1), ); assert_eq!(lrg.rects[&RectToPlaceId::One], RectToInsert::new(10, 10, 1)); } #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)] enum RectToPlaceId { One, Two, } }
use crate::RectToInsert; #[cfg(not(std))] use alloc::collections::BTreeMap as KeyValMap; #[cfg(std)] use std::collections::HashMap as KeyValMap; use alloc::{ collections::{btree_map::Entry, BTreeMap}, vec::Vec, }; use core::{fmt::Debug, hash::Hash}; #[derive(Debug)] pub struct GroupedRectsToPlace<RectToPlaceId, GroupId = ()> where RectToPlaceId: Debug + Hash + Eq + Ord + PartialOrd, GroupId: Debug + Hash + Eq + Ord + PartialOrd, { pub(crate) inbound_id_to_group_ids: KeyValMap<RectToPlaceId, Vec<Group<GroupId, RectToPlaceId>>>, pub(crate) group_id_to_inbound_ids: BTreeMap<Group<GroupId, RectToPlaceId>, Vec<RectToPlaceId>>, pub(crate) rects: KeyValMap<RectToPlaceId, RectToInsert>, } #[derive(Debug, Hash, Eq, PartialEq, Ord, PartialOrd)] pub enum Group<GroupId, RectToPlaceId> where GroupId: Debug + Hash + Eq + PartialEq + Ord + PartialOrd, RectToPlaceId: Debug + Ord + PartialOrd, { Ungrouped(RectToPlaceId), Grouped(GroupId), } impl<RectToPlaceId, GroupId> GroupedRectsToPlace<RectToPlaceId, GroupId> where RectToPlaceId: Debug + Hash + Clone + Eq + Ord + PartialOrd, GroupId: Debug + Hash + Clone + Eq + Ord + PartialOrd, { pub fn new() -> Self { Self { inbound_id_to_group_ids: Default::default(), group_id_to_inbound_ids: Default::default(), rects: Default::default(), } } pub fn push_rect( &mut self, inbound_id: RectToPlaceId, group_ids: Option<Vec<GroupId>>, inbound: RectToInsert, ) { self.rects.insert(inbound_id.clone(), inbound); match group_ids { None => { self.group_id_to_inbound_ids.insert( Group::Ungrouped(inbound_id.clone()), vec![inbound_id.clone()], ); self.inbound_id_to_group_ids .insert(inbound_id.clone(), vec![Group::Ungrouped(inbound_id)]); } Some(group_ids) => { self.inbound_id_to_group_ids.insert( inbound_id.clone(), group_ids .clone() .into_iter() .map(|gid| Group::Grouped(gid)) .collect(), ); for group_id in group_ids { match self.group_id_to_inbound_ids.entry(Group::Grouped(group_id)) { Entry::Occupied(mut o) => { o.get_mut().push(inbound_id.clone()); } 
Entry::Vacant(v) => { v.insert(vec![inbound_id.clone()]); } }; } } }; } } #[cfg(test)] mod tests { use super::*; use crate::RectToInsert; #[test] fn ungrouped_rectangles_use_their_inbound_id_as_their_group_id() { let mut lrg: GroupedRectsToPlace<_, ()> = GroupedRectsToPlace::new(); lrg.push_rect(RectToPlaceId::One, None, RectToInsert::new(10, 10, 1)); assert_eq!( lrg.group_id_to_inbound_ids[&Group::Ungrouped(RectToPlaceId::One)], vec![RectToPlaceId::One] ); } #[test] fn group_id_to_inbound_ids() { let mut lrg = GroupedRectsToPlace::new(); lrg.push_rect( RectToPlaceId::One, Some(vec![0]), RectToInsert::new(10, 10, 1), ); lrg.push_rect( RectToPlaceId::Two, Some(vec![0]), RectToInsert::new(10, 10, 1), ); assert_eq!( lrg.group_id_to_inbound_ids.get(&Group::Grouped(0)).unwrap(), &vec![RectToPlaceId::One, RectToPlaceId::Two] ); } #[test] fn inbound_id_to_group_ids() { let mut lrg = GroupedRectsToPlace::new(); lrg.push_rect( RectToPlaceId::One, Some(vec![0, 1]), RectToInsert::new(10, 10, 1), ); lrg.push_rect(RectToPlaceId::Two, None, RectToInsert::new(10, 10, 1)); assert_eq!( lrg.inbound_id_to_group_ids[&RectToPlaceId::One], vec![Group::Grouped(0), Group::Grouped(1)] ); assert_eq!( lrg.inbound_id_to_group_ids[&RectToPlaceId::Two], vec![Group::Ungrouped(RectToPlaceId::Two)] ); } #[test]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)] enum RectToPlaceId { One, Two, } }
fn store_the_inbound_rectangle() { let mut lrg = GroupedRectsToPlace::new(); lrg.push_rect( RectToPlaceId::One, Some(vec![0, 1]), RectToInsert::new(10, 10, 1), ); assert_eq!(lrg.rects[&RectToPlaceId::One], RectToInsert::new(10, 10, 1)); }
function_block-full_function
[ { "content": "/// Determine how to fit a set of incoming rectangles (2d or 3d) into a set of target bins.\n\n///\n\n/// ## Example\n\n///\n\n/// ```\n\n/// //! A basic example of packing rectangles into target bins\n\n///\n\n/// use rectangle_pack::{\n\n/// GroupedRectsToPlace,\n\n/// RectToInsert,\n\n/// pack_rects,\n\n/// TargetBin,\n\n/// volume_heuristic,\n\n/// contains_smallest_box\n\n/// };\n\n/// use std::collections::BTreeMap;\n\n///\n\n/// // A rectangle ID just needs to meet these trait bounds (ideally also Copy).\n\n/// // So you could use a String, PathBuf, or any other type that meets these\n\n/// // trat bounds. You do not have to use a custom enum.\n\n/// #[derive(Debug, Hash, PartialEq, Eq, Clone, Ord, PartialOrd)]\n\n/// enum MyCustomRectId {\n\n/// RectOne,\n\n/// RectTwo,\n\n/// RectThree,\n\n/// }\n\n///\n\n/// // A target bin ID just needs to meet these trait bounds (ideally also Copy)\n\n/// // So you could use a u32, &str, or any other type that meets these\n\n/// // trat bounds. You do not have to use a custom enum.\n\n/// #[derive(Debug, Hash, PartialEq, Eq, Clone, Ord, PartialOrd)]\n\n/// enum MyCustomBinId {\n\n/// DestinationBinOne,\n\n/// DestinationBinTwo,\n\n/// }\n\n///\n\n/// // A placement group just needs to meet these trait bounds (ideally also Copy).\n\n/// //\n\n/// // Groups allow you to ensure that a set of rectangles will be placed\n\n/// // into the same bin. If this isn't possible an error is returned.\n\n/// //\n\n/// // Groups are optional.\n\n/// //\n\n/// // You could use an i32, &'static str, or any other type that meets these\n\n/// // trat bounds. 
You do not have to use a custom enum.\n\n/// #[derive(Debug, Hash, PartialEq, Eq, Clone, Ord, PartialOrd)]\n\n/// enum MyCustomGroupId {\n\n/// GroupIdOne\n\n/// }\n\n///\n\n/// let mut rects_to_place = GroupedRectsToPlace::new();\n\n/// rects_to_place.push_rect(\n\n/// MyCustomRectId::RectOne,\n\n/// Some(vec![MyCustomGroupId::GroupIdOne]),\n\n/// RectToInsert::new(10, 20, 255)\n\n/// );\n\n/// rects_to_place.push_rect(\n\n/// MyCustomRectId::RectTwo,\n\n/// Some(vec![MyCustomGroupId::GroupIdOne]),\n\n/// RectToInsert::new(5, 50, 255)\n\n/// );\n\n/// rects_to_place.push_rect(\n\n/// MyCustomRectId::RectThree,\n\n/// None,\n\n/// RectToInsert::new(30, 30, 255)\n\n/// );\n\n///\n\n/// let mut target_bins = BTreeMap::new();\n\n/// target_bins.insert(MyCustomBinId::DestinationBinOne, TargetBin::new(2048, 2048, 255));\n\n/// target_bins.insert(MyCustomBinId::DestinationBinTwo, TargetBin::new(4096, 4096, 1020));\n\n///\n\n/// // Information about where each `MyCustomRectId` was placed\n\n/// let rectangle_placements = pack_rects(\n\n/// &rects_to_place,\n\n/// &mut target_bins,\n\n/// &volume_heuristic,\n\n/// &contains_smallest_box\n\n/// ).unwrap();\n\n/// ```\n\n///\n\n/// ## Algorithm\n\n///\n\n/// The algorithm was originally inspired by [rectpack2D] and then modified to work in 3D.\n\n///\n\n/// [rectpack2D]: https://github.com/TeamHypersomnia/rectpack2D\n\n///\n\n/// ## TODO:\n\n///\n\n/// Optimize - plenty of room to remove clones and duplication .. 
etc\n\npub fn pack_rects<\n\n RectToPlaceId: Debug + Hash + PartialEq + Eq + Clone + Ord + PartialOrd,\n\n BinId: Debug + Hash + PartialEq + Eq + Clone + Ord + PartialOrd,\n\n GroupId: Debug + Hash + PartialEq + Eq + Clone + Ord + PartialOrd,\n\n>(\n\n rects_to_place: &GroupedRectsToPlace<RectToPlaceId, GroupId>,\n\n target_bins: &mut BTreeMap<BinId, TargetBin>,\n\n box_size_heuristic: &BoxSizeHeuristicFn,\n\n more_suitable_containers_fn: &ComparePotentialContainersFn,\n\n) -> Result<RectanglePackOk<RectToPlaceId, BinId>, RectanglePackError> {\n\n let mut packed_locations = KeyValMap::new();\n\n\n\n let mut target_bins: Vec<(&BinId, &mut TargetBin)> = target_bins.iter_mut().collect();\n\n sort_bins_smallest_to_largest(&mut target_bins, box_size_heuristic);\n\n\n\n let mut group_id_to_inbound_ids: Vec<(&Group<GroupId, RectToPlaceId>, &Vec<RectToPlaceId>)> =\n\n rects_to_place.group_id_to_inbound_ids.iter().collect();\n\n sort_groups_largest_to_smallest(\n\n &mut group_id_to_inbound_ids,\n\n rects_to_place,\n", "file_path": "src/lib.rs", "rank": 0, "score": 82677.79162864583 }, { "content": "fn sort_groups_largest_to_smallest<GroupId, RectToPlaceId>(\n\n group_id_to_inbound_ids: &mut Vec<(&Group<GroupId, RectToPlaceId>, &Vec<RectToPlaceId>)>,\n\n incoming_groups: &GroupedRectsToPlace<RectToPlaceId, GroupId>,\n\n box_size_heuristic: &BoxSizeHeuristicFn,\n\n) where\n\n RectToPlaceId: Debug + Hash + PartialEq + Eq + Clone + Ord + PartialOrd,\n\n GroupId: Debug + Hash + PartialEq + Eq + Clone + Ord + PartialOrd,\n\n{\n\n group_id_to_inbound_ids.sort_by(|a, b| {\n\n let a_heuristic =\n\n a.1.iter()\n\n .map(|inbound| {\n\n let rect = incoming_groups.rects[inbound];\n\n box_size_heuristic(rect.whd)\n\n })\n\n .sum();\n\n\n\n let b_heuristic: u128 =\n\n b.1.iter()\n\n .map(|inbound| {\n", "file_path": "src/lib.rs", "rank": 1, "score": 55829.37587151116 }, { "content": "/// Select the container that has the smallest box.\n\n///\n\n/// If there is a tie on the smallest boxes, 
select whichever also has the second smallest box.\n\npub fn contains_smallest_box(\n\n mut container1: [WidthHeightDepth; 3],\n\n mut container2: [WidthHeightDepth; 3],\n\n heuristic: &BoxSizeHeuristicFn,\n\n) -> Ordering {\n\n container1.sort_by(|a, b| heuristic(*a).cmp(&heuristic(*b)));\n\n container2.sort_by(|a, b| heuristic(*a).cmp(&heuristic(*b)));\n\n\n\n match heuristic(container2[0]).cmp(&heuristic(container1[0])) {\n\n Ordering::Equal => heuristic(container2[1]).cmp(&heuristic(container1[1])),\n\n o => o,\n\n }\n\n}\n\n\n\n/// A rectangular section within a target bin that takes up one or more layers\n\n#[derive(Debug, Eq, PartialEq, Copy, Clone, Default, Ord, PartialOrd)]\n\npub struct BinSection {\n\n pub(crate) x: u32,\n\n pub(crate) y: u32,\n\n pub(crate) z: u32,\n", "file_path": "src/bin_section.rs", "rank": 2, "score": 55151.77496096083 }, { "content": "// TODO: This is duplicative of the code above\n\nfn can_fit_entire_group_into_bin<RectToPlaceId, GroupId>(\n\n mut bin: TargetBin,\n\n group: &[RectToPlaceId],\n\n rects_to_place: &GroupedRectsToPlace<RectToPlaceId, GroupId>,\n\n\n\n box_size_heuristic: &BoxSizeHeuristicFn,\n\n more_suitable_containers_fn: &ComparePotentialContainersFn,\n\n) -> bool\n\nwhere\n\n RectToPlaceId: Debug + Hash + PartialEq + Eq + Clone + Ord + PartialOrd,\n\n GroupId: Debug + Hash + PartialEq + Eq + Clone + Ord + PartialOrd,\n\n{\n\n 'incoming: for rect_to_place_id in group.iter() {\n\n if bin.available_bin_sections.len() == 0 {\n\n return false;\n\n }\n\n\n\n let mut bin_sections = bin.available_bin_sections.clone();\n\n\n\n let last_section_idx = bin_sections.len() - 1;\n", "file_path": "src/lib.rs", "rank": 3, "score": 54487.404976289894 }, { "content": "/// The volume of the box\n\npub fn volume_heuristic(whd: WidthHeightDepth) -> u128 {\n\n whd.width as u128 * whd.height as u128 * whd.depth as u128\n\n}\n", "file_path": "src/box_size_heuristics.rs", "rank": 4, "score": 43775.466626973444 }, { "content": "fn 
sort_by_size_largest_to_smallest(\n\n items: &mut [BinSection; 3],\n\n box_size_heuristic: &BoxSizeHeuristicFn,\n\n) {\n\n items.sort_by(|a, b| box_size_heuristic(b.whd).cmp(&box_size_heuristic(a.whd)));\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 5, "score": 26400.415600002558 }, { "content": "use crate::width_height_depth::WidthHeightDepth;\n\n\n\n/// A rectangle that we want to insert into a target bin\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub struct RectToInsert {\n\n pub(crate) whd: WidthHeightDepth,\n\n allow_global_x_axis_rotation: bool,\n\n allow_global_y_axis_rotation: bool,\n\n allow_global_z_axis_rotation: bool,\n\n}\n\n\n\nimpl Into<WidthHeightDepth> for RectToInsert {\n\n fn into(self) -> WidthHeightDepth {\n\n WidthHeightDepth {\n\n width: self.width(),\n\n height: self.height(),\n\n depth: self.depth(),\n\n }\n\n }\n\n}\n", "file_path": "src/rect_to_insert.rs", "rank": 6, "score": 25772.214974233666 }, { "content": "\n\n#[allow(missing_docs)]\n\nimpl RectToInsert {\n\n pub fn new(width: u32, height: u32, depth: u32) -> Self {\n\n RectToInsert {\n\n whd: WidthHeightDepth {\n\n width,\n\n height,\n\n depth,\n\n },\n\n // Rotation is not yet supported\n\n allow_global_x_axis_rotation: false,\n\n allow_global_y_axis_rotation: false,\n\n allow_global_z_axis_rotation: false,\n\n }\n\n }\n\n}\n\n\n\n#[allow(missing_docs)]\n\nimpl RectToInsert {\n", "file_path": "src/rect_to_insert.rs", "rank": 7, "score": 25769.087457593487 }, { "content": " pub fn width(&self) -> u32 {\n\n self.whd.width\n\n }\n\n\n\n pub fn height(&self) -> u32 {\n\n self.whd.height\n\n }\n\n\n\n pub fn depth(&self) -> u32 {\n\n self.whd.depth\n\n }\n\n}\n", "file_path": "src/rect_to_insert.rs", "rank": 8, "score": 25764.482005855974 }, { "content": "fn sort_bins_smallest_to_largest<BinId>(\n\n bins: &mut Vec<(&BinId, &mut TargetBin)>,\n\n box_size_heuristic: &BoxSizeHeuristicFn,\n\n) where\n\n BinId: Debug + Hash + PartialEq + Eq + Clone,\n\n{\n\n bins.sort_by(|a, b| {\n\n 
box_size_heuristic(WidthHeightDepth {\n\n width: a.1.max_width,\n\n height: a.1.max_height,\n\n depth: a.1.max_depth,\n\n })\n\n .cmp(&box_size_heuristic(WidthHeightDepth {\n\n width: b.1.max_width,\n\n height: b.1.max_height,\n\n depth: b.1.max_depth,\n\n }))\n\n });\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 19, "score": 23925.40482277678 }, { "content": "}\n\n\n\n// A placement group just needs to meet these trait bounds (ideally also Copy).\n\n//\n\n// Groups allow you to ensure that a set of rectangles will be placed\n\n// into the same bin. If this isn't possible an error is returned.\n\n//\n\n// Groups are optional.\n\n//\n\n// You could use an i32, &'static str, or any other type that meets these\n\n// trat bounds. You do not have to use a custom enum.\n\n#[derive(Debug, Hash, PartialEq, Eq, Clone, Ord, PartialOrd)]\n\nenum MyCustomGroupId {\n\n GroupIdOne\n\n}\n\n\n\nlet mut rects_to_place = GroupedRectsToPlace::new();\n\nrects_to_place.push_rect(\n\n MyCustomRectId::RectOne,\n\n Some(vec![MyCustomGroupId::GroupIdOne]),\n\n RectToInsert::new(10, 20, 255)\n\n);\n\nrects_to_place.push_rect(\n\n MyCustomRectId::RectTwo,\n\n Some(vec![MyCustomGroupId::GroupIdOne]),\n\n RectToInsert::new(5, 50, 255)\n\n);\n\nrects_to_place.push_rect(\n\n MyCustomRectId::RectThree,\n\n None,\n\n RectToInsert::new(30, 30, 255)\n\n);\n\n\n\nlet mut target_bins = BTreeMap::new();\n\ntarget_bins.insert(MyCustomBinId::DestinationBinOne, TargetBin::new(2048, 2048, 255));\n\ntarget_bins.insert(MyCustomBinId::DestinationBinTwo, TargetBin::new(4096, 4096, 1020));\n\n\n\n// Information about where each `MyCustomRectId` was placed\n\nlet rectangle_placements = pack_rects(\n\n &rects_to_place,\n\n &mut target_bins,\n\n &volume_heuristic,\n\n &contains_smallest_box\n\n).unwrap();\n\n```\n\n\n\n[Full API Documentation](https://docs.rs/rectangle-pack)\n\n\n", "file_path": "README.md", "rank": 20, "score": 19.299815267542197 }, { "content": "/// // So you could use a String, PathBuf, or 
any other type that meets these\n\n/// // trat bounds. You do not have to use a custom enum.\n\n/// #[derive(Debug, Hash, PartialEq, Eq, Clone, Ord, PartialOrd)]\n\n/// enum MyCustomRectId {\n\n/// RectOne,\n\n/// RectTwo,\n\n/// RectThree,\n\n/// }\n\n///\n\n/// // A target bin ID just needs to meet these trait bounds (ideally also Copy)\n\n/// // So you could use a u32, &str, or any other type that meets these\n\n/// // trat bounds. You do not have to use a custom enum.\n\n/// #[derive(Debug, Hash, PartialEq, Eq, Clone, Ord, PartialOrd)]\n\n/// enum MyCustomBinId {\n\n/// DestinationBinOne,\n\n/// DestinationBinTwo,\n\n/// }\n\n///\n\n/// // A placement group just needs to meet these trait bounds (ideally also Copy).\n\n/// //\n", "file_path": "src/lib.rs", "rank": 21, "score": 18.695365906123733 }, { "content": "/// // Groups allow you to ensure that a set of rectangles will be placed\n\n/// // into the same bin. If this isn't possible an error is returned.\n\n/// //\n\n/// // Groups are optional.\n\n/// //\n\n/// // You could use an i32, &'static str, or any other type that meets these\n\n/// // trat bounds. 
You do not have to use a custom enum.\n\n/// #[derive(Debug, Hash, PartialEq, Eq, Clone, Ord, PartialOrd)]\n\n/// enum MyCustomGroupId {\n\n/// GroupIdOne\n\n/// }\n\n///\n\n/// let mut rects_to_place = GroupedRectsToPlace::new();\n\n/// rects_to_place.push_rect(\n\n/// MyCustomRectId::RectOne,\n\n/// Some(vec![MyCustomGroupId::GroupIdOne]),\n\n/// RectToInsert::new(10, 20, 255)\n\n/// );\n\n/// rects_to_place.push_rect(\n\n/// MyCustomRectId::RectTwo,\n", "file_path": "src/lib.rs", "rank": 22, "score": 18.511310706255436 }, { "content": " match self {\n\n PushBinSectionError::OutOfBounds(oob) => {\n\n f.debug_tuple(\"BinSection\").field(oob).finish()\n\n }\n\n PushBinSectionError::Overlaps {\n\n remaining_section,\n\n new_section,\n\n } => f\n\n .debug_struct(\"Overlaps\")\n\n .field(\"remaining_section\", remaining_section)\n\n .field(\"new_section\", new_section)\n\n .finish(),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::width_height_depth::WidthHeightDepth;\n", "file_path": "src/target_bin/push_available_bin_section.rs", "rank": 23, "score": 18.357385317614067 }, { "content": " let rect = incoming_groups.rects[inbound];\n\n box_size_heuristic(rect.whd)\n\n })\n\n .sum();\n\n\n\n b_heuristic.cmp(&a_heuristic)\n\n });\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::{pack_rects, volume_heuristic, RectToInsert, RectanglePackError, TargetBin};\n\n\n\n use super::*;\n\n use crate::packed_location::RotatedBy;\n\n\n\n /// If the provided rectangles can't fit into the provided bins.\n\n #[test]\n\n fn error_if_the_rectangles_cannot_fit_into_target_bins() {\n\n let mut targets = BTreeMap::new();\n", "file_path": "src/lib.rs", "rank": 24, "score": 17.88292674744537 }, { "content": " if let Some(previous_packed) = previous_packed.as_ref() {\n\n assert_eq!(&packed, previous_packed);\n\n }\n\n\n\n previous_packed = Some(packed);\n\n }\n\n }\n\n\n\n #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)]\n\n enum 
RectToPlaceId {\n\n One,\n\n Two,\n\n Three,\n\n }\n\n\n\n #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)]\n\n enum BinId {\n\n Three,\n\n Four,\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 25, "score": 17.22233394442505 }, { "content": "# rectangle-pack [![Actions Status](https://github.com/chinedufn/rectangle-pack/workflows/test/badge.svg)](https://github.com/chinedufn/rectangle-pack/actions) [![docs](https://docs.rs/rectangle-pack/badge.svg)](https://docs.rs/rectangle-pack)\n\n\n\n> A general purpose, deterministic bin packer designed to conform to any two or three dimensional use case.\n\n\n\n`rectangle-pack` is a library focused on laying out any number of smaller rectangles (both 2d rectangles and 3d rectangular prisms) inside any number of larger rectangles.\n\n\n\n`rectangle-pack` exposes an API that gives the consumer control over how rectangles are packed - allowing them to tailor\n\nthe packing to their specific use case.\n\n\n\nWhile `rectangle-pack` was originally designed with texture atlas related use cases in mind - **the library itself has no notions of images and can be used\n\nin any rectangle packing context**.\n\n\n\n## Quickstart\n\n\n\n```\n\n# In your Cargo.toml\n\nrectangle-pack = \"0.4\"\n\n```\n\n\n\n```rust\n\n//! A basic example of packing rectangles into target bins\n\n\n\nuse rectangle_pack::{\n\n GroupedRectsToPlace,\n\n RectToInsert,\n\n pack_rects,\n\n TargetBin,\n\n volume_heuristic,\n\n contains_smallest_box\n\n};\n\nuse std::collections::BTreeMap;\n\n\n\n// A rectangle ID just needs to meet these trait bounds (ideally also Copy).\n\n// So you could use a String, PathBuf, or any other type that meets these\n\n// trat bounds. 
You do not have to use a custom enum.\n\n#[derive(Debug, Hash, PartialEq, Eq, Clone, Ord, PartialOrd)]\n\nenum MyCustomRectId {\n\n RectOne,\n\n RectTwo,\n\n RectThree,\n\n}\n\n\n\n// A target bin ID just needs to meet these trait bounds (ideally also Copy)\n\n// So you could use a u32, &str, or any other type that meets these\n\n// trat bounds. You do not have to use a custom enum.\n\n#[derive(Debug, Hash, PartialEq, Eq, Clone, Ord, PartialOrd)]\n\nenum MyCustomBinId {\n\n DestinationBinOne,\n\n DestinationBinTwo,\n", "file_path": "README.md", "rank": 26, "score": 17.00240216077235 }, { "content": "\n\n match pack_rects(\n\n &groups,\n\n &mut targets,\n\n &volume_heuristic,\n\n &contains_smallest_box,\n\n )\n\n .unwrap_err()\n\n {\n\n RectanglePackError::NotEnoughBinSpace => {}\n\n };\n\n }\n\n\n\n /// If we provide a single inbound rectangle and a single bin - it should be placed into that\n\n /// bin.\n\n #[test]\n\n fn one_inbound_rect_one_bin() {\n\n let mut groups: GroupedRectsToPlace<_, ()> = GroupedRectsToPlace::new();\n\n groups.push_rect(RectToPlaceId::One, None, RectToInsert::new(1, 2, 1));\n\n\n", "file_path": "src/lib.rs", "rank": 27, "score": 16.375055408278815 }, { "content": "/// Used to represent a volume (or area of the depth is 1)\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Ord, PartialOrd)]\n\n#[allow(missing_docs)]\n\npub struct WidthHeightDepth {\n\n pub(crate) width: u32,\n\n pub(crate) height: u32,\n\n pub(crate) depth: u32,\n\n}\n\n\n\n#[allow(missing_docs)]\n\nimpl WidthHeightDepth {\n\n /// # Panics\n\n ///\n\n /// Panics if width, height or depth is 0.\n\n pub fn new(width: u32, height: u32, depth: u32) -> Self {\n\n assert_ne!(width, 0);\n\n assert_ne!(height, 0);\n\n assert_ne!(depth, 0);\n\n\n\n WidthHeightDepth {\n", "file_path": "src/width_height_depth.rs", "rank": 28, "score": 16.102123638765036 }, { "content": "use crate::bin_section::BinSection;\n\nuse crate::width_height_depth::WidthHeightDepth;\n\nuse 
alloc::vec::Vec;\n\n\n\nmod coalesce;\n\nmod push_available_bin_section;\n\n\n\n/// A bin that we'd like to play our incoming rectangles into\n\n#[derive(Debug, Clone)]\n\npub struct TargetBin {\n\n pub(crate) max_width: u32,\n\n pub(crate) max_height: u32,\n\n pub(crate) max_depth: u32,\n\n pub(crate) available_bin_sections: Vec<BinSection>,\n\n}\n\n\n\nimpl TargetBin {\n\n #[allow(missing_docs)]\n\n pub fn new(max_width: u32, max_height: u32, max_depth: u32) -> Self {\n\n let available_bin_sections = vec![BinSection::new(\n", "file_path": "src/target_bin.rs", "rank": 29, "score": 16.00631946352521 }, { "content": "pub use crate::bin_section::contains_smallest_box;\n\npub use crate::bin_section::BinSection;\n\npub use crate::bin_section::ComparePotentialContainersFn;\n\nuse crate::grouped_rects_to_place::Group;\n\npub use crate::grouped_rects_to_place::GroupedRectsToPlace;\n\npub use crate::target_bin::TargetBin;\n\nuse crate::width_height_depth::WidthHeightDepth;\n\n\n\npub use self::box_size_heuristics::{volume_heuristic, BoxSizeHeuristicFn};\n\npub use self::rect_to_insert::RectToInsert;\n\npub use crate::packed_location::PackedLocation;\n\n\n\nmod bin_section;\n\nmod grouped_rects_to_place;\n\n\n\nmod packed_location;\n\nmod rect_to_insert;\n\nmod target_bin;\n\nmod width_height_depth;\n\n\n", "file_path": "src/lib.rs", "rank": 30, "score": 15.412069747780123 }, { "content": " bin.add_new_sections(new_sections);\n\n\n\n continue 'incoming;\n\n }\n\n\n\n return false;\n\n }\n\n\n\n true\n\n}\n\n\n\n/// Information about successfully packed rectangles.\n\n#[derive(Debug, PartialEq)]\n\npub struct RectanglePackOk<RectToPlaceId: PartialEq + Eq + Hash, BinId: PartialEq + Eq + Hash> {\n\n packed_locations: KeyValMap<RectToPlaceId, (BinId, PackedLocation)>,\n\n // TODO: Other information such as information about how the bins were packed\n\n // (perhaps percentage filled)\n\n}\n\n\n\nimpl<RectToPlaceId: PartialEq + Eq + Hash, BinId: PartialEq + Eq + Hash>\n", 
"file_path": "src/lib.rs", "rank": 31, "score": 14.690354874626083 }, { "content": " z: 0,\n\n whd: WidthHeightDepth {\n\n width: 1,\n\n height: 2,\n\n depth: 1\n\n },\n\n x_axis_rotation: RotatedBy::ZeroDegrees,\n\n y_axis_rotation: RotatedBy::ZeroDegrees,\n\n z_axis_rotation: RotatedBy::ZeroDegrees,\n\n }\n\n )\n\n }\n\n\n\n /// If we have one inbound rect and two bins, it should be placed into the smallest bin.\n\n #[test]\n\n fn one_inbound_rect_two_bins() {\n\n let mut groups: GroupedRectsToPlace<_, ()> = GroupedRectsToPlace::new();\n\n groups.push_rect(RectToPlaceId::One, None, RectToInsert::new(2, 2, 1));\n\n\n\n let mut targets = BTreeMap::new();\n", "file_path": "src/lib.rs", "rank": 32, "score": 14.102709114420847 }, { "content": " fn all_empty_space_behind_excluding_right(&self, incoming: &RectToInsert) -> BinSection {\n\n BinSection::new(\n\n self.x,\n\n self.y,\n\n self.z + incoming.depth(),\n\n WidthHeightDepth {\n\n width: incoming.width(),\n\n height: self.whd.height,\n\n depth: self.whd.depth - incoming.depth(),\n\n },\n\n )\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::{volume_heuristic, RectToInsert};\n\n\n\n const BIGGEST: u32 = 50;\n", "file_path": "src/bin_section.rs", "rank": 33, "score": 13.878087323141052 }, { "content": "//! `rectangle-pack` is a library focused on laying out any number of smaller rectangles\n\n//! 
(both 2d rectangles and 3d rectangular prisms) inside any number of larger rectangles.\n\n#![cfg_attr(not(std), no_std)]\n\n#![deny(missing_docs)]\n\n\n\n#[macro_use]\n\nextern crate alloc;\n\n\n\n#[cfg(not(std))]\n\nuse alloc::collections::BTreeMap as KeyValMap;\n\n#[cfg(std)]\n\nuse std::collections::HashMap as KeyValMap;\n\n\n\nuse alloc::{collections::BTreeMap, vec::Vec};\n\n\n\nuse core::{\n\n fmt::{Debug, Display, Error as FmtError, Formatter},\n\n hash::Hash,\n\n};\n\n\n", "file_path": "src/lib.rs", "rank": 34, "score": 13.790452420631581 }, { "content": " z: 0,\n\n whd: WidthHeightDepth {\n\n width: 2,\n\n height: 2,\n\n depth: 1\n\n },\n\n x_axis_rotation: RotatedBy::ZeroDegrees,\n\n y_axis_rotation: RotatedBy::ZeroDegrees,\n\n z_axis_rotation: RotatedBy::ZeroDegrees,\n\n }\n\n )\n\n }\n\n\n\n /// If we have two inbound rects the largest one should be placed first.\n\n #[test]\n\n fn places_largest_rectangles_first() {\n\n let mut groups: GroupedRectsToPlace<_, ()> = GroupedRectsToPlace::new();\n\n groups.push_rect(RectToPlaceId::One, None, RectToInsert::new(10, 10, 1));\n\n groups.push_rect(RectToPlaceId::Two, None, RectToInsert::new(5, 5, 1));\n\n\n", "file_path": "src/lib.rs", "rank": 35, "score": 13.270232120444646 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::width_height_depth::WidthHeightDepth;\n\n\n\n /// Verify that the overlaps method works properly.\n\n #[test]\n\n fn overlaps() {\n\n OverlapsTest {\n\n label: \"Overlaps X, Y and Z\",\n\n section1: BinSection::new(3, 4, 5, WidthHeightDepth::new(1, 1, 1)),\n\n section2: section_2_3_4(),\n\n expected_overlap: true,\n\n }\n\n .test();\n\n\n\n OverlapsTest {\n", "file_path": "src/bin_section/overlaps.rs", "rank": 36, "score": 12.962162957695266 }, { "content": " /// when the second one couldn't fit in it.\n\n ///\n\n /// ```text\n\n /// ┌──────────────┬──────────────┐\n\n /// │ Third │ │\n\n /// ├──────────────┤ │\n\n /// │ │ │\n\n /// │ │ │\n\n /// │ 
├──────────────┤\n\n /// │ First │ │\n\n /// │ │ Second │\n\n /// │ │ │\n\n /// └──────────────┴──────────────┘\n\n /// ```\n\n #[test]\n\n fn saves_bin_sections_for_future_use() {\n\n let mut targets = BTreeMap::new();\n\n targets.insert(BinId::Three, TargetBin::new(100, 100, 1));\n\n\n\n let mut groups: GroupedRectsToPlace<_, ()> = GroupedRectsToPlace::new();\n", "file_path": "src/lib.rs", "rank": 37, "score": 12.548116048659146 }, { "content": " /// should get placed into the smaller of the two new sections.\n\n ///\n\n /// ```text\n\n /// ┌──────────────┬──▲───────────────┐\n\n /// │ Second Rect │ │ │\n\n /// ├──────────────┴──┤ │\n\n /// │ │ │\n\n /// │ First Placed │ │\n\n /// │ Rectangle │ │\n\n /// │ │ │\n\n /// └─────────────────┴───────────────┘\n\n /// ```\n\n #[test]\n\n fn fills_small_sections_before_large_ones() {\n\n let mut targets = BTreeMap::new();\n\n targets.insert(BinId::Three, TargetBin::new(100, 100, 1));\n\n\n\n let mut groups: GroupedRectsToPlace<_, ()> = GroupedRectsToPlace::new();\n\n\n\n groups.push_rect(RectToPlaceId::One, None, RectToInsert::new(50, 90, 1));\n", "file_path": "src/lib.rs", "rank": 38, "score": 12.53882230629199 }, { "content": "\n\n groups.push_rect(RectToPlaceId::One, None, RectToInsert::new(60, 95, 1));\n\n groups.push_rect(RectToPlaceId::Two, None, RectToInsert::new(40, 10, 1));\n\n groups.push_rect(RectToPlaceId::Three, None, RectToInsert::new(60, 3, 1));\n\n\n\n let packed = pack_rects(\n\n &groups,\n\n &mut targets,\n\n &volume_heuristic,\n\n &contains_smallest_box,\n\n )\n\n .unwrap();\n\n let locations = packed.packed_locations;\n\n\n\n assert_eq!(\n\n locations[&RectToPlaceId::One].1,\n\n PackedLocation {\n\n x: 0,\n\n y: 0,\n\n z: 0,\n", "file_path": "src/lib.rs", "rank": 39, "score": 12.533573499237693 }, { "content": "use crate::width_height_depth::WidthHeightDepth;\n\n\n\n/// Describes how and where an incoming rectangle was packed into the target bins\n\n#[derive(Debug, PartialEq, Copy, Clone)]\n\npub 
struct PackedLocation {\n\n pub(crate) x: u32,\n\n pub(crate) y: u32,\n\n pub(crate) z: u32,\n\n pub(crate) whd: WidthHeightDepth,\n\n pub(crate) x_axis_rotation: RotatedBy,\n\n pub(crate) y_axis_rotation: RotatedBy,\n\n pub(crate) z_axis_rotation: RotatedBy,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Copy, Clone)]\n\n#[allow(unused)] // TODO: Implement rotations\n\npub enum RotatedBy {\n\n ZeroDegrees,\n\n NinetyDegrees,\n\n}\n", "file_path": "src/packed_location.rs", "rank": 40, "score": 12.400094921699019 }, { "content": " groups.push_rect(RectToPlaceId::Two, None, RectToInsert::new(20, 20, 1));\n\n\n\n let mut targets = BTreeMap::new();\n\n targets.insert(BinId::Three, TargetBin::new(20, 20, 1));\n\n targets.insert(BinId::Four, TargetBin::new(50, 50, 1));\n\n\n\n let packed = pack_rects(\n\n &groups,\n\n &mut targets,\n\n &volume_heuristic,\n\n &contains_smallest_box,\n\n )\n\n .unwrap();\n\n let locations = packed.packed_locations;\n\n\n\n assert_eq!(locations.len(), 2);\n\n\n\n assert_eq!(locations[&RectToPlaceId::One].0, BinId::Four,);\n\n assert_eq!(locations[&RectToPlaceId::Two].0, BinId::Three,);\n\n\n", "file_path": "src/lib.rs", "rank": 41, "score": 12.245898252457312 }, { "content": " let rectangles = vec![\n\n \"some-rectangle-0\",\n\n \"some-rectangle-1\",\n\n \"some-rectangle-2\",\n\n \"some-rectangle-3\",\n\n \"some-rectangle-4\",\n\n ];\n\n\n\n for rect_id in rectangles.iter() {\n\n rects_to_place.push_rect(rect_id, None, RectToInsert::new(4, 4, 1));\n\n }\n\n\n\n let packed = pack_rects(\n\n &rects_to_place,\n\n &mut target_bins.clone(),\n\n &volume_heuristic,\n\n &contains_smallest_box,\n\n )\n\n .unwrap();\n\n\n", "file_path": "src/lib.rs", "rank": 42, "score": 11.834921299138058 }, { "content": " }\n\n );\n\n }\n\n\n\n /// Create a handful of rectangles that need to be placed, with two of them in the same group\n\n /// and the rest ungrouped.\n\n /// Try placing them many times and verify that each time they are placed the exact same way.\n\n 
#[test]\n\n fn deterministic_packing() {\n\n let mut previous_packed = None;\n\n\n\n for _ in 0..5 {\n\n let mut rects_to_place: GroupedRectsToPlace<&'static str, &str> =\n\n GroupedRectsToPlace::new();\n\n\n\n let mut target_bins = BTreeMap::new();\n\n for bin_id in 0..5 {\n\n target_bins.insert(bin_id, TargetBin::new(8, 8, 1));\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 43, "score": 11.805124904230404 }, { "content": " targets.insert(BinId::Three, TargetBin::new(2, 100, 1));\n\n\n\n let mut groups: GroupedRectsToPlace<_, ()> = GroupedRectsToPlace::new();\n\n groups.push_rect(RectToPlaceId::One, None, RectToInsert::new(3, 1, 1));\n\n\n\n match pack_rects(\n\n &groups,\n\n &mut targets,\n\n &volume_heuristic,\n\n &contains_smallest_box,\n\n )\n\n .unwrap_err()\n\n {\n\n RectanglePackError::NotEnoughBinSpace => {}\n\n };\n\n }\n\n\n\n /// Rectangles in the same group need to be placed in the same bin.\n\n ///\n\n /// Here we create two Rectangles in the same group and create two bins that could fit them\n", "file_path": "src/lib.rs", "rank": 44, "score": 11.753733423572125 }, { "content": " groups.push_rect(RectToPlaceId::Two, None, RectToInsert::new(1, 1, 1));\n\n\n\n let packed = pack_rects(\n\n &groups,\n\n &mut targets,\n\n &volume_heuristic,\n\n &contains_smallest_box,\n\n )\n\n .unwrap();\n\n let locations = packed.packed_locations;\n\n\n\n assert_eq!(locations.len(), 2);\n\n\n\n assert_eq!(locations[&RectToPlaceId::One].0, BinId::Three,);\n\n assert_eq!(locations[&RectToPlaceId::Two].0, BinId::Three,);\n\n\n\n assert_eq!(\n\n locations[&RectToPlaceId::One].1,\n\n PackedLocation {\n\n x: 0,\n", "file_path": "src/lib.rs", "rank": 45, "score": 11.328715761038525 }, { "content": " let _bin_clone = bin.clone();\n\n\n\n let mut bin_sections = bin.available_bin_sections.clone();\n\n\n\n let last_section_idx = bin_sections.len() - 1;\n\n let mut sections_tried = 0;\n\n\n\n 'section: while let Some(remaining_section) = bin_sections.pop() {\n\n let rect_to_place 
= rects_to_place.rects[&rect_to_place_id];\n\n\n\n let placement = remaining_section.try_place(\n\n &rect_to_place,\n\n more_suitable_containers_fn,\n\n box_size_heuristic,\n\n );\n\n\n\n if placement.is_err() {\n\n sections_tried += 1;\n\n continue 'section;\n\n }\n", "file_path": "src/lib.rs", "rank": 46, "score": 10.586164582187479 }, { "content": " box_size_heuristic,\n\n );\n\n\n\n 'group: for (_group_id, rects_to_place_ids) in group_id_to_inbound_ids {\n\n for (bin_id, bin) in target_bins.iter_mut() {\n\n if !can_fit_entire_group_into_bin(\n\n bin.clone(),\n\n &rects_to_place_ids[..],\n\n rects_to_place,\n\n box_size_heuristic,\n\n more_suitable_containers_fn,\n\n ) {\n\n continue;\n\n }\n\n\n\n 'incoming: for rect_to_place_id in rects_to_place_ids.iter() {\n\n if bin.available_bin_sections.len() == 0 {\n\n continue;\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 47, "score": 10.503156029547544 }, { "content": " let mut targets = BTreeMap::new();\n\n targets.insert(BinId::Three, TargetBin::new(5, 5, 1));\n\n\n\n let packed = pack_rects(\n\n &groups,\n\n &mut targets,\n\n &volume_heuristic,\n\n &contains_smallest_box,\n\n )\n\n .unwrap();\n\n let locations = packed.packed_locations;\n\n\n\n assert_eq!(locations.len(), 1);\n\n\n\n assert_eq!(locations[&RectToPlaceId::One].0, BinId::Three,);\n\n assert_eq!(\n\n locations[&RectToPlaceId::One].1,\n\n PackedLocation {\n\n x: 0,\n\n y: 0,\n", "file_path": "src/lib.rs", "rank": 48, "score": 10.462673872493035 }, { "content": " let mut targets = BTreeMap::new();\n\n targets.insert(BinId::Three, TargetBin::new(20, 20, 2));\n\n\n\n let packed = pack_rects(\n\n &groups,\n\n &mut targets,\n\n &volume_heuristic,\n\n &contains_smallest_box,\n\n )\n\n .unwrap();\n\n let locations = packed.packed_locations;\n\n\n\n assert_eq!(locations.len(), 2);\n\n\n\n assert_eq!(locations[&RectToPlaceId::One].0, BinId::Three,);\n\n assert_eq!(locations[&RectToPlaceId::Two].0, BinId::Three,);\n\n\n\n assert_eq!(\n\n 
locations[&RectToPlaceId::One].1,\n\n PackedLocation {\n", "file_path": "src/lib.rs", "rank": 49, "score": 10.441783166724221 }, { "content": " /// individually but cannot fit them together.\n\n ///\n\n /// Then we verify that we receive an error for being unable to place the group.\n\n #[test]\n\n fn error_if_cannot_fit_group() {\n\n let mut targets = BTreeMap::new();\n\n targets.insert(BinId::Three, TargetBin::new(100, 100, 1));\n\n targets.insert(BinId::Four, TargetBin::new(100, 100, 1));\n\n\n\n let mut groups = GroupedRectsToPlace::new();\n\n groups.push_rect(\n\n RectToPlaceId::One,\n\n Some(vec![\"A Group\"]),\n\n RectToInsert::new(100, 100, 1),\n\n );\n\n groups.push_rect(\n\n RectToPlaceId::Two,\n\n Some(vec![\"A Group\"]),\n\n RectToInsert::new(100, 100, 1),\n\n );\n", "file_path": "src/lib.rs", "rank": 50, "score": 10.294055171849038 }, { "content": " width: 5,\n\n height: 5,\n\n depth: 1\n\n },\n\n x_axis_rotation: RotatedBy::ZeroDegrees,\n\n y_axis_rotation: RotatedBy::ZeroDegrees,\n\n z_axis_rotation: RotatedBy::ZeroDegrees,\n\n }\n\n )\n\n }\n\n\n\n /// We have two rectangles and two bins. Each bin has enough space to fit one rectangle.\n\n ///\n\n /// 1. First place the largest rectangle into the smallest bin.\n\n ///\n\n /// 2. Second place the remaining rectangle into the next available bin (i.e. 
the largest one).\n\n #[test]\n\n fn two_rects_two_bins() {\n\n let mut groups: GroupedRectsToPlace<_, ()> = GroupedRectsToPlace::new();\n\n groups.push_rect(RectToPlaceId::One, None, RectToInsert::new(15, 15, 1));\n", "file_path": "src/lib.rs", "rank": 51, "score": 10.01547897661788 }, { "content": "/// Some(vec![MyCustomGroupId::GroupIdOne]),\n\n/// RectToInsert::new(5, 50, 255)\n\n/// );\n\n/// rects_to_place.push_rect(\n\n/// MyCustomRectId::RectThree,\n\n/// None,\n\n/// RectToInsert::new(30, 30, 255)\n\n/// );\n\n///\n\n/// let mut target_bins = BTreeMap::new();\n\n/// target_bins.insert(MyCustomBinId::DestinationBinOne, TargetBin::new(2048, 2048, 255));\n\n/// target_bins.insert(MyCustomBinId::DestinationBinTwo, TargetBin::new(4096, 4096, 1020));\n\n///\n\n/// // Information about where each `MyCustomRectId` was placed\n\n/// let rectangle_placements = pack_rects(\n\n/// &rects_to_place,\n\n/// &mut target_bins,\n\n/// &volume_heuristic,\n\n/// &contains_smallest_box\n\n/// ).unwrap();\n", "file_path": "src/lib.rs", "rank": 52, "score": 9.95281570829714 }, { "content": "use crate::packed_location::RotatedBy;\n\nuse crate::{BoxSizeHeuristicFn, PackedLocation, RectToInsert, WidthHeightDepth};\n\n\n\nuse core::{\n\n cmp::Ordering,\n\n fmt::{Debug, Display, Error as FmtError, Formatter},\n\n};\n\n\n\nmod overlaps;\n\n\n\n/// Given two sets of containers, which of these is the more suitable for our packing.\n\n///\n\n/// Useful when we're determining how to split up the remaining volume/area of a box/rectangle.\n\n///\n\n/// For example - we might deem it best to cut the remaining region vertically, or horizontally,\n\n/// or along the Z-axis.\n\n///\n\n/// This decision is based on the more suitable contains heuristic. 
We determine all 6 possible\n\n/// ways to divide up remaining space, sort them using the more suitable contains heuristic function\n\n/// and choose the best one.\n\n///\n\n/// Ordering::Greater means the first set of containers is better.\n\n/// Ordering::Less means the second set of containers is better.\n\npub type ComparePotentialContainersFn =\n\n dyn Fn([WidthHeightDepth; 3], [WidthHeightDepth; 3], &BoxSizeHeuristicFn) -> Ordering;\n\n\n\n/// Select the container that has the smallest box.\n\n///\n\n/// If there is a tie on the smallest boxes, select whichever also has the second smallest box.\n", "file_path": "src/bin_section.rs", "rank": 53, "score": 9.874903180810524 }, { "content": " let mut sections_tried = 0;\n\n\n\n 'section: while let Some(remaining_section) = bin_sections.pop() {\n\n let rect_to_place = rects_to_place.rects[&rect_to_place_id];\n\n\n\n let placement = remaining_section.try_place(\n\n &rect_to_place,\n\n more_suitable_containers_fn,\n\n box_size_heuristic,\n\n );\n\n\n\n if placement.is_err() {\n\n sections_tried += 1;\n\n continue 'section;\n\n }\n\n\n\n let (_placement, mut new_sections) = placement.unwrap();\n\n sort_by_size_largest_to_smallest(&mut new_sections, box_size_heuristic);\n\n\n\n bin.remove_filled_section(last_section_idx - sections_tried);\n", "file_path": "src/lib.rs", "rank": 54, "score": 9.835140012536487 }, { "content": "\n\n /// Verify that if the bin section that we are pushing is outside of the TargetBin's bounds we\n\n /// return an error.\n\n #[test]\n\n fn error_if_bin_section_out_of_bounds() {\n\n let mut bin = empty_bin();\n\n\n\n let out_of_bounds = BinSection::new(101, 0, 0, WidthHeightDepth::new(1, 1, 1));\n\n\n\n match bin.push_available_bin_section(out_of_bounds).err().unwrap() {\n\n PushBinSectionError::OutOfBounds(err_bin_section) => {\n\n assert_eq!(err_bin_section, out_of_bounds)\n\n }\n\n _ => panic!(),\n\n };\n\n }\n\n\n\n /// Verify that if the bin section that we are pushing overlaps another 
bin section we return\n\n /// an error.\n\n #[test]\n", "file_path": "src/target_bin/push_available_bin_section.rs", "rank": 55, "score": 9.792064965529617 }, { "content": " RectanglePackOk<RectToPlaceId, BinId>\n\n{\n\n /// Indicates where every incoming rectangle was placed\n\n pub fn packed_locations(&self) -> &KeyValMap<RectToPlaceId, (BinId, PackedLocation)> {\n\n &self.packed_locations\n\n }\n\n}\n\n\n\n/// An error while attempting to pack rectangles into bins.\n\n#[derive(Debug, PartialEq)]\n\npub enum RectanglePackError {\n\n /// The rectangles can't be placed into the bins. More bin space needs to be provided.\n\n NotEnoughBinSpace,\n\n}\n\n\n\n#[cfg(std)]\n\nimpl std::error::Error for RectanglePackError {}\n\n\n\nimpl Display for RectanglePackError {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), FmtError> {\n\n match self {\n\n RectanglePackError::NotEnoughBinSpace => {\n\n f.write_str(\"Not enough space to place all of the rectangles.\")\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 56, "score": 9.775975681049383 }, { "content": " targets.insert(BinId::Three, TargetBin::new(5, 5, 1));\n\n targets.insert(BinId::Four, TargetBin::new(5, 5, 2));\n\n\n\n let packed = pack_rects(\n\n &groups,\n\n &mut targets,\n\n &volume_heuristic,\n\n &contains_smallest_box,\n\n )\n\n .unwrap();\n\n let locations = packed.packed_locations;\n\n\n\n assert_eq!(locations[&RectToPlaceId::One].0, BinId::Three,);\n\n\n\n assert_eq!(locations.len(), 1);\n\n assert_eq!(\n\n locations[&RectToPlaceId::One].1,\n\n PackedLocation {\n\n x: 0,\n\n y: 0,\n", "file_path": "src/lib.rs", "rank": 57, "score": 9.59796805122355 }, { "content": "\n\n let (placement, mut new_sections) = placement.unwrap();\n\n sort_by_size_largest_to_smallest(&mut new_sections, box_size_heuristic);\n\n\n\n bin.remove_filled_section(last_section_idx - sections_tried);\n\n bin.add_new_sections(new_sections);\n\n\n\n packed_locations.insert(rect_to_place_id.clone(), (bin_id.clone(), 
placement));\n\n\n\n continue 'incoming;\n\n }\n\n }\n\n\n\n continue 'group;\n\n }\n\n return Err(RectanglePackError::NotEnoughBinSpace);\n\n }\n\n\n\n Ok(RectanglePackOk { packed_locations })\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 58, "score": 9.351742042621058 }, { "content": "mod box_size_heuristics;\n\n\n\n/// Determine how to fit a set of incoming rectangles (2d or 3d) into a set of target bins.\n\n///\n\n/// ## Example\n\n///\n\n/// ```\n\n/// //! A basic example of packing rectangles into target bins\n\n///\n\n/// use rectangle_pack::{\n\n/// GroupedRectsToPlace,\n\n/// RectToInsert,\n\n/// pack_rects,\n\n/// TargetBin,\n\n/// volume_heuristic,\n\n/// contains_smallest_box\n\n/// };\n\n/// use std::collections::BTreeMap;\n\n///\n\n/// // A rectangle ID just needs to meet these trait bounds (ideally also Copy).\n", "file_path": "src/lib.rs", "rank": 59, "score": 9.17144618003644 }, { "content": " #[test]\n\n fn error_if_placement_is_taller_than_bin_section() {\n\n let bin_section = bin_section_width_height_depth(5, 20, 1);\n\n let placement = RectToInsert::new(5, 21, 1);\n\n\n\n assert_eq!(\n\n bin_section\n\n .try_place(&placement, &contains_smallest_box, &volume_heuristic)\n\n .unwrap_err(),\n\n BinSectionError::PlacementTallerThanBinSection\n\n );\n\n }\n\n\n\n /// If we're trying to place a rectangle that is deeper than the container we return an error\n\n #[test]\n\n fn error_if_placement_is_deeper_than_bin_section() {\n\n let bin_section = bin_section_width_height_depth(5, 20, 1);\n\n let placement = RectToInsert::new(5, 20, 2);\n\n\n\n assert_eq!(\n", "file_path": "src/bin_section.rs", "rank": 60, "score": 9.125257234166574 }, { "content": " /// Verify that we can push a valid bin section.\n\n #[test]\n\n fn push_bin_section() {\n\n let mut bin = full_bin();\n\n\n\n let valid_section = BinSection::new(1, 2, 0, WidthHeightDepth::new(1, 1, 1));\n\n\n\n assert_eq!(bin.available_bin_sections.len(), 0);\n\n 
bin.push_available_bin_section(valid_section).unwrap();\n\n assert_eq!(bin.available_bin_sections.len(), 1);\n\n\n\n assert_eq!(bin.available_bin_sections[0], valid_section);\n\n }\n\n\n\n fn empty_bin() -> TargetBin {\n\n TargetBin::new(100, 100, 1)\n\n }\n\n\n\n fn full_bin() -> TargetBin {\n\n let mut bin = TargetBin::new(100, 100, 1);\n\n\n\n bin.available_bin_sections.clear();\n\n\n\n bin\n\n }\n\n}\n", "file_path": "src/target_bin/push_available_bin_section.rs", "rank": 61, "score": 9.067327440158458 }, { "content": " fn error_if_bin_section_overlaps_another_remaining_section() {\n\n let mut bin = empty_bin();\n\n\n\n let overlaps = BinSection::new(0, 0, 0, WidthHeightDepth::new(1, 1, 1));\n\n\n\n match bin.push_available_bin_section(overlaps).err().unwrap() {\n\n PushBinSectionError::Overlaps {\n\n remaining_section: err_remaining_section,\n\n new_section: err_new_section,\n\n } => {\n\n assert_eq!(err_new_section, overlaps);\n\n assert_eq!(\n\n err_remaining_section,\n\n BinSection::new(0, 0, 0, WidthHeightDepth::new(100, 100, 1))\n\n );\n\n }\n\n _ => panic!(),\n\n }\n\n }\n\n\n", "file_path": "src/target_bin/push_available_bin_section.rs", "rank": 62, "score": 9.032882858272698 }, { "content": " pub(crate) whd: WidthHeightDepth,\n\n}\n\n\n\n/// An error while attempting to place a rectangle within a bin section;\n\n#[derive(Debug, Eq, PartialEq)]\n\n#[allow(missing_docs)]\n\npub enum BinSectionError {\n\n PlacementWiderThanBinSection,\n\n PlacementTallerThanBinSection,\n\n PlacementDeeperThanBinSection,\n\n}\n\n\n\nimpl Display for BinSectionError {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), FmtError> {\n\n let err = match self {\n\n BinSectionError::PlacementWiderThanBinSection => {\n\n \"Can not place a rectangle inside of a bin that is wider than that rectangle.\"\n\n }\n\n BinSectionError::PlacementTallerThanBinSection => {\n\n \"Can not place a rectangle inside of a bin that is taller than that rectangle.\"\n", "file_path": 
"src/bin_section.rs", "rank": 63, "score": 8.979288843037981 }, { "content": "## In The Wild\n\n\n\nHere are some known production users of `rectangle-pack`.\n\n\n\n- [Akigi](https://akigi.com) uses `rectangle-pack` to power parts of its runtime texture allocation strategy.\n\n\n\n- [Bevy](https://github.com/bevyengine/bevy/blob/9ae56e860468aa3158a702cbcf64e511b84a4b1c/crates/bevy_sprite/Cargo.toml#L29) uses `rectangle-pack`\n\n to create texture atlases.\n\n\n\n## Contributing\n\n\n\nIf you have a use case that isn't supported, a question, a patch, or anything else, go right ahead and open an issue or submit a pull request.\n\n\n\n## To Test\n\n\n\nTo run the test suite.\n\n\n\n```sh\n\n# Clone the repository\n\ngit clone [email protected]:chinedufn/rectangle-pack.git\n\ncd rectangle-pack\n\n\n\n# Run tests\n\ncargo test\n\n```\n\n\n\n## See Also\n\n\n\n- [rectpack2D]\n\n - Inspired parts of our initial implementation\n\n\n\n[rectpack2D]: https://github.com/TeamHypersomnia/rectpack2D\n", "file_path": "README.md", "rank": 64, "score": 8.675025762041509 }, { "content": " &self.available_bin_sections\n\n }\n\n\n\n /// Remove the section that was just split by a placed rectangle.\n\n pub fn remove_filled_section(&mut self, idx: usize) {\n\n self.available_bin_sections.remove(idx);\n\n }\n\n\n\n /// When a section is filled it gets split into three new sections.\n\n /// Here we add those.\n\n ///\n\n /// TODO: Ignore sections with a volume of 0\n\n pub fn add_new_sections(&mut self, new_sections: [BinSection; 3]) {\n\n for new_section in new_sections.iter() {\n\n if new_section.whd.volume() > 0 {\n\n self.available_bin_sections.push(*new_section);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/target_bin.rs", "rank": 65, "score": 8.26809543223594 }, { "content": " }\n\n .test();\n\n }\n\n\n\n fn section_2_3_4() -> BinSection {\n\n BinSection::new(2, 3, 4, WidthHeightDepth::new(2, 3, 4))\n\n }\n\n\n\n struct OverlapsTest {\n\n label: &'static str,\n\n section1: 
BinSection,\n\n section2: BinSection,\n\n expected_overlap: bool,\n\n }\n\n\n\n impl OverlapsTest {\n\n fn test(self) {\n\n assert_eq!(\n\n self.section1.overlaps(&self.section2),\n\n self.expected_overlap,\n\n \"{}\",\n\n self.label\n\n )\n\n }\n\n }\n\n}\n", "file_path": "src/bin_section/overlaps.rs", "rank": 66, "score": 8.12213783200471 }, { "content": " bin_section\n\n .try_place(&placement, &contains_smallest_box, &volume_heuristic)\n\n .unwrap_err(),\n\n BinSectionError::PlacementDeeperThanBinSection\n\n );\n\n }\n\n\n\n fn test_splits(\n\n container_dimensions: u32,\n\n rect_to_place: WidthHeightDepth,\n\n mut expected: [BinSection; 3],\n\n ) {\n\n let dim = container_dimensions;\n\n let bin_section = bin_section_width_height_depth(dim, dim, dim);\n\n\n\n let whd = rect_to_place;\n\n\n\n let placement = RectToInsert::new(whd.width, whd.height, whd.depth);\n\n\n\n let mut packed = bin_section\n", "file_path": "src/bin_section.rs", "rank": 67, "score": 8.098616885891303 }, { "content": "use crate::WidthHeightDepth;\n\n\n\n/// Incoming boxes are places into the smallest hole that will fit them.\n\n///\n\n/// \"small\" vs. 
\"large\" is based on the heuristic function.\n\n///\n\n/// A larger heuristic means that the box is larger.\n\npub type BoxSizeHeuristicFn = dyn Fn(WidthHeightDepth) -> u128;\n\n\n\n/// The volume of the box\n", "file_path": "src/box_size_heuristics.rs", "rank": 68, "score": 7.385268232185804 }, { "content": " }\n\n BinSectionError::PlacementDeeperThanBinSection => {\n\n \"Can not place a rectangle inside of a bin that is deeper than that rectangle.\"\n\n }\n\n };\n\n\n\n f.write_str(err)\n\n }\n\n}\n\n\n\nimpl BinSection {\n\n /// Create a new BinSection\n\n pub fn new(x: u32, y: u32, z: u32, whd: WidthHeightDepth) -> Self {\n\n BinSection { x, y, z, whd }\n\n }\n\n\n\n // TODO: Delete - just the old API before we had the WidthHeightDepth struct\n\n fn new_spread(x: u32, y: u32, z: u32, width: u32, height: u32, depth: u32) -> Self {\n\n BinSection {\n\n x,\n", "file_path": "src/bin_section.rs", "rank": 69, "score": 6.965896218237885 }, { "content": "use crate::bin_section::BinSection;\n\n\n\nimpl BinSection {\n\n /// Whether or not two bin sections overlap each other.\n\n pub fn overlaps(&self, other: &Self) -> bool {\n\n (self.x >= other.x && self.x <= other.right())\n\n && (self.y >= other.y && self.y <= other.top())\n\n && (self.z >= other.z && self.z <= other.back())\n\n }\n\n\n\n fn right(&self) -> u32 {\n\n self.x + (self.whd.width - 1)\n\n }\n\n\n\n fn top(&self) -> u32 {\n\n self.y + (self.whd.height - 1)\n\n }\n\n\n\n fn back(&self) -> u32 {\n\n self.z + (self.whd.depth - 1)\n", "file_path": "src/bin_section/overlaps.rs", "rank": 70, "score": 6.74432304142932 }, { "content": " ///\n\n /// Written to be readable/maintainable, not to minimize conditional logic, under the\n\n /// (unverified) assumption that a release compilation will inline and dedupe the function\n\n /// calls and conditionals.\n\n pub fn try_place(\n\n &self,\n\n incoming: &RectToInsert,\n\n container_comparison_fn: &ComparePotentialContainersFn,\n\n heuristic_fn: 
&BoxSizeHeuristicFn,\n\n ) -> Result<(PackedLocation, [BinSection; 3]), BinSectionError> {\n\n self.incoming_can_fit(incoming)?;\n\n\n\n let mut all_combinations = [\n\n self.depth_largest_height_second_largest_width_smallest(incoming),\n\n self.depth_largest_width_second_largest_height_smallest(incoming),\n\n self.height_largest_depth_second_largest_width_smallest(incoming),\n\n self.height_largest_width_second_largest_depth_smallest(incoming),\n\n self.width_largest_depth_second_largest_height_smallest(incoming),\n\n self.width_largest_height_second_largest_depth_smallest(incoming),\n\n ];\n", "file_path": "src/bin_section.rs", "rank": 71, "score": 6.701829396764012 }, { "content": " const MIDDLE: u32 = 25;\n\n const SMALLEST: u32 = 10;\n\n\n\n const FULL: u32 = 100;\n\n\n\n /// If we're trying to place a rectangle that is wider than the container we return an error\n\n #[test]\n\n fn error_if_placement_is_wider_than_bin_section() {\n\n let bin_section = bin_section_width_height_depth(5, 20, 1);\n\n let placement = RectToInsert::new(6, 20, 1);\n\n\n\n assert_eq!(\n\n bin_section\n\n .try_place(&placement, &contains_smallest_box, &volume_heuristic)\n\n .unwrap_err(),\n\n BinSectionError::PlacementWiderThanBinSection\n\n );\n\n }\n\n\n\n /// If we're trying to place a rectangle that is taller than the container we return an error\n", "file_path": "src/bin_section.rs", "rank": 72, "score": 6.580822621039561 }, { "content": " label: \"Overlaps X only\",\n\n section1: BinSection::new(3, 40, 50, WidthHeightDepth::new(1, 1, 1)),\n\n section2: section_2_3_4(),\n\n expected_overlap: false,\n\n }\n\n .test();\n\n\n\n OverlapsTest {\n\n label: \"Overlaps Y only\",\n\n section1: BinSection::new(30, 4, 50, WidthHeightDepth::new(1, 1, 1)),\n\n section2: section_2_3_4(),\n\n expected_overlap: false,\n\n }\n\n .test();\n\n\n\n OverlapsTest {\n\n label: \"Overlaps Z only\",\n\n section1: BinSection::new(30, 40, 5, WidthHeightDepth::new(1, 1, 1)),\n\n section2: 
section_2_3_4(),\n\n expected_overlap: false,\n", "file_path": "src/bin_section/overlaps.rs", "rank": 73, "score": 6.428091927061117 }, { "content": " remaining_section: *available,\n\n new_section: bin_section,\n\n });\n\n }\n\n }\n\n\n\n self.push_available_bin_section_unchecked(bin_section);\n\n\n\n Ok(())\n\n }\n\n\n\n /// Push a [`BinSection`] to the list of remaining [`BinSection`]'s that rectangles can be\n\n /// placed in, without checking whether or not it is valid.\n\n ///\n\n /// Use [`TargetBin.push_available_bin_section`] if you want to check that the new bin section\n\n /// does not overlap any existing bin sections nad that it is within the [`TargetBin`]'s bounds.\n\n ///\n\n /// [`TargetBin.push_available_bin_section`]: #method.push_available_bin_section\n\n pub fn push_available_bin_section_unchecked(&mut self, bin_section: BinSection) {\n\n self.available_bin_sections.push(bin_section);\n", "file_path": "src/target_bin/push_available_bin_section.rs", "rank": 74, "score": 6.346115969461444 }, { "content": " }\n\n}\n\n\n\n/// An error while attempting to push a [`BinSection`] into the remaining bin sections of a\n\n/// [`TargetBin`].\n\n#[derive(Debug)]\n\npub enum PushBinSectionError {\n\n /// Attempted to push a [`BinSection`] that is not fully contained by the bin.\n\n OutOfBounds(BinSection),\n\n /// Attempted to push a [`BinSection`] that overlaps another empty bin section.\n\n Overlaps {\n\n /// The section that is already stored as empty within the [`TargetBin`];\n\n remaining_section: BinSection,\n\n /// The section that you were trying to add to the [`TargetBin`];\n\n new_section: BinSection,\n\n },\n\n}\n\n\n\nimpl Display for PushBinSectionError {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {\n", "file_path": "src/target_bin/push_available_bin_section.rs", "rank": 75, "score": 6.240020366443856 }, { "content": " assert_eq!(\n\n locations[&RectToPlaceId::One].1,\n\n PackedLocation {\n\n x: 0,\n\n y: 0,\n\n z: 0,\n\n whd: 
WidthHeightDepth {\n\n width: 15,\n\n height: 15,\n\n depth: 1\n\n },\n\n x_axis_rotation: RotatedBy::ZeroDegrees,\n\n y_axis_rotation: RotatedBy::ZeroDegrees,\n\n z_axis_rotation: RotatedBy::ZeroDegrees,\n\n }\n\n );\n\n assert_eq!(\n\n locations[&RectToPlaceId::Two].1,\n\n PackedLocation {\n\n x: 0,\n", "file_path": "src/lib.rs", "rank": 76, "score": 5.287750074030448 }, { "content": " ///\n\n /// let start = len / 600 * current_frame;\n\n /// let end = start + len / 600;\n\n ///\n\n /// target_bin.coalesce_available_sections(idx, start..end);\n\n /// }\n\n ///\n\n /// # fn my_target_bin () -> TargetBin {\n\n /// # TargetBin::new(1, 2, 3)\n\n /// # }\n\n /// #\n\n /// # fn get_current_frame () -> usize {\n\n /// # 0\n\n /// # }\n\n /// ```\n\n ///\n\n /// [`TargetBin.push_available_bin_section`]: #method.push_available_bin_section\n\n // TODO: Write tests, implement then remove the \"ignore\" from the examples above.\n\n // Tests cases should have a rectangle and then a neighbor (above, below, left, right) and\n\n // verify that they get combined, but only if the comparison indices are correct and only if\n\n // the neighbor has the same width (uf above/below) or height (if left/right).\n\n pub fn coalesce_available_sections(\n\n _bin_section_index: usize,\n\n _compare_to_indices: Range<usize>,\n\n ) {\n\n unimplemented!()\n\n }\n\n}\n", "file_path": "src/target_bin/coalesce.rs", "rank": 77, "score": 5.197280762354977 }, { "content": " whd,\n\n [\n\n BinSection::new_spread(whd.width, 0, 0, FULL - whd.width, FULL, FULL),\n\n BinSection::new_spread(0, whd.height, 0, whd.width, FULL - whd.height, FULL),\n\n BinSection::new_spread(0, 0, whd.depth, whd.width, whd.height, FULL - whd.depth),\n\n ],\n\n );\n\n }\n\n\n\n // #[test]\n\n // fn todo() {\n\n // unimplemented!(\"Add tests for supporting rotation\");\n\n // }\n\n\n\n fn bin_section_width_height_depth(width: u32, height: u32, depth: u32) -> BinSection {\n\n BinSection::new(\n\n 0,\n\n 0,\n\n 0,\n\n 
WidthHeightDepth {\n\n width,\n\n height,\n\n depth,\n\n },\n\n )\n\n }\n\n}\n", "file_path": "src/bin_section.rs", "rank": 78, "score": 5.133589977140904 }, { "content": " }\n\n\n\n fn empty_space_directly_behind(&self, incoming: &RectToInsert) -> BinSection {\n\n BinSection::new(\n\n self.x,\n\n self.y,\n\n self.z + incoming.depth(),\n\n WidthHeightDepth {\n\n width: incoming.width(),\n\n height: incoming.height(),\n\n depth: self.whd.depth - incoming.depth(),\n\n },\n\n )\n\n }\n\n\n\n fn all_empty_space_above_excluding_right(&self, incoming: &RectToInsert) -> BinSection {\n\n BinSection::new(\n\n self.x,\n\n self.y + incoming.height(),\n\n self.z,\n", "file_path": "src/bin_section.rs", "rank": 79, "score": 4.941626344307943 }, { "content": "## no_std\n\n\n\nrectangle-pack supports `no_std` by disabling the `std` feature.\n\n\n\n```toml\n\nrectangle-pack = {version = \"0.4\", default-features = false}\n\n```\n\n\n\nDisabling the `std` feature does the following.\n\n\n\n- `BTreeMap`s are used internally in places where `HashMap`s would have been used.\n\n\n\n## Features\n\n\n\n- Place any number of 2d / 3d rectangles into any number of 2d / 3d target bins.\n\n - Supports three dimensional rectangles through a width + height + depth based API.\n\n\n\n- Generic API that pushes as much as possible into user-land for maximum flexibility.\n\n\n\n- Group rectangles using generic group id's when you need to ensure that certain rectangles will always end up sharing a bin with each other.\n\n\n\n- Supports two dimensional rectangles (depth = 1).\n\n\n\n- User provided heuristics to grant full control over the packing algorithm.\n\n\n\n- Zero dependencies, making it easier to embed it inside of a more use case specific library without introducing bloat.\n\n\n\n- Deterministic packing.\n\n - Packing of the same inputs using the same heuristics and the same sized target bins will always lead to the same layout.\n\n - This is useful anywhere that reproducible builds are 
useful, such as when generating a texture atlas that is meant to be cached based on the hash of the contents.\n\n\n\n- Ability to remove placed rectangles and coalesce neighboring free space.\n\n\n\n## Future Work\n\n\n\nThe first version of `rectangle-pack` was designed to meet my own needs.\n\n\n\nAs such there is functionality that could be useful that was not explored since I did not need it.\n\n\n\nHere are some things that could be useful in the future.\n\n\n", "file_path": "README.md", "rank": 80, "score": 4.8769363558217265 }, { "content": " whd,\n\n [\n\n BinSection::new_spread(whd.width, 0, 0, FULL - whd.width, FULL, whd.depth),\n\n BinSection::new_spread(0, whd.height, 0, whd.width, FULL - whd.height, whd.depth),\n\n BinSection::new_spread(0, 0, whd.depth, FULL, FULL, FULL - whd.depth),\n\n ],\n\n );\n\n }\n\n\n\n /// Verify that we choose the correct splits when the placed rectangle is height > depth > width\n\n #[test]\n\n fn height_largest_depth_second_largest_width_smallest() {\n\n let whd = WidthHeightDepth {\n\n width: SMALLEST,\n\n height: BIGGEST,\n\n depth: MIDDLE,\n\n };\n\n\n\n test_splits(\n\n FULL,\n", "file_path": "src/bin_section.rs", "rank": 81, "score": 4.852074483461703 }, { "content": " whd,\n\n [\n\n BinSection::new_spread(whd.width, 0, 0, FULL - whd.width, FULL, FULL),\n\n BinSection::new_spread(0, whd.height, 0, whd.width, FULL - whd.height, whd.depth),\n\n BinSection::new_spread(0, 0, whd.depth, whd.width, FULL, FULL - whd.depth),\n\n ],\n\n );\n\n }\n\n\n\n /// Verify that we choose the correct splits when the placed rectangle is depth > width > height\n\n #[test]\n\n fn depth_largest_width_second_largest_height_smallest() {\n\n let whd = WidthHeightDepth {\n\n width: MIDDLE,\n\n height: SMALLEST,\n\n depth: BIGGEST,\n\n };\n\n\n\n test_splits(\n\n FULL,\n", "file_path": "src/bin_section.rs", "rank": 82, "score": 4.852074483461703 }, { "content": " whd,\n\n [\n\n BinSection::new_spread(whd.width, 0, 0, FULL - whd.width, 
whd.height, whd.depth),\n\n BinSection::new_spread(0, whd.height, 0, FULL, FULL - whd.height, whd.depth),\n\n BinSection::new_spread(0, 0, whd.depth, FULL, FULL, FULL - whd.depth),\n\n ],\n\n );\n\n }\n\n\n\n /// Verify that we choose the correct splits when the placed rectangle is width > depth > height\n\n #[test]\n\n fn width_largest_depth_second_largest_height_smallest() {\n\n let whd = WidthHeightDepth {\n\n width: BIGGEST,\n\n height: SMALLEST,\n\n depth: MIDDLE,\n\n };\n\n\n\n test_splits(\n\n FULL,\n", "file_path": "src/bin_section.rs", "rank": 83, "score": 4.852074483461703 }, { "content": " whd,\n\n [\n\n BinSection::new_spread(whd.width, 0, 0, FULL - whd.width, whd.height, whd.depth),\n\n BinSection::new_spread(0, whd.height, 0, FULL, FULL - whd.height, FULL),\n\n BinSection::new_spread(0, 0, whd.depth, FULL, whd.height, FULL - whd.depth),\n\n ],\n\n );\n\n }\n\n\n\n /// Verify that we choose the correct splits when the placed rectangle is height > width > depth\n\n #[test]\n\n fn height_largest_width_second_largest_depth_smallest() {\n\n let whd = WidthHeightDepth {\n\n width: MIDDLE,\n\n height: BIGGEST,\n\n depth: SMALLEST,\n\n };\n\n\n\n test_splits(\n\n FULL,\n", "file_path": "src/bin_section.rs", "rank": 84, "score": 4.852074483461703 }, { "content": " whd,\n\n [\n\n BinSection::new_spread(whd.width, 0, 0, FULL - whd.width, whd.height, FULL),\n\n BinSection::new_spread(0, whd.height, 0, FULL, FULL - whd.height, FULL),\n\n BinSection::new_spread(0, 0, whd.depth, whd.width, whd.height, FULL - whd.depth),\n\n ],\n\n );\n\n }\n\n\n\n /// Verify that we choose the correct splits when the placed rectangle is depth > height > width\n\n #[test]\n\n fn depth_largest_height_second_largest_width_smallest() {\n\n let whd = WidthHeightDepth {\n\n width: SMALLEST,\n\n height: MIDDLE,\n\n depth: BIGGEST,\n\n };\n\n\n\n test_splits(\n\n FULL,\n", "file_path": "src/bin_section.rs", "rank": 85, "score": 4.852074483461703 }, { "content": " fn 
empty_space_directly_above(&self, incoming: &RectToInsert) -> BinSection {\n\n BinSection::new_spread(\n\n self.x,\n\n self.y + incoming.height(),\n\n self.z,\n\n incoming.width(),\n\n self.whd.height - incoming.height(),\n\n incoming.depth(),\n\n )\n\n }\n\n\n\n fn empty_space_directly_right(&self, incoming: &RectToInsert) -> BinSection {\n\n BinSection::new_spread(\n\n self.x + incoming.width(),\n\n self.y,\n\n self.z,\n\n self.whd.width - incoming.width(),\n\n incoming.height(),\n\n incoming.depth(),\n\n )\n", "file_path": "src/bin_section.rs", "rank": 86, "score": 4.842119287795002 }, { "content": "\n\n fn all_empty_space_right_excluding_above(&self, incoming: &RectToInsert) -> BinSection {\n\n BinSection::new(\n\n self.x + incoming.width(),\n\n self.y,\n\n self.z,\n\n WidthHeightDepth {\n\n width: self.whd.width - incoming.width(),\n\n height: incoming.height(),\n\n depth: self.whd.depth,\n\n },\n\n )\n\n }\n\n\n\n fn all_empty_space_right_excluding_behind(&self, incoming: &RectToInsert) -> BinSection {\n\n BinSection::new(\n\n self.x + incoming.width(),\n\n self.y,\n\n self.z,\n\n WidthHeightDepth {\n", "file_path": "src/bin_section.rs", "rank": 87, "score": 4.80983487524191 }, { "content": " [\n\n self.all_empty_space_right(incoming),\n\n self.all_empty_space_above_excluding_right(incoming),\n\n self.empty_space_directly_behind(incoming),\n\n ]\n\n }\n\n\n\n fn all_empty_space_above(&self, incoming: &RectToInsert) -> BinSection {\n\n BinSection::new_spread(\n\n self.x,\n\n self.y + incoming.height(),\n\n self.z,\n\n self.whd.width,\n\n self.whd.height - incoming.height(),\n\n self.whd.depth,\n\n )\n\n }\n\n\n\n fn all_empty_space_right(&self, incoming: &RectToInsert) -> BinSection {\n\n BinSection::new_spread(\n", "file_path": "src/bin_section.rs", "rank": 88, "score": 4.77797811840138 }, { "content": " .try_place(&placement, &contains_smallest_box, &volume_heuristic)\n\n .unwrap();\n\n\n\n packed.1.sort();\n\n expected.sort();\n\n\n\n assert_eq!(packed.1, 
expected);\n\n }\n\n\n\n /// Verify that we choose the correct splits when the placed rectangle is width > height > depth\n\n #[test]\n\n fn width_largest_height_second_largest_depth_smallest() {\n\n let whd = WidthHeightDepth {\n\n width: BIGGEST,\n\n height: MIDDLE,\n\n depth: SMALLEST,\n\n };\n\n\n\n test_splits(\n\n FULL,\n", "file_path": "src/bin_section.rs", "rank": 89, "score": 4.590423559907797 }, { "content": " y,\n\n z,\n\n whd: WidthHeightDepth {\n\n width,\n\n height,\n\n depth,\n\n },\n\n }\n\n }\n\n}\n\n\n\nimpl BinSection {\n\n /// See if a `LayeredRect` can fit inside of this BinSection.\n\n ///\n\n /// If it can we return the `BinSection`s that would be created by placing the `LayeredRect`\n\n /// inside of this `BinSection`.\n\n ///\n\n /// Consider the diagram below of a smaller box placed into of a larger one.\n\n ///\n\n /// The remaining space can be divided into three new sections.\n", "file_path": "src/bin_section.rs", "rank": 90, "score": 4.295442470971983 }, { "content": " /// time complexity of `O(Width * Height * Depth)`, where the worst case is tht you have a bin\n\n /// full of `1x1x1` rectangles.\n\n ///\n\n /// To skip the validity checks use [`TargetBin.push_available_bin_section_unchecked`].\n\n ///\n\n /// [`TargetBin.push_available_bin_section_unchecked`]: #method.push_available_bin_section_unchecked\n\n pub fn push_available_bin_section(\n\n &mut self,\n\n bin_section: BinSection,\n\n ) -> Result<(), PushBinSectionError> {\n\n if bin_section.x >= self.max_width\n\n || bin_section.y >= self.max_height\n\n || bin_section.z >= self.max_depth\n\n {\n\n return Err(PushBinSectionError::OutOfBounds(bin_section));\n\n }\n\n\n\n for available in self.available_bin_sections.iter() {\n\n if available.overlaps(&bin_section) {\n\n return Err(PushBinSectionError::Overlaps {\n", "file_path": "src/target_bin/push_available_bin_section.rs", "rank": 91, "score": 4.134817677194763 }, { "content": " x: 0,\n\n y: 0,\n\n z: 0,\n\n whd: 
WidthHeightDepth {\n\n width: 10,\n\n height: 10,\n\n depth: 1\n\n },\n\n x_axis_rotation: RotatedBy::ZeroDegrees,\n\n y_axis_rotation: RotatedBy::ZeroDegrees,\n\n z_axis_rotation: RotatedBy::ZeroDegrees,\n\n }\n\n );\n\n assert_eq!(\n\n locations[&RectToPlaceId::Two].1,\n\n PackedLocation {\n\n x: 10,\n\n y: 0,\n\n z: 0,\n\n whd: WidthHeightDepth {\n", "file_path": "src/lib.rs", "rank": 92, "score": 4.059517419501644 }, { "content": " y: 0,\n\n z: 0,\n\n whd: WidthHeightDepth {\n\n width: 50,\n\n height: 90,\n\n depth: 1\n\n },\n\n x_axis_rotation: RotatedBy::ZeroDegrees,\n\n y_axis_rotation: RotatedBy::ZeroDegrees,\n\n z_axis_rotation: RotatedBy::ZeroDegrees,\n\n }\n\n );\n\n assert_eq!(\n\n locations[&RectToPlaceId::Two].1,\n\n PackedLocation {\n\n x: 0,\n\n y: 90,\n\n z: 0,\n\n whd: WidthHeightDepth {\n\n width: 1,\n", "file_path": "src/lib.rs", "rank": 93, "score": 4.016683355578901 }, { "content": " self.x + incoming.width(),\n\n self.y,\n\n self.z,\n\n self.whd.width - incoming.width(),\n\n self.whd.height,\n\n self.whd.depth,\n\n )\n\n }\n\n\n\n fn all_empty_space_behind(&self, incoming: &RectToInsert) -> BinSection {\n\n BinSection::new_spread(\n\n self.x,\n\n self.y,\n\n self.z + incoming.depth(),\n\n self.whd.width,\n\n self.whd.height,\n\n self.whd.depth - incoming.depth(),\n\n )\n\n }\n\n\n", "file_path": "src/bin_section.rs", "rank": 94, "score": 3.9736128889704494 }, { "content": "\n\n#[allow(missing_docs)]\n\nimpl PackedLocation {\n\n pub fn x(&self) -> u32 {\n\n self.x\n\n }\n\n\n\n pub fn y(&self) -> u32 {\n\n self.y\n\n }\n\n\n\n pub fn z(&self) -> u32 {\n\n self.z\n\n }\n\n\n\n pub fn width(&self) -> u32 {\n\n self.whd.width\n\n }\n\n\n\n pub fn height(&self) -> u32 {\n\n self.whd.height\n\n }\n\n\n\n pub fn depth(&self) -> u32 {\n\n self.whd.depth\n\n }\n\n}\n", "file_path": "src/packed_location.rs", "rank": 95, "score": 3.9524627225876854 }, { "content": " whd: WidthHeightDepth {\n\n width: 60,\n\n height: 95,\n\n depth: 1\n\n },\n\n 
x_axis_rotation: RotatedBy::ZeroDegrees,\n\n y_axis_rotation: RotatedBy::ZeroDegrees,\n\n z_axis_rotation: RotatedBy::ZeroDegrees,\n\n }\n\n );\n\n assert_eq!(\n\n locations[&RectToPlaceId::Two].1,\n\n PackedLocation {\n\n x: 60,\n\n y: 0,\n\n z: 0,\n\n whd: WidthHeightDepth {\n\n width: 40,\n\n height: 10,\n\n depth: 1\n", "file_path": "src/lib.rs", "rank": 96, "score": 3.9336709694528342 }, { "content": "### Three-Dimensional Incoming Rectangle Rotation\n\n\n\nWhen attempting to place a Rectangle into the smallest available bin section we might want to rotate the rectangle in order to see which orientation produces the best fit.\n\n\n\nThis could be accomplished by:\n\n\n\n1. The API exposes three booleans for every incoming rectangles, `allow_global_x_axis_rotation`, `allow_global_y_axis_rotation`, `allow_global_z_axis_rotation`.\n\n\n\n2. Let's say all three are enabled. When attempting to place the rectangle/box we should attempt it in all 6 possible orientations and then select the best placement (based on the `ComparePotentialContainersFn` heuristic).\n\n\n\n3. 
Return information to the caller about which axis ended up being rotated.\n\n\n\n### Mutually exclusive groups\n\n\n\nAn example of this is the ability to ensure that certain rectqngle groups are not placed in the same bins.\n\n\n\nPerhaps you have two plates (bins) and two groups of cheese (rectangles), one for Alice and one for Bob.\n\n\n\nWhen packing you want to ensure that these groups of cheese each end up in a different bin since Alice and Bob don't like to share.\n\n\n\n### Stats on how the bins were packed\n\n\n\nThings such as the amount of wasted space - or anything else that would allow the caller to compare the results of different combinations of\n\ntarget bin sizes and heuristics to see which packed the most efficiently.\n\n\n\n---\n\n\n\nIf you have a use case that isn't supported, go right ahead and open an issue or submit a pull request.\n\n\n\n## Packing Algorithm\n\n\n\nWe started with the algorithm described in [rectpack2D] and then made some adjustments in order to\n\nsupport our goal of flexibly supporting all use cases.\n\n\n\n\n\n- The heuristic is provided by the caller instead of having `rectangle-pack` decide on a user provided heuristic.\n\n\n\n- When splitting an available section of a bin into two new sections of a bin - we do not decide on how the split should occur arbitrarily.\n\n Instead, we base it on the user provided `more_suitable_containers` heuristic function.\n\n\n\n- There is a third dimension.\n\n\n", "file_path": "README.md", "rank": 97, "score": 3.847971349969571 }, { "content": " width: self.whd.width - incoming.width(),\n\n height: self.whd.height,\n\n depth: incoming.depth(),\n\n },\n\n )\n\n }\n\n\n\n fn all_empty_space_behind_excluding_above(&self, incoming: &RectToInsert) -> BinSection {\n\n BinSection::new(\n\n self.x,\n\n self.y,\n\n self.z + incoming.depth(),\n\n WidthHeightDepth {\n\n width: self.whd.width,\n\n height: incoming.height(),\n\n depth: self.whd.depth - incoming.depth(),\n\n },\n\n )\n\n }\n\n\n", 
"file_path": "src/bin_section.rs", "rank": 98, "score": 3.6891247465972317 }, { "content": "## Background / Initial Motivation\n\n\n\nIn my application I've switched to dynamically placing textures into atlases at runtime\n\ninstead of in how I previously used an asset compilation step, so some of the problems\n\nexplained in the initial motivation details below are now moot.\n\n\n\nI still use rectangle-pack to power my runtime texture allocation, though,\n\nalong with a handful of other strategies depending on the nature of the\n\ntextures that need to be placed into the atlas.\n\n\n\nrectangle-pack knows nothing about textures, so you can use it for any form of bin\n\npacking, whether at runtime, during an offline step or any other time you like.\n\n\n\n<details>\n\n<summary>\n\nClick to show the initial motivation for the library.\n\n</summary>\n\n\n\nI'm working on a game with some of the following texture atlas requirements (as of March 2020):\n\n\n\n- I need to be able to guarantee that certain textures are available in the same atlas.\n\n - For example - if I'm rendering terrain using a blend map that maps each channel to a color / metallic-roughness / normal texture\n\n I want all of those textures to be available in the same atlas.\n\n Otherwise in the worst case I might need over a dozen texture uniforms in order to render a single chunk of terrain.\n\n\n\n- I want to have control over which channels are used when I'm packing my atlases.\n\n - For example - I need to be able to easily pack my metallic and roughness textures into one channel each, while\n\n packing color and normal channels into three channels.\n", "file_path": "README.md", "rank": 99, "score": 3.662069243671296 } ]
Rust
src/controllers/time.rs
gopalsgs/rustRocket
ef8e8e45ae3d0a3131b0024b66ee2b8322d56235
use std::f64; use rand::Rng; use super::Actions; use game_state::GameState; use geometry::{Advance, Position, Point}; use models::{Bullet, Enemy, Particle, Vector}; use util; const BULLETS_PER_SECOND: f64 = 100.0; const BULLET_RATE: f64 = 1.0 / BULLETS_PER_SECOND; const ENEMY_SPAWNS_PER_SECOND: f64 = 1.0; const ENEMY_SPAWN_RATE: f64 = 1.0 / ENEMY_SPAWNS_PER_SECOND; const TRAIL_PARTICLES_PER_SECOND: f64 = 20.0; const TRAIL_PARTICLE_RATE: f64 = 1.0 / TRAIL_PARTICLES_PER_SECOND; const ADVANCE_SPEED: f64 = 200.0; const BULLET_SPEED: f64 = 500.0; const ENEMY_SPEED: f64 = 100.0; const ROTATE_SPEED: f64 = 2.0 * f64::consts::PI; const PLAYER_GRACE_AREA: f64 = 200.0; pub struct TimeController<T: Rng> { rng: T, current_time: f64, last_tail_particle: f64, last_shoot: f64, last_spawned_enemy: f64 } impl<T: Rng> TimeController<T> { pub fn new(rng: T) -> TimeController<T> { TimeController { rng, current_time: 0.0, last_tail_particle: 0.0, last_shoot: 0.0, last_spawned_enemy: 0.0 } } pub fn update_seconds(&mut self, dt: f64, actions: &Actions, state: &mut GameState) { self.current_time += dt; if actions.rotate_left { *state.world.player.direction_mut() += -ROTATE_SPEED * dt; } if actions.rotate_right { *state.world.player.direction_mut() += ROTATE_SPEED * dt; }; let speed = if actions.boost { 2.0 * ADVANCE_SPEED } else { ADVANCE_SPEED }; state.world.player.advance_wrapping(dt * speed, state.world.size); for particle in &mut state.world.particles { particle.update(dt); } util::fast_retain(&mut state.world.particles, |p| p.ttl > 0.0); if self.current_time - self.last_tail_particle > TRAIL_PARTICLE_RATE { self.last_tail_particle = self.current_time; state.world.particles.push(Particle::new(state.world.player.vector.clone().invert(), 0.5)); } if actions.shoot && self.current_time - self.last_shoot > BULLET_RATE { self.last_shoot = self.current_time; state.world.bullets.push(Bullet::new(Vector::new(state.world.player.front(), state.world.player.direction()))); } for bullet in &mut 
state.world.bullets { bullet.update(dt * BULLET_SPEED); } { let size = &state.world.size; util::fast_retain(&mut state.world.bullets, |b| size.contains(b.position())); } if self.current_time - self.last_spawned_enemy > ENEMY_SPAWN_RATE { self.last_spawned_enemy = self.current_time; let player_pos: &Vector = &state.world.player.vector; let mut enemy_pos; loop { enemy_pos = Vector::random(&mut self.rng, state.world.size); if enemy_pos.position != player_pos.position { break; } } if enemy_pos.position.intersect_circle(&player_pos.position, PLAYER_GRACE_AREA) { let length: f64 = enemy_pos.position.squared_distance_to(&player_pos.position).sqrt(); let dp: Point = enemy_pos.position - player_pos.position; enemy_pos.position = player_pos.position + dp / length * PLAYER_GRACE_AREA; } let new_enemy = Enemy::new(enemy_pos); state.world.enemies.push(new_enemy); } for enemy in &mut state.world.enemies { enemy.update(dt * ENEMY_SPEED, state.world.player.position()); } } }
use std::f64; use rand::Rng; use super::Actions; use game_state::GameState; use geometry::{Advance, Position, Point}; use models::{Bullet, Enemy, Particle, Vector}; use util; const BULLETS_PER_SECOND: f64 = 100.0; const BULLET_RATE: f64 = 1.0 / BULLETS_PER_SECOND; const ENEMY_SPAWNS_PER_SECOND: f64 = 1.0; const ENEMY_SPAWN_RATE: f64 = 1.0 / ENEMY_SPAWNS_PER_SECOND; const TRAIL_PARTICLES_PER_SECOND: f64 = 20.0; const TRAIL_PARTICLE_RATE: f64 = 1.0 / TRAIL_PARTICLES_PER_SECOND; const ADVANCE_SPEED: f64 = 200.0; const BULLET_SPEED: f64 = 500.0; const ENEMY_SPEED: f64 = 100.0; const ROTATE_SPEED: f64 = 2.0 * f64::consts::PI; const PLAYER_GRACE_AREA: f64 = 200.0; pub struct TimeController<T: Rng> { rng: T, current_time: f64, last_tail_particle: f64, last_shoot: f64, last_spawned_enemy: f64 } impl<T: Rng> TimeController<T> {
pub fn update_seconds(&mut self, dt: f64, actions: &Actions, state: &mut GameState) { self.current_time += dt; if actions.rotate_left { *state.world.player.direction_mut() += -ROTATE_SPEED * dt; } if actions.rotate_right { *state.world.player.direction_mut() += ROTATE_SPEED * dt; }; let speed = if actions.boost { 2.0 * ADVANCE_SPEED } else { ADVANCE_SPEED }; state.world.player.advance_wrapping(dt * speed, state.world.size); for particle in &mut state.world.particles { particle.update(dt); } util::fast_retain(&mut state.world.particles, |p| p.ttl > 0.0); if self.current_time - self.last_tail_particle > TRAIL_PARTICLE_RATE { self.last_tail_particle = self.current_time; state.world.particles.push(Particle::new(state.world.player.vector.clone().invert(), 0.5)); } if actions.shoot && self.current_time - self.last_shoot > BULLET_RATE { self.last_shoot = self.current_time; state.world.bullets.push(Bullet::new(Vector::new(state.world.player.front(), state.world.player.direction()))); } for bullet in &mut state.world.bullets { bullet.update(dt * BULLET_SPEED); } { let size = &state.world.size; util::fast_retain(&mut state.world.bullets, |b| size.contains(b.position())); } if self.current_time - self.last_spawned_enemy > ENEMY_SPAWN_RATE { self.last_spawned_enemy = self.current_time; let player_pos: &Vector = &state.world.player.vector; let mut enemy_pos; loop { enemy_pos = Vector::random(&mut self.rng, state.world.size); if enemy_pos.position != player_pos.position { break; } } if enemy_pos.position.intersect_circle(&player_pos.position, PLAYER_GRACE_AREA) { let length: f64 = enemy_pos.position.squared_distance_to(&player_pos.position).sqrt(); let dp: Point = enemy_pos.position - player_pos.position; enemy_pos.position = player_pos.position + dp / length * PLAYER_GRACE_AREA; } let new_enemy = Enemy::new(enemy_pos); state.world.enemies.push(new_enemy); } for enemy in &mut state.world.enemies { enemy.update(dt * ENEMY_SPEED, state.world.player.position()); } } }
pub fn new(rng: T) -> TimeController<T> { TimeController { rng, current_time: 0.0, last_tail_particle: 0.0, last_shoot: 0.0, last_spawned_enemy: 0.0 } }
function_block-full_function
[ { "content": "/// Generates a new explosion of the given intensity at the given position.\n\n/// This works best with values between 5 and 25\n\npub fn make_explosion(particles: &mut Vec<Particle>, position: &Point, intensity: u8) {\n\n use itertools_num;\n\n for rotation in itertools_num::linspace(0.0, 2.0 * ::std::f64::consts::PI, 30) {\n\n for ttl in (1..intensity).map(|x| (x as f64) / 10.0) {\n\n particles.push(Particle::new(Vector::new(position.clone(), rotation), ttl));\n\n }\n\n }\n\n}\n", "file_path": "src/util.rs", "rank": 0, "score": 87781.22604042008 }, { "content": "/// A trait for objects that occupy a position in space\n\npub trait Position {\n\n /// Returns the x coordinate of the object\n\n fn x(&self) -> f64;\n\n\n\n /// Returns a mutable reference to the x coordinate\n\n fn x_mut(&mut self) -> &mut f64;\n\n\n\n /// Returns the y coordinate of the object\n\n fn y(&self) -> f64;\n\n\n\n /// Returns a mutable reference to the y coordinate\n\n fn y_mut(&mut self) -> &mut f64;\n\n\n\n /// Returns the position of the object\n\n fn position(&self) -> Point {\n\n Point::new(self.x(), self.y())\n\n }\n\n}\n\n\n", "file_path": "src/geometry/traits.rs", "rank": 1, "score": 48423.774524950386 }, { "content": "/// A trait that provides collision detection for objects with a position and a radius\n\n///\n\n/// For collision purposes, all objects are treated as circles\n\npub trait Collide: Position {\n\n /// Returns the radius of the object\n\n fn radius(&self) -> f64;\n\n\n\n /// Returns the diameter of the objects\n\n fn diameter(&self) -> f64 {\n\n self.radius() * 2.0\n\n }\n\n\n\n /// Returns true if the two objects collide and false otherwise\n\n fn collides_with<O: Collide>(&self, other: &O) -> bool {\n\n let radii = self.radius() + other.radius();\n\n self.position().squared_distance_to(&other.position()) < radii * radii\n\n }\n\n}\n", "file_path": "src/geometry/traits.rs", "rank": 2, "score": 45669.80229785099 }, { "content": "/// A trait for objects 
that have can move in a given direction\n\npub trait Advance: Position {\n\n /// Returns the direction of the object, measured in radians\n\n ///\n\n /// Note: 0.0 points to the right and a positive number means a clockwise\n\n /// rotation\n\n fn direction(&self) -> f64;\n\n\n\n /// Returns a mutable reference to the direction of the object\n\n fn direction_mut(&mut self) -> &mut f64;\n\n\n\n /// Changes the direction of the vector to point to the given target\n\n fn point_to(&mut self, target: Point) {\n\n let m = (self.y() - target.y) / (self.x() - target.x);\n\n\n\n *self.direction_mut() = if target.x > self.x() {\n\n m.atan()\n\n } else {\n\n m.atan() + f64::consts::PI\n\n };\n\n }\n", "file_path": "src/geometry/traits.rs", "rank": 3, "score": 45667.85621962698 }, { "content": "fn new_game_data(width: f64, height: f64) -> GameData {\n\n GameData {\n\n state: GameState::new(Size::new(width, height)),\n\n actions: Actions::default(),\n\n time_controller: TimeController::new(Pcg32Basic::from_seed([42, 42]))\n\n }\n\n}\n\n\n\n// These functions are provided by the runtime\n\nextern \"C\" {\n\n fn clear_screen();\n\n fn draw_player(_: c_double, _: c_double, _: c_double);\n\n fn draw_enemy(_: c_double, _: c_double);\n\n fn draw_bullet(_: c_double, _: c_double);\n\n fn draw_particle(_: c_double, _: c_double, _: c_double);\n\n fn draw_score(_: c_double);\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn resize(width: c_double, height: c_double) {\n", "file_path": "src/lib.rs", "rank": 4, "score": 34896.730428467636 }, { "content": "struct GameData {\n\n state: GameState,\n\n actions: Actions,\n\n time_controller: TimeController<Pcg32Basic>\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 5, "score": 34752.25492114943 }, { "content": "/// Optimized version of `Vec::retain`\n\n///\n\n/// We achieve better performance by renouncing to keep the original order of the `Vec`\n\npub fn fast_retain<T, F>(vec: &mut Vec<T>, mut f: F)\n\nwhere F: FnMut(&T) -> bool {\n\n let mut i = 
0;\n\n while i < vec.len() {\n\n if !f(&vec[i]) {\n\n vec.swap_remove(i);\n\n }\n\n\n\n i += 1;\n\n }\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 6, "score": 32701.041447945587 }, { "content": "use geometry::Point;\n\nuse models::{Particle, Vector};\n\n\n\n/// Optimized version of `Vec::retain`\n\n///\n\n/// We achieve better performance by renouncing to keep the original order of the `Vec`\n", "file_path": "src/util.rs", "rank": 7, "score": 23001.476083518643 }, { "content": "use geometry::Point;\n\nuse super::Vector;\n\nuse geometry::{Advance, Collide};\n\n\n\n/// Enemies follow the player in order to cause a collision and let him explode\n\npub struct Enemy {\n\n vector: Vector\n\n}\n\n\n\nderive_position_direction!(Enemy);\n\n\n\nimpl Enemy {\n\n /// Create a enemy with the given vector\n\n pub fn new(vector: Vector) -> Enemy {\n\n Enemy { vector: vector }\n\n }\n\n\n\n /// Update the enemy\n\n pub fn update(&mut self, speed: f64, player_position: Point) {\n\n // Point to the player\n\n self.point_to(player_position);\n\n self.advance(speed);\n\n }\n\n}\n\n\n\nimpl Collide for Enemy {\n\n fn radius(&self) -> f64 { 10.0 }\n\n}\n", "file_path": "src/models/enemy.rs", "rank": 8, "score": 21936.952350773176 }, { "content": "use super::Vector;\n\nuse geometry::Advance;\n\n\n\n/// A model representing a particle\n\n///\n\n/// Particles are visible objects that have a time to live and move around\n\n/// in a given direction until their time is up. 
They are spawned when the\n\n/// player or an enemy is killed\n\npub struct Particle {\n\n pub vector: Vector,\n\n pub ttl: f64\n\n}\n\n\n\nderive_position_direction!(Particle);\n\n\n\nimpl Particle {\n\n /// Create a particle with the given vector and time to live in seconds\n\n pub fn new(vector: Vector, ttl: f64) -> Particle {\n\n Particle { vector: vector, ttl: ttl }\n\n }\n\n\n\n /// Update the particle\n\n pub fn update(&mut self, elapsed_time: f64) {\n\n self.ttl -= elapsed_time;\n\n let speed = 500.0 * self.ttl * self.ttl;\n\n self.advance(elapsed_time * speed);\n\n }\n\n}\n", "file_path": "src/models/particle.rs", "rank": 9, "score": 21816.316456435576 }, { "content": "use std::f64;\n\n\n\nuse rand::Rng;\n\n\n\nuse geometry::{Point, Size};\n\n\n\n/// A `Vector`\n\n#[derive(Clone, Default)]\n\npub struct Vector {\n\n /// The position of the vector\n\n pub position: Point,\n\n /// The direction angle, in radians\n\n pub direction: f64\n\n}\n\n\n\nimpl Vector {\n\n /// Returns a new `Vector`\n\n pub fn new(position: Point, direction: f64) -> Vector {\n\n Vector { position: position, direction: direction }\n\n }\n", "file_path": "src/models/vector.rs", "rank": 10, "score": 21705.976084560436 }, { "content": "\n\n /// Returns a random `Vector` within the given bounds\n\n pub fn random<R: Rng>(rng: &mut R, bounds: Size) -> Vector {\n\n Vector::new(Point::random(rng, bounds), rng.gen())\n\n }\n\n\n\n /// Consumes the vector and returns a new one with inverted direction\n\n pub fn invert(mut self) -> Vector {\n\n self.direction -= f64::consts::PI;\n\n self\n\n }\n\n}\n\n\n\n/// A macro to implement `Position` and `Direction` for any type that has a field named `vector`\n\n#[macro_export]\n\nmacro_rules! 
derive_position_direction {\n\n ($t:ty) => {\n\n impl ::geometry::Position for $t {\n\n fn x(&self) -> f64 { self.vector.position.x }\n\n fn x_mut(&mut self) -> &mut f64 { &mut self.vector.position.x }\n", "file_path": "src/models/vector.rs", "rank": 11, "score": 21702.158115178914 }, { "content": " fn y(&self) -> f64 { self.vector.position.y }\n\n fn y_mut(&mut self) -> &mut f64 { &mut self.vector.position.y }\n\n }\n\n\n\n impl ::geometry::Advance for $t {\n\n fn direction(&self) -> f64 {\n\n self.vector.direction\n\n }\n\n\n\n fn direction_mut(&mut self) -> &mut f64 {\n\n &mut self.vector.direction\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/models/vector.rs", "rank": 12, "score": 21697.7822968777 }, { "content": "use rand::Rng;\n\n\n\nuse super::Size;\n\n\n\nuse std::ops::{Add, Sub, Mul, Div};\n\n\n\n/// A `Point` represents a position in space\n\n#[derive(Clone, Default, Copy)]\n\npub struct Point {\n\n pub x: f64,\n\n pub y: f64\n\n}\n\n\n\nimpl Point {\n\n /// Returns a new `Point` with the given coordinates\n\n pub fn new(x: f64, y: f64) -> Point {\n\n Point { x: x, y: y }\n\n }\n\n\n\n /// Returns a random `Point` within the given bounds (exclusive)\n", "file_path": "src/geometry/point.rs", "rank": 13, "score": 21003.40418873427 }, { "content": " pub fn random<R: Rng>(rng: &mut R, bounds: Size) -> Point {\n\n Point {\n\n x: rng.gen_range(0.0, bounds.width),\n\n y: rng.gen_range(0.0, bounds.height)\n\n }\n\n }\n\n\n\n /// Returns the squared distance from this point to the given one\n\n pub fn squared_distance_to(&self, target: &Point) -> f64 {\n\n (self.x - target.x) * (self.x - target.x)\n\n + (self.y - target.y) * (self.y - target.y)\n\n }\n\n\n\n /// Rotates the point through the origin in the given angle (radians)\n\n pub fn rotate(mut self, radians: f64) -> Point {\n\n let radius = (self.x * self.x + self.y * self.y).sqrt();\n\n let point_angle = (self.y / self.x).atan();\n\n let final_angle = point_angle + radians;\n\n self.x = final_angle.cos() * 
radius;\n\n self.y = final_angle.sin() * radius;\n", "file_path": "src/geometry/point.rs", "rank": 14, "score": 20999.63562853168 }, { "content": " self\n\n }\n\n\n\n /// Translates the point by another point\n\n pub fn translate(mut self, other: &Point) -> Point {\n\n self.x += other.x;\n\n self.y += other.y;\n\n self\n\n }\n\n\n\n /// Checks if this point is contained in a circle\n\n pub fn intersect_circle(self, center: &Point, radius: f64) -> bool {\n\n (self.x - center.x).powi(2) +\n\n (self.y - center.y).powi(2) < radius.powi(2)\n\n }\n\n}\n\n\n\n/// Implements '==' for Point, as well as its inverse '!='\n\nimpl PartialEq for Point {\n\n fn eq (&self, _rhs: &Self) -> bool {\n", "file_path": "src/geometry/point.rs", "rank": 15, "score": 20997.40134297621 }, { "content": " fn div(self, _rhs: Point) -> Point {\n\n assert!(_rhs.x != 0f64);\n\n assert!(_rhs.y != 0f64);\n\n Point {\n\n x: self.x / _rhs.x,\n\n y: self.y / _rhs.y,\n\n }\n\n }\n\n}\n\n\n\n/// Implements the '/' operator for Point / f64:\n\nimpl Div<f64> for Point {\n\n type Output = Point;\n\n\n\n fn div(self, _rhs: f64) -> Point {\n\n assert!(_rhs != 0f64);\n\n Point {\n\n x: self.x / _rhs,\n\n y: self.y / _rhs,\n\n }\n\n }\n\n}\n", "file_path": "src/geometry/point.rs", "rank": 16, "score": 20996.853137038117 }, { "content": " }\n\n }\n\n}\n\n\n\n/// Implements the '*' operator for Point * f64\n\nimpl Mul<f64> for Point {\n\n type Output = Point;\n\n\n\n fn mul(self, _rhs: f64) -> Point {\n\n Point {\n\n x: self.x * _rhs,\n\n y: self.x * _rhs,\n\n }\n\n }\n\n}\n\n\n\n/// Implements the '/' operator for Point / Point\n\nimpl Div for Point {\n\n type Output = Point;\n\n\n", "file_path": "src/geometry/point.rs", "rank": 17, "score": 20996.678762619846 }, { "content": "/// Implements the '-' operator for Point - f64\n\nimpl Sub<f64> for Point {\n\n type Output = Point;\n\n\n\n fn sub(self, _rhs: f64) -> Point {\n\n Point {\n\n x: self.x - _rhs,\n\n y: self.y - _rhs,\n\n }\n\n }\n\n}\n\n\n\n/// Implements 
the '*' operator for Point * Point\n\nimpl Mul for Point {\n\n type Output = Point;\n\n\n\n fn mul(self, _rhs: Point) -> Point {\n\n Point {\n\n x: self.x * _rhs.x,\n\n y: self.y * _rhs.y,\n", "file_path": "src/geometry/point.rs", "rank": 18, "score": 20996.601482081787 }, { "content": " (self.x == _rhs.x) && (self.y == _rhs.y)\n\n }\n\n}\n\n\n\n/// Implements the '+' operator for Point + Point\n\nimpl Add for Point {\n\n type Output = Point;\n\n\n\n fn add(self, _rhs: Point) -> Point {\n\n Point {\n\n x: self.x + _rhs.x,\n\n y: self.y + _rhs.y,\n\n }\n\n }\n\n}\n\n\n\n/// Implements the '+' operator for Point + f64\n\nimpl Add<f64> for Point {\n\n type Output = Point;\n\n\n", "file_path": "src/geometry/point.rs", "rank": 19, "score": 20996.38536392193 }, { "content": " fn add(self, _rhs: f64) -> Point {\n\n Point {\n\n x: self.x + _rhs,\n\n y: self.y + _rhs,\n\n }\n\n }\n\n}\n\n\n\n/// Implements the '-' operator for Point - Point\n\nimpl Sub for Point {\n\n type Output = Point;\n\n\n\n fn sub(self, _rhs: Point) -> Point {\n\n Point {\n\n x: self.x - _rhs.x,\n\n y: self.y - _rhs.y,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/geometry/point.rs", "rank": 20, "score": 20995.963252558067 }, { "content": "use rand::Rng;\n\n\n\nuse geometry::{Point, Size};\n\nuse super::Vector;\n\nuse geometry::{Advance, Collide, Position};\n\n\n\n/// The `Player` is the rocket controlled by the user\n\n#[derive(Default)]\n\npub struct Player {\n\n pub vector: Vector\n\n}\n\n\n\nderive_position_direction!(Player);\n\n\n\n/// The player is represented as the polygon below\n\npub const POLYGON: &'static [[f64; 2]] = &[\n\n [0.0, -8.0],\n\n [20.0, 0.0],\n\n [0.0, 8.0]\n\n];\n", "file_path": "src/models/player.rs", "rank": 22, "score": 13.552236234290538 }, { "content": "// macro_use needs to go first so the macro is visible for the other modules\n\n#[macro_use]\n\nmod vector;\n\n\n\nmod bullet;\n\nmod enemy;\n\nmod particle;\n\nmod player;\n\nmod world;\n\n\n\npub use 
self::bullet::Bullet;\n\npub use self::enemy::Enemy;\n\npub use self::particle::Particle;\n\npub use self::player::{Player, POLYGON as PLAYER_POLYGON};\n\npub use self::vector::Vector;\n\npub use self::world::World;\n", "file_path": "src/models/mod.rs", "rank": 23, "score": 12.382002628757686 }, { "content": "use super::Vector;\n\nuse geometry::{Advance, Collide};\n\n\n\n/// Bullets are spawned when the player shoots\n\n///\n\n/// When an enemy is reached by a bullet, it will explode\n\npub struct Bullet {\n\n vector: Vector\n\n}\n\n\n\nderive_position_direction!(Bullet);\n\n\n\nimpl Bullet {\n\n /// Create a bullet with the given vector\n\n pub fn new(vector: Vector) -> Bullet {\n\n Bullet { vector: vector }\n\n }\n\n\n\n /// Update the bullet's position\n\n pub fn update(&mut self, units: f64) {\n\n self.advance(units);\n\n }\n\n}\n\n\n\nimpl Collide for Bullet {\n\n fn radius(&self) -> f64 { 3.0 }\n\n}\n", "file_path": "src/models/bullet.rs", "rank": 24, "score": 11.460615920801162 }, { "content": "use rand::Rng;\n\n\n\nuse geometry::Size;\n\nuse models::{Bullet, Enemy, Particle, Player};\n\n\n\n/// A model that contains the other models and renders them\n\npub struct World {\n\n pub player: Player,\n\n pub particles: Vec<Particle>,\n\n pub bullets: Vec<Bullet>,\n\n pub enemies: Vec<Enemy>,\n\n pub size: Size\n\n}\n\n\n\nimpl World {\n\n /// Returns a new world of the given size\n\n pub fn new<R: Rng>(rng: &mut R, size: Size) -> World {\n\n World {\n\n player: Player::random(rng, size),\n\n particles: Vec::with_capacity(1000),\n\n bullets: vec![],\n\n enemies: vec![],\n\n size: size\n\n }\n\n }\n\n}\n", "file_path": "src/models/world.rs", "rank": 25, "score": 11.394987343886275 }, { "content": "\n\nimpl Player {\n\n /// Create a new `Player` with a random position and direction\n\n pub fn random<R: Rng>(rng: &mut R, bounds: Size) -> Player {\n\n Player { vector: Vector::random(rng, bounds) }\n\n }\n\n\n\n /// Returns the front of the rocket\n\n pub fn 
front(&self) -> Point {\n\n Point::new(POLYGON[1][0], POLYGON[1][1])\n\n .rotate(self.direction())\n\n .translate(&self.position())\n\n }\n\n}\n\n\n\nimpl Collide for Player {\n\n fn radius(&self) -> f64 { 6.0 }\n\n}\n", "file_path": "src/models/player.rs", "rank": 26, "score": 11.307441029976486 }, { "content": "use rand::Rng;\n\n\n\nuse super::Point;\n\n\n\n/// A `Size` represents a region in space\n\n#[derive(Clone, Copy, Default)]\n\npub struct Size {\n\n pub width: f64,\n\n pub height: f64\n\n}\n\n\n\nimpl Size {\n\n /// Returns a new `Size` of the given dimensions\n\n pub fn new(width: f64, height: f64) -> Size {\n\n Size { width: width, height: height }\n\n }\n\n\n\n /// Returns true if the `Point` is contained in this `Size` or false otherwise\n\n pub fn contains(&self, point: Point) -> bool {\n\n 0.0 <= point.x && point.x <= self.width\n", "file_path": "src/geometry/size.rs", "rank": 27, "score": 10.147542037955052 }, { "content": "mod point;\n\nmod size;\n\nmod traits;\n\n\n\npub use self::point::Point;\n\npub use self::size::Size;\n\npub use self::traits::{Position, Advance, Collide};", "file_path": "src/geometry/mod.rs", "rank": 30, "score": 9.516063984803502 }, { "content": " *DATA.lock().unwrap() = new_game_data(width, height);\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn draw() {\n\n use geometry::{Advance, Position};\n\n let data = &mut DATA.lock().unwrap();\n\n let world = &data.state.world;\n\n\n\n clear_screen();\n\n for particle in &world.particles {\n\n draw_particle(particle.x(), particle.y(), 5.0 * particle.ttl);\n\n }\n\n\n\n for bullet in &world.bullets {\n\n draw_bullet(bullet.x(), bullet.y());\n\n }\n\n\n\n for enemy in &world.enemies {\n\n draw_enemy(enemy.x(), enemy.y());\n", "file_path": "src/lib.rs", "rank": 31, "score": 9.059847251415341 }, { "content": "\n\n // We introduce a scope to shorten the lifetime of the borrows below\n\n {\n\n let bullets = &mut state.world.bullets;\n\n let enemies = &mut state.world.enemies;\n\n 
let particles = &mut state.world.particles;\n\n\n\n // Note: this is O(n * m) where n = amount of bullets and n = amount of enemies\n\n // This is pretty bad, but we don't care because n and m are small\n\n util::fast_retain(bullets, |bullet| {\n\n // Remove the first enemy that collides with a bullet (if any)\n\n // Add an explosion on its place\n\n if let Some((index, position)) = enemies.iter().enumerate()\n\n .find(|&(_, enemy)| enemy.collides_with(bullet))\n\n .map(|(index, enemy)| (index, enemy.position()))\n\n {\n\n util::make_explosion(particles, &position, 10);\n\n enemies.remove(index);\n\n false\n\n } else {\n", "file_path": "src/controllers/collisions.rs", "rank": 32, "score": 8.709741722155517 }, { "content": " && 0.0 <= point.y && point.y <= self.height\n\n }\n\n\n\n /// Returns a random x coordinate within the bounds of this `Size`\n\n pub fn random_x<R: Rng>(&self, rng: &mut R) -> f64 {\n\n rng.gen_range(0.0, self.width)\n\n }\n\n\n\n /// Returns a random y coordinate within the bounds of this `Size`\n\n pub fn random_y<R: Rng>(&self, rng: &mut R) -> f64 {\n\n rng.gen_range(0.0, self.height)\n\n }\n\n}\n", "file_path": "src/geometry/size.rs", "rank": 33, "score": 8.6861988040491 }, { "content": "//! 
Traits used by the models\n\n\n\nuse std::f64;\n\n\n\nuse super::{Point, Size};\n\n\n\n/// A trait for objects that occupy a position in space\n", "file_path": "src/geometry/traits.rs", "rank": 34, "score": 8.548934728751428 }, { "content": "use game_state::GameState;\n\nuse geometry::{Collide, Position};\n\nuse util;\n\n\n\nconst SCORE_PER_ENEMY: u32 = 10;\n\n\n\npub struct CollisionsController;\n\n\n\nimpl CollisionsController {\n\n pub fn handle_collisions(state: &mut GameState) {\n\n CollisionsController::handle_bullet_collisions(state);\n\n CollisionsController::handle_player_collisions(state);\n\n }\n\n\n\n /// Handles collisions between the bullets and the enemies\n\n ///\n\n /// When an enemy is reached by a bullet, both the enemy and the bullet\n\n /// will be removed. Additionally, the score of the player will be increased.\n\n fn handle_bullet_collisions(state: &mut GameState) {\n\n let old_enemy_count = state.world.enemies.len();\n", "file_path": "src/controllers/collisions.rs", "rank": 35, "score": 8.20806079058758 }, { "content": "use pcg_rand::Pcg32Basic;\n\nuse rand::SeedableRng;\n\n\n\nuse geometry::{Position, Size};\n\nuse models::World;\n\n\n\n/// The data structure that contains the state of the game\n\npub struct GameState {\n\n /// The world contains everything that needs to be drawn\n\n pub world: World,\n\n /// The current score of the player\n\n pub score: u32\n\n}\n\n\n\nimpl GameState {\n\n /// Returns a new `GameState` containing a `World` of the given `Size`\n\n pub fn new(size: Size) -> GameState {\n\n let mut rng = Pcg32Basic::from_seed([42, 42]);\n\n GameState {\n\n world: World::new(&mut rng, size),\n", "file_path": "src/game_state.rs", "rank": 36, "score": 7.34022273119093 }, { "content": " score: 0\n\n }\n\n }\n\n\n\n /// Reset our game-state\n\n pub fn reset(&mut self) {\n\n let mut rng = Pcg32Basic::from_seed([42, 42]);\n\n\n\n // Reset player position\n\n *self.world.player.x_mut() = self.world.size.random_x(&mut rng);\n\n 
*self.world.player.y_mut() = self.world.size.random_y(&mut rng);\n\n\n\n // Reset score\n\n self.score = 0;\n\n\n\n // Remove all enemies and bullets\n\n self.world.bullets.clear();\n\n self.world.enemies.clear();\n\n }\n\n}\n", "file_path": "src/game_state.rs", "rank": 38, "score": 6.969649082971589 }, { "content": " true\n\n }\n\n });\n\n }\n\n\n\n let killed_enemies = (old_enemy_count - state.world.enemies.len()) as u32;\n\n state.score += SCORE_PER_ENEMY * killed_enemies;\n\n }\n\n\n\n /// Handles collisions between the player and the enemies\n\n fn handle_player_collisions(state: &mut GameState) {\n\n if state.world.enemies.iter().any(|enemy| state.world.player.collides_with(enemy)) {\n\n // Make an explosion where the player was\n\n let ppos = state.world.player.position();\n\n util::make_explosion(&mut state.world.particles, &ppos, 8);\n\n\n\n state.reset();\n\n }\n\n }\n\n}\n", "file_path": "src/controllers/collisions.rs", "rank": 40, "score": 6.661722764979523 }, { "content": "//! This module contains the game logic\n\n//!\n\n//! There are three main controllers: collisions, input and time\n\n\n\nmod collisions;\n\nmod input;\n\nmod time;\n\n\n\npub use self::collisions::CollisionsController;\n\npub use self::input::Actions;\n\npub use self::time::TimeController;\n", "file_path": "src/controllers/mod.rs", "rank": 42, "score": 4.866866650318953 }, { "content": "extern crate itertools_num;\n\n#[macro_use]\n\nextern crate lazy_static;\n\nextern crate rand;\n\nextern crate pcg_rand;\n\n\n\nmod controllers;\n\nmod game_state;\n\nmod geometry;\n\nmod models;\n\nmod util;\n\n\n\nuse std::os::raw::{c_double, c_int};\n\nuse std::sync::Mutex;\n\n\n\nuse pcg_rand::Pcg32Basic;\n\nuse rand::SeedableRng;\n\n\n\nuse self::game_state::GameState;\n\nuse self::geometry::Size;\n\nuse self::controllers::{Actions, TimeController, CollisionsController};\n\n\n\nlazy_static! 
{\n\n static ref DATA: Mutex<GameData> = Mutex::new(new_game_data(1024.0, 600.0));\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 43, "score": 4.775111039374502 }, { "content": "\n\n /// Advances the object in the given amount of units, according to its direction\n\n fn advance(&mut self, units: f64) {\n\n *self.x_mut() += self.direction().cos() * units;\n\n *self.y_mut() += self.direction().sin() * units;\n\n }\n\n\n\n /// Similar to `Advance::advance`, but the final position will be wrapped\n\n /// around the given bounds\n\n fn advance_wrapping(&mut self, units: f64, bounds: Size) {\n\n self.advance(units);\n\n\n\n fn wrap(k: &mut f64, bound: f64) {\n\n if *k < 0.0 {\n\n *k += bound;\n\n } else if *k >= bound {\n\n *k -= bound;\n\n }\n\n }\n\n\n\n wrap(self.x_mut(), bounds.width);\n\n wrap(self.y_mut(), bounds.height);\n\n }\n\n}\n\n\n", "file_path": "src/geometry/traits.rs", "rank": 45, "score": 3.3773709257980844 }, { "content": "/// Active actions (toggled by user input)\n\n#[derive(Default)]\n\npub struct Actions {\n\n pub rotate_left: bool,\n\n pub rotate_right: bool,\n\n pub boost: bool,\n\n pub shoot: bool\n\n}\n", "file_path": "src/controllers/input.rs", "rank": 46, "score": 2.7435675940001354 }, { "content": " }\n\n\n\n draw_player(world.player.x(), world.player.y(), world.player.direction());\n\n draw_score(data.state.score as f64);\n\n}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn update(time: c_double) {\n\n let data: &mut GameData = &mut DATA.lock().unwrap();\n\n data.time_controller.update_seconds(time, &data.actions, &mut data.state);\n\n CollisionsController::handle_collisions(&mut data.state);\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 47, "score": 2.4597345755116358 }, { "content": "The MIT License (MIT)\n\n\n\nCopyright (c) 2015 Adolfo Ochagavía\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without 
restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in\n\nall copies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n\nTHE SOFTWARE.\n", "file_path": "LICENSE.md", "rank": 48, "score": 1.4254758981900968 }, { "content": "Rocket on WASM\n\n==============\n\n\n\nAn adapted version of the [Rocket](https://github.com/aochagavia/rocket) game, running on WASM!\n\n\n\n[Play now](https://thread-safe.nl/rocket) or\n\n[read the blog post](https://aochagavia.github.io/blog/rocket---a-rust-game-running-on-wasm/)\n\nabout the development of this WASM version.\n\n\n\n## Screenshot\n\n\n\n![Screenshot](screenshots/gameplay1.png)\n\n\n\n## How to play\n\n\n\nAs you can see in the screenshot, you are the red rocket and have to save the world from\n\nthe yellow invaders. To do so, you can use the following controls:\n\n\n\nKeyboard | Action\n\n----------------------- | ------------\n\n<kbd>&uparrow;</kbd> | Boost\n\n<kbd>&leftarrow;</kbd> | Rotate left\n\n<kbd>&rightarrow;</kbd> | Rotate right\n\n<kbd>Space</kbd> | Shoot\n\n\n\n## Compiling and running\n\n\n\nFollow the steps on the [hellorust website](https://www.hellorust.com/setup/wasm-target/)\n\nin order to set up everything. 
Besides the `wasm32-unknown-unknown` target, the `post_build.py`\n\nscript requires python 2.7 and `wasm-gc`.\n\n\n\nAfter setting things up, you should be able to compile the code using the commands below:\n\n\n\n```\n\ncargo build --release --target wasm32-unknown-unknown\n\npython post_build.py\n\n```\n\n\n\nThe generated wasm will be copied to the `html` directory and `wasm-gc`ed.\n\n\n\n```\n\npython -m SimpleHTTPServer\n\n```\n\n\n\nTry opening http://localhost:8000/ on your browser to check whether it works.\n\n\n\n## Related projects\n\n\n\n* [Running Rocket in a Python environment through WebAssembly](https://almarklein.org/python_and_webassembly.html)\n", "file_path": "readme.md", "rank": 49, "score": 1.0212014534784073 } ]
Rust
calyx/src/passes/compile_control.rs
yoonachang/futil
2ef8c89ac3e8c398f9db42c28ec8d495ee0a45e2
use crate::errors::Error; use crate::lang::{ ast, component::Component, context::Context, structure_builder::ASTBuilder, }; use crate::passes::visitor::{Action, Named, VisResult, Visitor}; use crate::{add_wires, guard, port, structure}; use ast::{Control, Enable, GuardExpr}; use std::collections::HashMap; use std::convert::TryInto; #[derive(Default)] pub struct CompileControl {} impl Named for CompileControl { fn name() -> &'static str { "compile-control" } fn description() -> &'static str { "Compile away all control language constructs into structure" } } impl Visitor for CompileControl { fn finish_if( &mut self, cif: &ast::If, comp: &mut Component, ctx: &Context, ) -> VisResult { let st = &mut comp.structure; let if_group: ast::Id = st.namegen.gen_name("if").into(); let if_group_node = st.insert_group(&if_group, HashMap::new())?; let cond_group_node = st.get_node_by_name(&cif.cond)?; let cond = cif.port.get_edge(st)?; let (true_group, false_group) = match (&*cif.tbranch, &*cif.fbranch) { (Control::Enable { data: t }, Control::Enable { data: f }) => { Ok((&t.comp, &f.comp)) } _ => Err(Error::MalformedControl( "Both branches of an if must be an enable.".to_string(), )), }?; let true_group_node = st.get_node_by_name(true_group)?; let false_group_node = st.get_node_by_name(false_group)?; structure!( st, &ctx, let cond_computed = prim std_reg(1); let cond_stored = prim std_reg(1); let signal_const = constant(1, 1); let signal_off = constant(0, 1); let done_reg = prim std_reg(1); ); let cond_go = !guard!(st; cond_computed["out"]); let is_cond_computed = guard!(st; cond_group_node["go"]) & guard!(st; cond_group_node["done"]); let true_turn = guard!(st; cond_computed["out"]) & guard!(st; cond_stored["out"]); let true_go = !guard!(st; true_group_node["done"]) & true_turn.clone(); let false_turn = guard!(st; cond_computed["out"]) & !guard!(st; cond_stored["out"]); let false_go = !guard!(st; false_group_node["done"]) & false_turn.clone(); let done_guard = (true_turn & 
guard!(st; true_group_node["done"])) | (false_turn & guard!(st; false_group_node["done"])); let done_reg_high = guard!(st; done_reg["out"]); add_wires!( st, Some(if_group.clone()), cond_group_node["go"] = cond_go ? (signal_const.clone()); cond_computed["in"] = is_cond_computed ? (signal_const.clone()); cond_computed["write_en"] = is_cond_computed ? (signal_const.clone()); cond_stored["in"] = is_cond_computed ? (cond.clone()); cond_stored["write_en"] = is_cond_computed ? (cond); true_group_node["go"] = true_go ? (signal_const.clone()); false_group_node["go"] = false_go ? (signal_const.clone()); done_reg["in"] = done_guard ? (signal_const.clone()); done_reg["write_en"] = done_guard ? (signal_const.clone()); if_group_node["done"] = done_reg_high ? (signal_const.clone()); ); add_wires!( st, None, done_reg["in"] = done_reg_high ? (signal_off.clone()); done_reg["write_en"] = done_reg_high ? (signal_const.clone()); cond_computed["in"] = done_reg_high ? (signal_off.clone()); cond_computed["write_en"] = done_reg_high ? (signal_const.clone()); cond_stored["in"] = done_reg_high ? (signal_off); cond_stored["write_en"] = done_reg_high ? 
(signal_const); ); Ok(Action::Change(Control::enable(if_group))) } fn finish_while( &mut self, ctrl: &ast::While, comp: &mut Component, ctx: &Context, ) -> VisResult { let st = &mut comp.structure; let while_group = st.namegen.gen_name("while").into(); let while_group_node = st.insert_group(&while_group, HashMap::new())?; let cond_group_node = st.get_node_by_name(&ctrl.cond)?; let cond = ctrl.port.get_edge(&*st)?; let body_group = match &*ctrl.body { Control::Enable { data } => Ok(&data.comp), _ => Err(Error::MalformedControl( "The body of a while must be an enable.".to_string(), )), }?; let body_group_node = st.get_node_by_name(body_group)?; structure!(st, &ctx, let cond_computed = prim std_reg(1); let cond_stored = prim std_reg(1); let done_reg = prim std_reg(1); let signal_on = constant(1, 1); let signal_off = constant(0, 1); ); let cond_go = !guard!(st; cond_computed["out"]); let is_cond_computed = guard!(st; cond_group_node["go"]) & guard!(st; cond_group_node["done"]); let body_go = guard!(st; cond_stored["out"]) & guard!(st; cond_computed["out"]) & !guard!(st; body_group_node["done"]); let cond_recompute = guard!(st; cond_stored["out"]) & guard!(st; cond_computed["out"]) & guard!(st; body_group_node["done"]); let is_cond_false = guard!(st; cond_computed["out"]) & !guard!(st; cond_stored["out"]); let done_reg_high = guard!(st; done_reg["out"]); add_wires!(st, Some(while_group.clone()), cond_group_node["go"] = cond_go ? (signal_on.clone()); cond_computed["in"] = is_cond_computed ? (signal_on.clone()); cond_computed["write_en"] = is_cond_computed ? (signal_on.clone()); cond_stored["in"] = is_cond_computed ? (cond); cond_stored["write_en"] = is_cond_computed ? (signal_on.clone()); body_group_node["go"] = body_go ? (signal_on.clone()); cond_computed["in"] = cond_recompute ? (signal_off.clone()); cond_computed["write_en"] = cond_recompute ? (signal_on.clone()); done_reg["in"] = is_cond_false ? (signal_on.clone()); done_reg["write_en"] = is_cond_false ? 
(signal_on.clone()); while_group_node["done"] = done_reg_high ? (signal_on.clone()); cond_computed["in"] = is_cond_false ? (signal_off.clone()); cond_computed["write_en"] = is_cond_false ? (signal_on.clone()); ); add_wires!(st, None, done_reg["in"] = done_reg_high ? (signal_off); done_reg["write_en"] = done_reg_high ? (signal_on); ); Ok(Action::Change(Control::enable(while_group))) } fn finish_seq( &mut self, s: &ast::Seq, comp: &mut Component, ctx: &Context, ) -> VisResult { let st = &mut comp.structure; let seq_group: ast::Id = st.namegen.gen_name("seq").into(); let seq_group_node = st.insert_group(&seq_group, HashMap::new())?; let fsm_size = 32; structure!(st, &ctx, let fsm = prim std_reg(fsm_size); let signal_on = constant(1, 1); ); for (idx, con) in s.stmts.iter().enumerate() { match con { Control::Enable { data: Enable { comp: group_name }, } => { let my_idx: u64 = idx.try_into().unwrap(); /* group[go] = fsm.out == idx & !group[done] ? 1 */ let group = st.get_node_by_name(&group_name)?; structure!(st, &ctx, let fsm_cur_state = constant(my_idx, fsm_size); let fsm_nxt_state = constant(my_idx + 1, fsm_size); ); let group_go = (guard!(st; fsm["out"]) .eq(st.to_guard(fsm_cur_state.clone()))) & !guard!(st; group["done"]); let group_done = (guard!(st; fsm["out"]) .eq(st.to_guard(fsm_cur_state.clone()))) & guard!(st; group["done"]); add_wires!(st, Some(seq_group.clone()), group["go"] = group_go ? (signal_on.clone()); fsm["in"] = group_done ? (fsm_nxt_state.clone()); fsm["write_en"] = group_done ? (signal_on.clone()); ); } _ => { return Err(Error::MalformedControl( "Cannot compile non-group statement inside sequence" .to_string(), )) } } } let final_state_val: u64 = s.stmts.len().try_into().unwrap(); structure!(st, &ctx, let reset_val = constant(0, fsm_size); let fsm_final_state = constant(final_state_val, fsm_size); ); let seq_done = guard!(st; fsm["out"]).eq(st.to_guard(fsm_final_state)); add_wires!(st, Some(seq_group.clone()), seq_group_node["done"] = seq_done ? 
(signal_on.clone()); ); add_wires!(st, None, fsm["in"] = seq_done ? (reset_val); fsm["write_en"] = seq_done ? (signal_on); ); Ok(Action::Change(Control::enable(seq_group))) } fn finish_par( &mut self, s: &ast::Par, comp: &mut Component, ctx: &Context, ) -> VisResult { let st = &mut comp.structure; let par_group: ast::Id = st.namegen.gen_name("par").into(); let par_group_idx = st.insert_group(&par_group, HashMap::new())?; let mut par_group_done: Vec<GuardExpr> = Vec::with_capacity(s.stmts.len()); let mut par_done_regs = Vec::with_capacity(s.stmts.len()); structure!(st, &ctx, let signal_on = constant(1, 1); let signal_off = constant(0, 1); let par_reset = prim std_reg(1); ); for con in s.stmts.iter() { match con { Control::Enable { data: Enable { comp: group_name }, } => { let group_idx = st.get_node_by_name(&group_name)?; structure!(st, &ctx, let par_done_reg = prim std_reg(1); ); let group_go = !(guard!(st; par_done_reg["out"]) | guard!(st; group_idx["done"])); let group_done = guard!(st; group_idx["done"]); add_wires!(st, Some(par_group.clone()), group_idx["go"] = group_go ? (signal_on.clone()); par_done_reg["in"] = group_done ? (signal_on.clone()); par_done_reg["write_en"] = group_done ? (signal_on.clone()); ); par_done_regs.push(par_done_reg); par_group_done.push(guard!(st; par_done_reg["out"])); } _ => { return Err(Error::MalformedControl( "Cannot compile non-group statement inside sequence" .to_string(), )) } } } let par_done = GuardExpr::and_vec(par_group_done); let par_reset_out = guard!(st; par_reset["out"]); add_wires!(st, Some(par_group.clone()), par_reset["in"] = par_done ? (signal_on.clone()); par_reset["write_en"] = par_done ? (signal_on.clone()); par_group_idx["done"] = par_reset_out ? (signal_on.clone()); ); add_wires!(st, None, par_reset["in"] = par_reset_out ? (signal_off.clone()); par_reset["write_en"] = par_reset_out ? (signal_on.clone()); ); for par_done_reg in par_done_regs { add_wires!(st, None, par_done_reg["in"] = par_reset_out ? 
(signal_off.clone()); par_done_reg["write_en"] = par_reset_out ? (signal_on.clone()); ); } Ok(Action::Change(Control::enable(par_group))) } }
use crate::errors::Error;
use crate::lang::{
    ast, component::Component, context::Context,
    structure_builder::ASTBuilder,
};
use crate::passes::visitor::{Action, Named, VisResult, Visitor};
use crate::{add_wires, guard, port, structure};
use ast::{Control, Enable, GuardExpr};
use std::collections::HashMap;
use std::convert::TryInto;

/// Pass that lowers control-language constructs (`if`, `while`, `seq`,
/// `par`) into structural groups, wires, and guards.
#[derive(Default)]
pub struct CompileControl {}

impl Named for CompileControl {
    fn name() -> &'static str {
        "compile-control"
    }

    fn description() -> &'static str {
        "Compile away all control language constructs into structure"
    }
}

impl Visitor for CompileControl {
    /// Compiles an `if` into a single generated group. The condition group
    /// runs once; its result is latched into `cond_stored`, after which
    /// exactly one of the two branch groups is started. `done_reg` latches
    /// completion of whichever branch ran.
    fn finish_if(
        &mut self,
        cif: &ast::If,
        comp: &mut Component,
        ctx: &Context,
    ) -> VisResult {
        let st = &mut comp.structure;

        // Fresh group that will replace this `if` in the control program.
        let if_group: ast::Id = st.namegen.gen_name("if").into();
        let if_group_node = st.insert_group(&if_group, HashMap::new())?;

        let cond_group_node = st.get_node_by_name(&cif.cond)?;
        let cond = cif.port.get_edge(st)?;

        // Both branches must already be single enables.
        let (true_group, false_group) = match (&*cif.tbranch, &*cif.fbranch) {
            (Control::Enable { data: t }, Control::Enable { data: f }) => {
                Ok((&t.comp, &f.comp))
            }
            _ => Err(Error::MalformedControl(
                "Both branches of an if must be an enable.".to_string(),
            )),
        }?;
        let true_group_node = st.get_node_by_name(true_group)?;
        let false_group_node = st.get_node_by_name(false_group)?;

        structure!(
            st, &ctx,
            // Set once the condition group has produced a value.
            let cond_computed = prim std_reg(1);
            // Latches the condition port's value.
            let cond_stored = prim std_reg(1);
            let signal_const = constant(1, 1);
            let signal_off = constant(0, 1);
            // Latches completion of the taken branch.
            let done_reg = prim std_reg(1);
        );

        // Run the condition group until its result has been captured.
        let cond_go = !guard!(st; cond_computed["out"]);
        let is_cond_computed = guard!(st; cond_group_node["go"])
            & guard!(st; cond_group_node["done"]);
        // True branch runs when the stored condition is 1; false when 0.
        let true_turn =
            guard!(st; cond_computed["out"]) & guard!(st; cond_stored["out"]);
        let true_go = !guard!(st; true_group_node["done"]) & true_turn.clone();
        let false_turn = guard!(st; cond_computed["out"])
            & !guard!(st; cond_stored["out"]);
        let false_go =
            !guard!(st; false_group_node["done"]) & false_turn.clone();
        // The if is done when whichever branch was taken finishes.
        let done_guard = (true_turn & guard!(st; true_group_node["done"]))
            | (false_turn & guard!(st; false_group_node["done"]));
        let done_reg_high = guard!(st; done_reg["out"]);

        add_wires!(
            st, Some(if_group.clone()),
            cond_group_node["go"] = cond_go ? (signal_const.clone());
            cond_computed["in"] = is_cond_computed ? (signal_const.clone());
            cond_computed["write_en"] = is_cond_computed ? (signal_const.clone());
            cond_stored["in"] = is_cond_computed ? (cond.clone());
            cond_stored["write_en"] = is_cond_computed ? (cond);
            true_group_node["go"] = true_go ? (signal_const.clone());
            false_group_node["go"] = false_go ? (signal_const.clone());
            done_reg["in"] = done_guard ? (signal_const.clone());
            done_reg["write_en"] = done_guard ? (signal_const.clone());
            if_group_node["done"] = done_reg_high ? (signal_const.clone());
        );

        // Group-less wires: clear all state registers one cycle after the
        // group signals done so the group can be reused.
        add_wires!(
            st, None,
            done_reg["in"] = done_reg_high ? (signal_off.clone());
            done_reg["write_en"] = done_reg_high ? (signal_const.clone());
            cond_computed["in"] = done_reg_high ? (signal_off.clone());
            cond_computed["write_en"] = done_reg_high ? (signal_const.clone());
            cond_stored["in"] = done_reg_high ? (signal_off);
            cond_stored["write_en"] = done_reg_high ? (signal_const);
        );

        // Replace the `if` with a single enable of the generated group.
        Ok(Action::Change(Control::enable(if_group)))
    }

    /// Compiles a `while` into a single generated group. The condition is
    /// computed, latched into `cond_stored`, and recomputed after each run
    /// of the body; the loop finishes when the latched condition is 0.
    fn finish_while(
        &mut self,
        ctrl: &ast::While,
        comp: &mut Component,
        ctx: &Context,
    ) -> VisResult {
        let st = &mut comp.structure;

        // Fresh group that will replace this `while`.
        let while_group = st.namegen.gen_name("while").into();
        let while_group_node = st.insert_group(&while_group, HashMap::new())?;

        let cond_group_node = st.get_node_by_name(&ctrl.cond)?;
        let cond = ctrl.port.get_edge(&*st)?;

        // The body must already be a single enable.
        let body_group = match &*ctrl.body {
            Control::Enable { data } => Ok(&data.comp),
            _ => Err(Error::MalformedControl(
                "The body of a while must be an enable.".to_string(),
            )),
        }?;
        let body_group_node = st.get_node_by_name(body_group)?;

        structure!(st, &ctx,
            // Set while the current condition value is valid.
            let cond_computed = prim std_reg(1);
            // Latches the condition port's value.
            let cond_stored = prim std_reg(1);
            // Latches loop completion.
            let done_reg = prim std_reg(1);
            let signal_on = constant(1, 1);
            let signal_off = constant(0, 1);
        );

        // (Re)run the condition group whenever no valid value is latched.
        let cond_go = !guard!(st; cond_computed["out"]);
        let is_cond_computed = guard!(st; cond_group_node["go"])
            & guard!(st; cond_group_node["done"]);
        // Run the body while the latched condition is 1.
        let body_go = guard!(st; cond_stored["out"])
            & guard!(st; cond_computed["out"])
            & !guard!(st; body_group_node["done"]);
        // After a body iteration, invalidate the condition to recompute it.
        let cond_recompute = guard!(st; cond_stored["out"])
            & guard!(st; cond_computed["out"])
            & guard!(st; body_group_node["done"]);
        // Latched condition is 0 => the loop is finished.
        let is_cond_false = guard!(st; cond_computed["out"])
            & !guard!(st; cond_stored["out"]);
        let done_reg_high = guard!(st; done_reg["out"]);

        add_wires!(st, Some(while_group.clone()),
            cond_group_node["go"] = cond_go ? (signal_on.clone());
            cond_computed["in"] = is_cond_computed ? (signal_on.clone());
            cond_computed["write_en"] = is_cond_computed ? (signal_on.clone());
            cond_stored["in"] = is_cond_computed ? (cond);
            cond_stored["write_en"] = is_cond_computed ? (signal_on.clone());
            body_group_node["go"] = body_go ? (signal_on.clone());
            cond_computed["in"] = cond_recompute ? (signal_off.clone());
            cond_computed["write_en"] = cond_recompute ? (signal_on.clone());
            done_reg["in"] = is_cond_false ? (signal_on.clone());
            done_reg["write_en"] = is_cond_false ? (signal_on.clone());
            while_group_node["done"] = done_reg_high ? (signal_on.clone());
            cond_computed["in"] = is_cond_false ? (signal_off.clone());
            cond_computed["write_en"] = is_cond_false ? (signal_on.clone());
        );

        // Group-less wires: clear `done_reg` after the group signals done.
        add_wires!(st, None,
            done_reg["in"] = done_reg_high ? (signal_off);
            done_reg["write_en"] = done_reg_high ? (signal_on);
        );

        // Replace the `while` with a single enable of the generated group.
        Ok(Action::Change(Control::enable(while_group)))
    }

    /// Compiles a `seq` into a single generated group driven by a 32-bit
    /// FSM register: state `i` runs the `i`-th child group, and the child's
    /// `done` advances the FSM to state `i + 1`. The seq is done when the
    /// FSM equals the number of children.
    fn finish_seq(
        &mut self,
        s: &ast::Seq,
        comp: &mut Component,
        ctx: &Context,
    ) -> VisResult {
        let st = &mut comp.structure;

        // Fresh group that will replace this `seq`.
        let seq_group: ast::Id = st.namegen.gen_name("seq").into();
        let seq_group_node = st.insert_group(&seq_group, HashMap::new())?;

        // NOTE(review): FSM width is fixed at 32 bits here; wide enough for
        // any realistic sequence length, but not derived from stmts.len().
        let fsm_size = 32;
        structure!(st, &ctx,
            let fsm = prim std_reg(fsm_size);
            let signal_on = constant(1, 1);
        );

        for (idx, con) in s.stmts.iter().enumerate() {
            match con {
                Control::Enable {
                    data: Enable { comp: group_name },
                } => {
                    let my_idx: u64 = idx.try_into().unwrap();
                    /* group[go] = fsm.out == idx & !group[done] ? 1 */
                    let group = st.get_node_by_name(&group_name)?;

                    // Constants for this child's FSM state and its successor.
                    structure!(st, &ctx,
                        let fsm_cur_state = constant(my_idx, fsm_size);
                        let fsm_nxt_state = constant(my_idx + 1, fsm_size);
                    );

                    // Run the child while the FSM sits in its state.
                    let group_go = (guard!(st; fsm["out"])
                        .eq(st.to_guard(fsm_cur_state.clone())))
                        & !guard!(st; group["done"]);
                    // Child finished in its state => advance the FSM.
                    let group_done = (guard!(st; fsm["out"])
                        .eq(st.to_guard(fsm_cur_state.clone())))
                        & guard!(st; group["done"]);

                    add_wires!(st, Some(seq_group.clone()),
                        group["go"] = group_go ? (signal_on.clone());
                        fsm["in"] = group_done ? (fsm_nxt_state.clone());
                        fsm["write_en"] = group_done ? (signal_on.clone());
                    );
                }
                _ => {
                    return Err(Error::MalformedControl(
                        "Cannot compile non-group statement inside sequence"
                            .to_string(),
                    ))
                }
            }
        }

        // The seq is done when the FSM has stepped past every child.
        let final_state_val: u64 = s.stmts.len().try_into().unwrap();
        structure!(st, &ctx,
            let reset_val = constant(0, fsm_size);
            let fsm_final_state = constant(final_state_val, fsm_size);
        );
        let seq_done = guard!(st; fsm["out"]).eq(st.to_guard(fsm_final_state));

        add_wires!(st, Some(seq_group.clone()),
            seq_group_node["done"] = seq_done ? (signal_on.clone());
        );
        // Group-less wires: reset the FSM once the sequence completes.
        add_wires!(st, None,
            fsm["in"] = seq_done ? (reset_val);
            fsm["write_en"] = seq_done ? (signal_on);
        );

        // Replace the `seq` with a single enable of the generated group.
        Ok(Action::Change(Control::enable(seq_group)))
    }
}
fn finish_par( &mut self, s: &ast::Par, comp: &mut Component, ctx: &Context, ) -> VisResult { let st = &mut comp.structure; let par_group: ast::Id = st.namegen.gen_name("par").into(); let par_group_idx = st.insert_group(&par_group, HashMap::new())?; let mut par_group_done: Vec<GuardExpr> = Vec::with_capacity(s.stmts.len()); let mut par_done_regs = Vec::with_capacity(s.stmts.len()); structure!(st, &ctx, let signal_on = constant(1, 1); let signal_off = constant(0, 1); let par_reset = prim std_reg(1); ); for con in s.stmts.iter() { match con { Control::Enable { data: Enable { comp: group_name }, } => { let group_idx = st.get_node_by_name(&group_name)?; structure!(st, &ctx, let par_done_reg = prim std_reg(1); ); let group_go = !(guard!(st; par_done_reg["out"]) | guard!(st; group_idx["done"])); let group_done = guard!(st; group_idx["done"]); add_wires!(st, Some(par_group.clone()), group_idx["go"] = group_go ? (signal_on.clone()); par_done_reg["in"] = group_done ? (signal_on.clone()); par_done_reg["write_en"] = group_done ? (signal_on.clone()); ); par_done_regs.push(par_done_reg); par_group_done.push(guard!(st; par_done_reg["out"])); } _ => { return Err(Error::MalformedControl( "Cannot compile non-group statement inside sequence" .to_string(), )) } } } let par_done = GuardExpr::and_vec(par_group_done); let par_reset_out = guard!(st; par_reset["out"]); add_wires!(st, Some(par_group.clone()), par_reset["in"] = par_done ? (signal_on.clone()); par_reset["write_en"] = par_done ? (signal_on.clone()); par_group_idx["done"] = par_reset_out ? (signal_on.clone()); ); add_wires!(st, None, par_reset["in"] = par_reset_out ? (signal_off.clone()); par_reset["write_en"] = par_reset_out ? (signal_on.clone()); ); for par_done_reg in par_done_regs { add_wires!(st, None, par_done_reg["in"] = par_reset_out ? (signal_off.clone()); par_done_reg["write_en"] = par_reset_out ? (signal_on.clone()); ); } Ok(Action::Change(Control::enable(par_group))) }
function_block-full_function
[ { "content": "/// Given a StructureGraph `st`, this function inlines assignments\n\n/// to `x[hole]` into any uses of `x[hole]` in a GuardExpr. This works\n\n/// in 2 passes over the edges. The first pass only considers assignments\n\n/// into `x[hole]` and builds up a mapping from `x[hole] -> (edge index, guards)`\n\n/// The second pass considers every edge and uses the map to replace every instance\n\n/// of `x[hole]` in a guard with `guards`.\n\nfn inline_hole(st: &mut StructureGraph, hole: String) {\n\n // a mapping from atoms (dst) -> Vec<GuardExpr> (sources) that write\n\n // into the atom.\n\n let mut multi_guard_map: HashMap<Atom, Vec<GuardExpr>> = HashMap::new();\n\n\n\n // build up the mapping by mapping over all writes into holes\n\n for idx in st\n\n .edge_idx()\n\n .with_direction(DataDirection::Write)\n\n .with_node_type(NodeType::Hole)\n\n .with_port(hole.clone())\n\n {\n\n let ed = &st.get_edge(idx);\n\n let (src_idx, dest_idx) = st.endpoints(idx);\n\n let guard_opt = ed.guard.clone();\n\n let atom = st.to_atom((src_idx, ed.src.port_name().clone()));\n\n\n\n // ASSUMES: The values being assigned into holes are one-bit.\n\n // Transform `x[hole] = src` into `x[hole] = src ? 1`;\n\n let guard = match guard_opt {\n", "file_path": "calyx/src/passes/inliner.rs", "rank": 0, "score": 181622.5769830363 }, { "content": "/// Opaque handle to a port on a component. 
These can only be created by\n\n/// calling Node::port_handle method and forces uses of ports to make sure\n\n/// that they exist on the Node.\n\nstruct PortHandle<'a>(&'a ast::Id);\n\n*/\n\n\n\nimpl Iterator for PortIter {\n\n type Item = ast::Id;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n match self.items.len().cmp(&1) {\n\n Ordering::Greater | Ordering::Equal => {\n\n let ret = Some(self.items[0].name.clone());\n\n self.items = (&self.items[1..]).to_vec();\n\n ret\n\n }\n\n Ordering::Less => None,\n\n }\n\n }\n\n}\n\n\n\nimpl Node {\n\n pub fn get_component_type(&self) -> Result<&ast::Id> {\n", "file_path": "calyx/src/lang/structure.rs", "rank": 1, "score": 181279.8788932835 }, { "content": "/// Returns `Ok` if the control for `comp` is either a single `enable`\n\n/// or `empty`.\n\nfn validate_control(comp: &component::Component) -> Result<()> {\n\n match &comp.control {\n\n Control::Empty { .. } => Ok(()),\n\n _ => Err(Error::MalformedControl(\n\n \"Must either be a single enable or an empty statement\".to_string(),\n\n )),\n\n }\n\n}\n\n\n\nimpl Backend for VerilogBackend {\n\n fn name() -> &'static str {\n\n \"verilog\"\n\n }\n\n\n\n fn validate(ctx: &context::Context) -> Result<()> {\n\n ctx.definitions_iter(|_, comp| {\n\n validate_structure(comp)?;\n\n validate_control(comp)\n\n })\n\n }\n", "file_path": "calyx/src/backend/verilog/gen.rs", "rank": 2, "score": 180835.9082235366 }, { "content": "/// Returns `Ok` if there are no groups defined.\n\nfn validate_structure(comp: &component::Component) -> Result<()> {\n\n let valid = comp.structure.edge_idx().all(|idx| {\n\n let edge = &comp.structure.get_edge(idx);\n\n edge.guard\n\n .as_ref()\n\n .map(|g| validate_guard(g))\n\n .unwrap_or(true)\n\n && edge.group.is_none()\n\n });\n\n if valid {\n\n Ok(())\n\n } else {\n\n Err(Error::MalformedStructure(\n\n \"Groups / Holes can not be turned into Verilog\".to_string(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "calyx/src/backend/verilog/gen.rs", "rank": 
3, "score": 180534.58076458512 }, { "content": "/// Converts a guarded edge into a Verilog string\n\nfn guard<'a>(expr: &GuardExpr, ctx: ParenCtx) -> D<'a> {\n\n use ParenCtx as P;\n\n match expr {\n\n GuardExpr::Atom(a) => atom(a),\n\n GuardExpr::Not(a) => {\n\n let ret = D::text(expr.op_str()).append(guard(a, P::Not));\n\n if ctx > P::Not {\n\n ret.parens()\n\n } else {\n\n ret\n\n }\n\n }\n\n GuardExpr::And(bs) => {\n\n let ret = D::intersperse(\n\n bs.iter().map(|b| guard(b, P::And)).collect::<Vec<_>>(),\n\n D::text(\" & \"),\n\n );\n\n if ctx > P::And {\n\n ret.parens()\n\n } else {\n", "file_path": "calyx/src/backend/verilog/gen.rs", "rank": 4, "score": 175653.9665575106 }, { "content": "pub fn display(doc: RcDoc<ColorSpec>, write: OutputFile, ctx: &Context) {\n\n match write {\n\n OutputFile::Stdout => {\n\n if ctx.force_color || write.isatty() {\n\n doc.render_colored(\n\n 100,\n\n StandardStream::stdout(ColorChoice::Auto),\n\n )\n\n .unwrap();\n\n } else {\n\n doc.render(100, &mut std::io::stdout()).unwrap();\n\n }\n\n }\n\n OutputFile::File(_) => {\n\n doc.render(100, &mut write.get_write()).unwrap();\n\n }\n\n }\n\n}\n\n\n", "file_path": "calyx/src/frontend/pretty_print.rs", "rank": 5, "score": 171519.68531049654 }, { "content": "pub fn _eval(_c: &Context) {\n\n unimplemented!(\"Interpreter is not implemeted.\");\n\n}\n", "file_path": "calyx/src/backend/interpreter/eval.rs", "rank": 6, "score": 168604.9535090722 }, { "content": "/// Function to iterate over a vector of control statements and collect\n\n/// the \"static\" attribute using the `acc` function.\n\n/// Returns None if any of of the Control statements is a compound statement.\n\nfn accumulate_static_time<F>(\n\n st: &StructureGraph,\n\n stmts: &[Control],\n\n acc: F,\n\n) -> Option<u64>\n\nwhere\n\n F: FnMut(u64, &u64) -> u64,\n\n{\n\n let timing: Result<Vec<&u64>, ()> = stmts\n\n .iter()\n\n .map(|con| {\n\n if let Control::Enable {\n\n data: Enable { comp: group },\n\n } = con\n\n {\n\n 
st.groups[&Some(group.clone())]\n\n .0\n\n .get(\"static\")\n\n .ok_or_else(|| ())\n\n } else {\n", "file_path": "calyx/src/passes/static_timing.rs", "rank": 7, "score": 165534.90470277972 }, { "content": "fn backends() -> Vec<(&'static str, BackendOpt)> {\n\n vec![\n\n (VerilogBackend::name(), BackendOpt::Verilog),\n\n (\"futil\", BackendOpt::Futil),\n\n (\"dot\", BackendOpt::Dot),\n\n (\"none\", BackendOpt::None),\n\n ]\n\n}\n\n\n\nimpl Default for BackendOpt {\n\n fn default() -> Self {\n\n BackendOpt::Futil\n\n }\n\n}\n\n\n\n/// Command line parsing for the Backend enum\n\nimpl FromStr for BackendOpt {\n\n type Err = String;\n\n fn from_str(input: &str) -> std::result::Result<Self, Self::Err> {\n\n // allocate a vector for the list of backends\n", "file_path": "src/cmdline.rs", "rank": 8, "score": 163738.58794384066 }, { "content": "#[allow(unused)]\n\npub fn calculate_hash<T: Hash>(t: &T) -> u64 {\n\n let mut s = DefaultHasher::new();\n\n t.hash(&mut s);\n\n s.finish()\n\n}\n", "file_path": "calyx/src/utils.rs", "rank": 9, "score": 149901.3995567797 }, { "content": "//==========================================\n\n// Connection Functions\n\n//==========================================\n\n/// Generate wire connections\n\nfn connections<'a>(comp: &component::Component) -> D<'a> {\n\n let doc = comp\n\n .structure\n\n .component_iterator()\n\n // for every component\n\n .map(|(idx, node)| {\n\n node.signature\n\n .inputs\n\n .iter()\n\n // get all the edges writing into a port\n\n .map(|portdef| {\n\n get_all_edges(&comp, idx, portdef.name.to_string())\n\n })\n\n // remove empty edges\n\n .filter(|(_, edges)| !edges.is_empty())\n\n .map(|(port, edges)| gen_assigns(node, port, edges))\n\n .collect::<Vec<_>>()\n\n })\n\n .flatten();\n\n\n\n D::text(\"always_comb begin\")\n\n .append(D::line().append(D::intersperse(doc, D::line())).nest(2))\n\n .append(D::line())\n\n .append(D::text(\"end\"))\n\n}\n\n\n", "file_path": "calyx/src/backend/verilog/gen.rs", "rank": 10, 
"score": 144239.3578683939 }, { "content": "/// Turn u64 into a formatted Verilog bitwidth specifier.\n\npub fn bitwidth<'a>(width: u64) -> Result<D<'a>> {\n\n match width.cmp(&1) {\n\n Ordering::Less => unreachable!(),\n\n Ordering::Equal => Ok(D::nil()),\n\n Ordering::Greater => {\n\n Ok(D::text(format!(\"[{}:0]\", width - 1)).append(D::space()))\n\n }\n\n }\n\n}\n\n\n", "file_path": "calyx/src/backend/verilog/gen.rs", "rank": 11, "score": 144133.55669010256 }, { "content": "/// Checks to make sure that there are no holes being\n\n/// used in a guard.\n\nfn validate_guard(guard: &GuardExpr) -> bool {\n\n match guard {\n\n GuardExpr::And(bs) => bs.iter().all(|b| validate_guard(b)),\n\n GuardExpr::Or(bs) => bs.iter().all(|b| validate_guard(b)),\n\n GuardExpr::Eq(left, right) => {\n\n validate_guard(left) && validate_guard(right)\n\n }\n\n GuardExpr::Neq(left, right) => {\n\n validate_guard(left) && validate_guard(right)\n\n }\n\n GuardExpr::Gt(left, right) => {\n\n validate_guard(left) && validate_guard(right)\n\n }\n\n GuardExpr::Lt(left, right) => {\n\n validate_guard(left) && validate_guard(right)\n\n }\n\n GuardExpr::Geq(left, right) => {\n\n validate_guard(left) && validate_guard(right)\n\n }\n\n GuardExpr::Leq(left, right) => {\n\n validate_guard(left) && validate_guard(right)\n\n }\n\n GuardExpr::Not(inner) => validate_guard(inner),\n\n GuardExpr::Atom(Atom::Port(p)) => {\n\n matches!(p, Port::Comp { .. } | Port::This { .. 
})\n\n }\n\n GuardExpr::Atom(Atom::Num(_)) => true,\n\n }\n\n}\n\n\n", "file_path": "calyx/src/backend/verilog/gen.rs", "rank": 12, "score": 143339.75809577247 }, { "content": "//==========================================\n\n// Memory init functions\n\n//==========================================\n\n// Generates code of the form:\n\n// ```\n\n// import \"DPI-C\" function string futil_getenv (input string env_var);\n\n// string DATA;\n\n// initial begin\n\n// DATA = futil_getenv(\"DATA\");\n\n// $display(\"DATA: %s\", DATA);\n\n// $readmemh({DATA, \"/<mem_name>.out\"}, <mem_name>.mem);\n\n// ...\n\n// end\n\n// final begin\n\n// $writememh({DATA, \"/<mem_name>.out\"}, <mem_name>.mem);\n\n// end\n\n// ```\n\nfn memory_init<'a>(comp: &component::Component) -> D<'a> {\n\n // Import futil helper library.\n\n const IMPORT_STMT: &str =\n\n \"import \\\"DPI-C\\\" function string futil_getenv (input string env_var);\";\n\n const DATA_DECL: &str = \"string DATA;\";\n\n const DATA_GET: &str = \"DATA = futil_getenv(\\\"DATA\\\");\";\n\n const DATA_DISP: &str =\n\n \"$fdisplay(2, \\\"DATA (path to meminit files): %s\\\", DATA);\";\n\n\n\n let initial_block = D::text(\"initial begin\")\n\n .append(D::line())\n\n .append(\n\n (D::text(DATA_GET)\n\n .append(D::line())\n\n .append(DATA_DISP)\n\n .append(memory_load_store(\"$readmemh\", \"dat\", &comp)))\n\n .nest(4),\n\n )\n\n .append(D::line())\n\n .append(\"end\");\n", "file_path": "calyx/src/backend/verilog/gen.rs", "rank": 13, "score": 141915.6046179103 }, { "content": "//==========================================\n\n// Subcomponent Instance Functions\n\n//==========================================\n\n/// Generate Verilog for each subcomponent instanstiation and\n\n/// wire up all the ports.\n\nfn subcomponent_instances<'a>(comp: &component::Component) -> D<'a> {\n\n let doc = comp\n\n .structure\n\n .component_iterator()\n\n .filter_map(|(idx, node)| {\n\n if let NodeData::Cell(cell) = &node.data {\n\n Some((node, idx, 
cell))\n\n } else {\n\n None\n\n }\n\n })\n\n .map(|(node, idx, cell)| {\n\n subcomponent_sig(&node.name, &cell)\n\n .append(D::space())\n\n .append(\n\n D::line()\n\n .append(signature_connections(\n\n &node.signature,\n\n &comp,\n\n idx,\n\n ))\n\n .nest(4)\n\n .append(D::line())\n\n .parens(),\n\n )\n\n .append(\";\")\n\n });\n\n D::intersperse(doc, D::line().append(D::line()))\n\n}\n\n\n", "file_path": "calyx/src/backend/verilog/gen.rs", "rank": 14, "score": 141909.9335092104 }, { "content": "/// Walks the `GuardExpr` ast and replaces Atoms `a` with\n\n/// it's corresponding entry in `map` if one exists.\n\nfn tree_walk(guard: GuardExpr, map: &GuardMap) -> GuardExpr {\n\n match guard {\n\n GuardExpr::Atom(a) => map\n\n .get(&a)\n\n .map(|g| tree_walk(g.clone(), &map))\n\n .unwrap_or(GuardExpr::Atom(a)),\n\n GuardExpr::Not(inner) => {\n\n GuardExpr::Not(Box::new(tree_walk(*inner, &map)))\n\n }\n\n GuardExpr::And(bs) => GuardExpr::and_vec(\n\n bs.into_iter().map(|b| tree_walk(b, &map)).collect(),\n\n ),\n\n GuardExpr::Or(bs) => GuardExpr::or_vec(\n\n bs.into_iter().map(|b| tree_walk(b, &map)).collect(),\n\n ),\n\n GuardExpr::Eq(left, right) => GuardExpr::Eq(\n\n Box::new(tree_walk(*left, &map)),\n\n Box::new(tree_walk(*right, &map)),\n\n ),\n\n GuardExpr::Neq(left, right) => GuardExpr::Neq(\n", "file_path": "calyx/src/passes/inliner.rs", "rank": 15, "score": 141358.96801280003 }, { "content": "/// Add `go`/`done`/`clk` ports to a signature.\n\nfn extend_sig(mut sig: Signature) -> Signature {\n\n sig.add_input(\"go\", 1);\n\n sig.add_input(\"clk\", 1);\n\n sig.add_output(\"done\", 1);\n\n sig\n\n}\n\n\n\nimpl Context {\n\n /// Generates a Context from a namespace and slice of libraries.\n\n ///\n\n /// # Arguments\n\n /// * `namespace` - command line options\n\n /// * `libs` - slice of library asts\n\n /// # Returns\n\n /// Returns a Context object for the compilation unit,\n\n /// or an error.\n\n pub fn from_ast(\n\n namespace: ast::NamespaceDef,\n\n libraries: 
&[lib::Library],\n\n debug_mode: bool,\n", "file_path": "calyx/src/lang/context.rs", "rank": 16, "score": 139493.32981206995 }, { "content": "/// Trait that describes named things. Calling `do_pass` and `do_pass_default`\n\n/// require this to be implemented. This has to be a separate trait from `Visitor`\n\n/// because these methods don't recieve `self` which means that it is impossible\n\n/// to create dynamic trait objects.\n\npub trait Named {\n\n /// The name of a pass. Is used for identifying passes.\n\n fn name() -> &'static str;\n\n\n\n /// A short description of the pass.\n\n fn description() -> &'static str {\n\n \"no description provided\"\n\n }\n\n}\n\n\n", "file_path": "calyx/src/passes/visitor.rs", "rank": 17, "score": 139433.2265459329 }, { "content": "//==========================================\n\n// Wire Declaration Functions\n\n//==========================================\n\n/// Generate all the wire declarations for `comp`\n\nfn wire_declarations<'a>(comp: &component::Component) -> Result<D<'a>> {\n\n let wires = comp\n\n .structure\n\n .component_iterator()\n\n // filter for cells because we don't need to declare wires for ports\n\n .filter_map(|(_idx, node)| match &node.data {\n\n NodeData::Cell(_) => Some(node),\n\n _ => None,\n\n })\n\n // extract name, portdef from input / output of signature\n\n .map(|node| {\n\n node.signature\n\n .inputs\n\n .iter()\n\n .map(move |pd| (&node.name, pd))\n\n .chain(\n\n node.signature\n\n .outputs\n\n .iter()\n\n .map(move |pd| (&node.name, pd)),\n", "file_path": "calyx/src/backend/verilog/gen.rs", "rank": 18, "score": 137472.27279472045 }, { "content": "/// Generates just the Verilog instanstiation code, but none\n\n/// of the connections.\n\nfn subcomponent_sig<'a>(id: &ast::Id, structure: &ast::Cell) -> D<'a> {\n\n let (name, params): (&ast::Id, &[u64]) = match structure {\n\n Cell::Decl { data } => (&data.component, &[]),\n\n Cell::Prim { data } => (&data.instance.name, &data.instance.params),\n\n 
};\n\n\n\n D::text(name.to_string())\n\n .append(D::line())\n\n .append(\"#\")\n\n .append(\n\n D::intersperse(\n\n params.iter().map(|param| D::text(param.to_string())),\n\n D::text(\",\").append(D::line()),\n\n )\n\n .group()\n\n .parens(),\n\n )\n\n .append(D::line())\n\n .append(id.to_string())\n\n .group()\n\n}\n\n\n", "file_path": "calyx/src/backend/verilog/gen.rs", "rank": 19, "score": 136715.94329076298 }, { "content": "/// The `Visitor` trait parameterized on an `Error` type.\n\n/// For each node `x` in the Ast, there are the functions `start_x`\n\n/// and `finish_x`. The start functions are called at the beginning\n\n/// of the traversal for each node, and the finish functions are called\n\n/// at the end of the traversal for each node. You can use the finish\n\n/// functions to wrap error with more information.\n\npub trait Visitor {\n\n fn do_pass_default(context: &Context) -> Result<Self, errors::Error>\n\n where\n\n Self: Default + Sized + Named,\n\n {\n\n let mut visitor = Self::default();\n\n visitor.do_pass(&context)?;\n\n Ok(visitor)\n\n }\n\n\n\n fn do_pass(&mut self, context: &Context) -> Result<(), errors::Error>\n\n where\n\n Self: Sized + Named,\n\n {\n\n context.definitions_iter(|_id, mut comp| {\n\n let _ = self\n\n .start(&mut comp, context)?\n\n .and_then(|| {\n\n // clone component control so that we can visit the control and provide\n\n // mutable access to the component\n", "file_path": "calyx/src/passes/visitor.rs", "rank": 20, "score": 136397.56948134213 }, { "content": "/// Implements convience functions to build commonly used ast nodes and\n\n/// add them to the structure graph.\n\npub trait ASTBuilder {\n\n /// Abstract representation for the indexing types used by the underlying\n\n /// graph representation.\n\n type ComponentHandle;\n\n type ConnectionHandle;\n\n\n\n /// Representation of values representing the port.\n\n type PortRep;\n\n\n\n /// Construct a new primitive of type `prim` with paramters `params`.\n\n /// The 
identifier for this component uses the prefix `name_prefix`.\n\n /// Uses the `ctx` to check the well-formedness of the primitive\n\n /// instantiation.\n\n ///\n\n /// Returns a handle to the component that can be used by the underlying\n\n /// graph representation to access this new components internal\n\n /// representation.\n\n fn new_primitive<S: AsRef<str>>(\n\n &mut self,\n\n ctx: &Context,\n", "file_path": "calyx/src/lang/structure_builder.rs", "rank": 21, "score": 132652.5388072487 }, { "content": "pub trait Visitable {\n\n fn visit(\n\n &mut self,\n\n visitor: &mut dyn Visitor,\n\n component: &mut Component,\n\n context: &Context,\n\n ) -> VisResult;\n\n}\n\n\n\n// Blanket impl for Vectors of Visitables\n\nimpl<V: Visitable> Visitable for Vec<V> {\n\n fn visit(\n\n &mut self,\n\n visitor: &mut dyn Visitor,\n\n component: &mut Component,\n\n context: &Context,\n\n ) -> VisResult {\n\n for t in self {\n\n match t.visit(visitor, component, context)? {\n\n Action::Continue | Action::Change(_) => continue,\n", "file_path": "calyx/src/passes/visitor.rs", "rank": 22, "score": 101273.61080404607 }, { "content": "/// Construct the pass manager by registering all passes and aliases used\n\n/// by the command line.\n\nfn construct_pass_manager() -> Result<PassManager> {\n\n // Construct the pass manager and register all passes.\n\n let mut pm = PassManager::new();\n\n\n\n // Register passes.\n\n register_pass!(pm, WellFormed);\n\n register_pass!(pm, StaticTiming);\n\n register_pass!(pm, CompileControl);\n\n register_pass!(pm, GoInsertion);\n\n register_pass!(pm, ComponentInterface);\n\n register_pass!(pm, Inliner);\n\n register_pass!(pm, MergeAssign);\n\n register_pass!(pm, Externalize);\n\n register_pass!(pm, RemoveExternalMemories);\n\n register_pass!(pm, CollapseControl);\n\n register_pass!(pm, CompileEmpty);\n\n register_pass!(pm, Papercut);\n\n\n\n // Register aliases\n\n register_alias!(\n", "file_path": "src/main.rs", "rank": 23, "score": 94197.7781787533 }, 
{ "content": "/// Generates a Verilog identifier for a (Node, String).\n\n/// * NodeData::Cell(..) => name_port\n\n/// * NodeData::Port => port\n\n/// * NodeData::Hole => impossible!\n\n/// * NodeData::Constant({width: w, value: v}) => w'dv\n\nfn wire_id_from_node<'a>(node: &Node, port: String) -> D<'a> {\n\n match &node.data {\n\n NodeData::Cell(..) => {\n\n D::text(format!(\"{}_{}\", node.name.to_string(), port))\n\n }\n\n NodeData::Port => D::text(port),\n\n NodeData::Hole(name) => {\n\n unreachable!(format!(\"Structure has a hole: {}\", name.id))\n\n }\n\n NodeData::Constant(n) => D::text(format!(\"{}'d{}\", n.width, n.val)),\n\n }\n\n}\n\n\n\n/// Tracks the context in the guards to only generate parens when inside an\n\n/// operator with stronger binding.\n", "file_path": "calyx/src/backend/verilog/gen.rs", "rank": 24, "score": 93030.16947162464 }, { "content": "pub fn comment(doc: RcDoc<ColorSpec>) -> RcDoc<ColorSpec> {\n\n let mut c = ColorSpec::new();\n\n c.set_fg(Some(Color::Rgb(100, 100, 100)));\n\n doc.annotate(c)\n\n}\n", "file_path": "calyx/src/frontend/colors.rs", "rank": 25, "score": 92087.50097214515 }, { "content": "fn main() {\n\n let mut app = Opts::clap();\n\n match option_env!(\"CALYX_AC_ZSH\") {\n\n None => (),\n\n Some(dir) => {\n\n app.gen_completions(env!(\"CARGO_PKG_NAME\"), Shell::Zsh, dir)\n\n }\n\n }\n\n match option_env!(\"CALYX_AUTOCOMPLETION_BASH\") {\n\n None => (),\n\n Some(dir) => {\n\n app.gen_completions(env!(\"CARGO_PKG_NAME\"), Shell::Bash, dir)\n\n }\n\n }\n\n}\n", "file_path": "build.rs", "rank": 48, "score": 70699.28650595441 }, { "content": "/// Get all the assignments to a given (node, port) pair.\n\nfn get_all_edges(\n\n comp: &component::Component,\n\n node: NodeIndex,\n\n port: String,\n\n) -> (String, Vec<(EdgeData, &Node)>) {\n\n // collect all edges writing into this node and port\n\n let edges = comp\n\n .structure\n\n .edge_idx()\n\n .with_direction(DataDirection::Write)\n\n .with_node(node)\n\n 
.with_port(port.clone())\n\n .map(|idx| {\n\n (\n\n comp.structure.get_edge(idx).clone(),\n\n comp.structure.get_node(comp.structure.endpoints(idx).0),\n\n )\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n (port, edges)\n\n}\n\n\n", "file_path": "calyx/src/backend/verilog/gen.rs", "rank": 49, "score": 65089.439908571694 }, { "content": "fn main() -> Result<()> {\n\n let pm = construct_pass_manager()?;\n\n\n\n // parse the command line arguments into Opts struct\n\n let opts: Opts = Opts::from_args();\n\n\n\n // list all the avaliable pass options when flag --list-passes is enabled\n\n if opts.list_passes {\n\n println!(\"{}\", pm.show_names());\n\n return Ok(());\n\n }\n\n\n\n // ==== Construct the context ====\n\n // parse the file\n\n let namespace = match &opts.file {\n\n Some(file) => syntax::FutilParser::parse_file(&file),\n\n None => {\n\n if atty::isnt(Stream::Stdin) {\n\n syntax::FutilParser::parse(stdin())\n\n } else {\n", "file_path": "src/main.rs", "rank": 50, "score": 64823.329976055815 }, { "content": "/// A mapping from destination ports to the guard expressions that write\n\n/// into them.\n\ntype GuardMap = HashMap<Atom, GuardExpr>;\n\n\n", "file_path": "calyx/src/passes/inliner.rs", "rank": 51, "score": 64458.64001446463 }, { "content": "# The FuTIL Compiler\n\n\n\nThe FuTIL compiler has several command line to control the execution of various\n\npasses and backends.\n\n\n\n### Controlling Passes\n\n\n\nThe compiler is organized as a sequence of passes that are run when the compiler\n\nexecutes.\n\n\n\nTo get a complete list of all passes, run the following from the repository\n\nroot:\n\n\n\n```\n\ncargo run -- --list-passes\n\n```\n\n\n\nThis generates results of the form:\n\n\n\n```\n\nPasses:\n\n- collapse-control\n\n- compile-control\n\n...\n\n\n\nAliases:\n\n- all: well-formed, papercut, remove-external-memories, ...\n\n...\n\n```\n\n\n\nThe first section list all the passes implemented in the compiler.\n\nThe second section lists *aliases* for 
combination of passes that are commonly\n\nrun together.\n\nFor example, the alias `all` is an ordered sequence of default passes executed\n\nwhen the compiler is run from the command-line.\n\n\n\nThe command-line provides two options to control the execution of passes:\n\n- `-p, --pass`: Execute this pass or alias. Overrides default alias.\n\n- `-d, --disable-pass`: Disable this pass or alias. Takes priority over `-p`.\n\n\n\nFor example, we can run the following to disable the `static-timing` pass from\n\nthe default execution alias `all`:\n\n\n\n```bash\n\ncd futil\n\ncargo run -- examples/futil/simple.futil -p all -d static-timing\n\n```\n", "file_path": "docs/compiler.md", "rank": 52, "score": 63490.92488470395 }, { "content": "/// All backends must implement this trait.\n\n/// `Backend::name` returns the name of this backend.\n\n/// `Backend::validate` should return `Ok(())` if the\n\n/// program is in the expected form and `Err(...)` otherwise.\n\n/// `Backend::emit` should convert the program to a formted string\n\n/// `Backend::run` is the composition of these two functions.\n\npub trait Backend {\n\n fn name() -> &'static str;\n\n fn validate(prog: &context::Context) -> Result<()>;\n\n fn emit(prog: &context::Context, write: OutputFile) -> Result<()>;\n\n fn run(prog: &context::Context, file: OutputFile) -> Result<()> {\n\n Self::validate(&prog)?;\n\n Self::emit(prog, file)\n\n }\n\n}\n\n\n", "file_path": "calyx/src/backend/traits.rs", "rank": 53, "score": 63302.59694527539 }, { "content": "pub trait Emitable {\n\n fn doc<'a>(\n\n &self,\n\n ctx: &context::Context,\n\n comp: &component::Component,\n\n ) -> Result<RcDoc<'a, ColorSpec>>;\n\n}\n", "file_path": "calyx/src/backend/traits.rs", "rank": 54, "score": 63290.251284078586 }, { "content": "fn block<'a>(\n\n name: RcDoc<'a, ColorSpec>,\n\n doc: RcDoc<'a, ColorSpec>,\n\n) -> RcDoc<'a, ColorSpec> {\n\n name.append(RcDoc::space()).append(\n\n RcDoc::nil()\n\n .append(RcDoc::line())\n\n .append(doc)\n\n 
.nest(2)\n\n .append(RcDoc::line())\n\n .braces(),\n\n )\n\n}\n\n\n", "file_path": "calyx/src/frontend/pretty_print.rs", "rank": 55, "score": 62649.27748154312 }, { "content": "pub trait ColorHelper {\n\n fn define_color(self) -> Self;\n\n fn keyword_color(self) -> Self;\n\n fn control_color(self) -> Self;\n\n fn literal_color(self) -> Self;\n\n}\n\n\n\nimpl<'a> ColorHelper for RcDoc<'a, ColorSpec> {\n\n fn define_color(self) -> Self {\n\n let mut c = ColorSpec::new();\n\n c.set_fg(Some(Color::Green)).set_bold(true);\n\n self.annotate(c)\n\n }\n\n\n\n fn keyword_color(self) -> Self {\n\n let mut c = ColorSpec::new();\n\n c.set_fg(Some(Color::Blue));\n\n self.annotate(c)\n\n }\n\n\n", "file_path": "calyx/src/frontend/colors.rs", "rank": 56, "score": 62284.8229809322 }, { "content": "/// Generate a sequence of ternary assignments into the (node, port) using\n\n/// edges. Generated code looks like:\n\n/// node.port = g1 ? n1.p1 : g2 ? n2.p2 ...\n\nfn gen_assigns<'a>(\n\n node: &Node,\n\n port: String,\n\n edges: Vec<(EdgeData, &Node)>,\n\n) -> D<'a> {\n\n let unguarded_drivers = edges\n\n .iter()\n\n .filter(|(ed, _)| {\n\n ed.guard.is_none() || ed.guard.as_ref().unwrap().provably_true()\n\n })\n\n .count();\n\n\n\n // Error if there is more than one unguarded driver.\n\n if unguarded_drivers > 1 {\n\n panic!(\n\n \"Multiple unguarded drivers for {}.{}\",\n\n node.name.to_string(),\n\n port\n\n );\n\n }\n", "file_path": "calyx/src/backend/verilog/gen.rs", "rank": 57, "score": 61660.76109010156 }, { "content": "/// Generates Verilog for connection ports to wires.\n\nfn signature_connections<'a>(\n\n sig: &ast::Signature,\n\n comp: &component::Component,\n\n idx: NodeIndex,\n\n) -> D<'a> {\n\n // wire up all the incoming edges\n\n let all = sig.inputs.iter().chain(sig.outputs.iter()).map(|portdef| {\n\n // if portdef is named `clk`, wire up `clk`\n\n if &portdef.name == \"clk\" {\n\n D::text(\".\").append(\"clk\").append(D::text(\"clk\").parens())\n\n } else {\n\n 
D::text(\".\")\n\n .append(D::text(portdef.name.to_string()))\n\n .append(\n\n D::text(format!(\n\n \"{}_{}\",\n\n comp.structure.get_node(idx).name.to_string(),\n\n portdef.name.to_string()\n\n ))\n\n .parens(),\n\n )\n\n }\n\n });\n\n\n\n D::intersperse(all, D::text(\",\").append(D::line()))\n\n}\n\n\n", "file_path": "calyx/src/backend/verilog/gen.rs", "rank": 58, "score": 61652.7910253609 }, { "content": "fn stmt_vec<'a>(\n\n vec: impl Iterator<Item = RcDoc<'a, ColorSpec>>,\n\n) -> RcDoc<'a, ColorSpec> {\n\n RcDoc::intersperse(vec, RcDoc::line())\n\n}\n\n\n", "file_path": "calyx/src/frontend/pretty_print.rs", "rank": 59, "score": 61648.54785166359 }, { "content": "/// Collects all of the Verilog implementations specified in the library\n\n/// file.\n\nfn primitive_implemenations<'a>(\n\n prog: &ast::NamespaceDef,\n\n context: &context::Context,\n\n) -> Result<D<'a>> {\n\n let docs = prog\n\n .components\n\n .iter()\n\n .map(|c| c.cells.iter())\n\n .flatten()\n\n .filter_map(|s| match s {\n\n Cell::Prim { data } => Some(&data.instance.name),\n\n _ => None,\n\n })\n\n .unique()\n\n .map(|name| {\n\n context.library_context.definitions[&name]\n\n .implementation\n\n .iter()\n\n .find_map(|im| match im {\n\n Implementation::Verilog { data } => {\n", "file_path": "calyx/src/backend/verilog/gen.rs", "rank": 60, "score": 61648.54785166359 }, { "content": "pub trait PrettyPrint {\n\n /// Convert `self` into an `RcDoc`. the `area` of type `&Bump`\n\n /// is provided in case objects need to be allocated while producing\n\n /// The RcDoc. 
Call `arena.alloc(obj)` to allocate `obj` and use the\n\n /// returned reference for printing.\n\n fn prettify<'a>(&self, arena: &'a bumpalo::Bump) -> RcDoc<'a, ColorSpec>;\n\n fn pretty_string(&self) -> String {\n\n // XXX(sam) this leaks memory atm because we put vecs into this\n\n let mut arena = bumpalo::Bump::new();\n\n let mut buf = Vec::new();\n\n {\n\n let r = self.prettify(&arena);\n\n r.render(100, &mut buf).unwrap();\n\n }\n\n arena.reset();\n\n String::from_utf8(buf).unwrap()\n\n }\n\n fn pretty_print(&self) {\n\n // XXX(sam) this leaks memory atm because we put vecs into this\n\n let mut arena = bumpalo::Bump::new();\n", "file_path": "calyx/src/frontend/pretty_print.rs", "rank": 61, "score": 61331.23533039707 }, { "content": "fn memory_load_store<'a>(\n\n mem_f: &'static str,\n\n ext: &'static str,\n\n comp: &component::Component,\n\n) -> D<'a> {\n\n let doc = comp\n\n .structure\n\n .component_iterator()\n\n .filter(|(_, node)| {\n\n if let NodeData::Cell(Cell::Prim { data }) = &node.data {\n\n data.instance.name.to_string().contains(\"mem\")\n\n } else {\n\n false\n\n }\n\n })\n\n .map(|(_, node)| {\n\n D::text(mem_f)\n\n .append(\n\n D::text(format!(\n\n \"{{ DATA, \\\"/{}.{}\\\" }}\",\n", "file_path": "calyx/src/backend/verilog/gen.rs", "rank": 62, "score": 60699.41660720648 }, { "content": "/// A generalized 'unwrapping' trait that extracts data from\n\n/// a container that can possible be an error and automatically\n\n/// generates the correct `Error` variant with the `ast::Id`.\n\n/// For example, `Extract<NodeIndex, NodeIndex>` can be implemented for\n\n/// `Option<NodeIndex>` to provide convienent error reporting for\n\n/// undefined components / groups.\n\npub trait Extract<T, R> {\n\n /// Unpacks `T` into `Result<R>` using `id: ast::Id`\n\n /// for error reporting with locations.\n\n fn extract(&self, id: &ast::Id) -> Result<R>;\n\n}\n\n\n\nimpl Extract<NodeIndex, NodeIndex> for Option<NodeIndex> {\n\n fn extract(&self, id: &ast::Id) -> 
Result<NodeIndex> {\n\n match self {\n\n Some(t) => Ok(*t),\n\n None => Err(Error::UndefinedComponent(id.clone())),\n\n }\n\n }\n\n}\n", "file_path": "calyx/src/errors.rs", "rank": 63, "score": 57350.63914788375 }, { "content": "pub trait PrettyHelper<'a>: Sized {\n\n fn surround(self, pre: &'a str, post: &'a str) -> Self;\n\n fn parens(self) -> Self {\n\n self.surround(\"(\", \")\")\n\n }\n\n fn brackets(self) -> Self {\n\n self.surround(\"[\", \"]\")\n\n }\n\n fn braces(self) -> Self {\n\n self.surround(\"{\", \"}\")\n\n }\n\n fn quotes(self) -> Self {\n\n self.surround(\"\\\"\", \"\\\"\")\n\n }\n\n}\n\n\n\nimpl<'a, A> PrettyHelper<'a> for RcDoc<'a, A> {\n\n fn surround(self, l: &'a str, r: &'a str) -> Self {\n\n RcDoc::text(l).append(self).append(RcDoc::text(r))\n\n }\n\n}\n\n\n", "file_path": "calyx/src/frontend/pretty_print.rs", "rank": 64, "score": 55478.06410273739 }, { "content": "/// Converts ast::Atom to a verilog string\n\nfn atom<'a>(atom: &Atom) -> D<'a> {\n\n match atom {\n\n Atom::Port(p) => match p {\n\n Port::Comp { component, port } => D::text(format!(\n\n \"{}_{}\",\n\n component.to_string(),\n\n port.to_string()\n\n )),\n\n Port::This { port } => D::text(port.to_string()),\n\n Port::Hole { .. } => unreachable!(\n\n \"Holes should be caught in the backend validation.\"\n\n ),\n\n },\n\n Atom::Num(n) => D::text(format!(\"{}'d{}\", n.width, n.val)),\n\n }\n\n}\n\n\n", "file_path": "calyx/src/backend/verilog/gen.rs", "rank": 65, "score": 51677.51990123959 }, { "content": "# Program Context\n\n\n\nThe primary data structure for representing programs is a `context::Context`.\n\nIt captures the resolved signatures as well a graph-based representation of the\n\nprogram.\n", "file_path": "docs/dev/context.md", "rank": 66, "score": 41445.61047675552 }, { "content": "/// Methods over Components. 
Only define functions that cannot be methods\n\n/// on `Control`, `Signature`, or `Structure`.\n\nimpl Component {\n\n pub fn from_signature<S: AsRef<str>>(name: S, sig: ast::Signature) -> Self {\n\n let mut graph = StructureGraph::default();\n\n graph.add_signature(sig.clone());\n\n\n\n Component {\n\n name: name.as_ref().into(),\n\n signature: sig,\n\n control: ast::Control::empty(),\n\n structure: graph,\n\n resolved_sigs: HashMap::new(),\n\n }\n\n }\n\n\n\n /// Add a new input port to this component.\n\n pub fn add_input(\n\n &mut self,\n\n portdef: impl Into<ast::Portdef>,\n", "file_path": "calyx/src/lang/component.rs", "rank": 67, "score": 40242.69771615365 }, { "content": "use super::ast;\n\nuse crate::errors::{Error, Result};\n\nuse crate::frontend::pretty_print::PrettyPrint;\n\nuse crate::lang::structure::StructureGraph;\n\nuse pretty::{termcolor::ColorSpec, RcDoc};\n\nuse std::collections::HashMap;\n\n\n\n/// In memory representation for a Component. Contains a Signature, Control AST,\n\n/// structure graph, and resolved signatures of used components\n\n#[derive(Debug, Clone)]\n\npub struct Component {\n\n pub name: ast::Id,\n\n pub signature: ast::Signature,\n\n pub control: ast::Control,\n\n pub structure: StructureGraph,\n\n /// Maps names of sub-component used in this component to fully\n\n /// resolved signatures.\n\n pub resolved_sigs: HashMap<ast::Id, ast::Signature>,\n\n}\n\n\n", "file_path": "calyx/src/lang/component.rs", "rank": 68, "score": 40231.57452723474 }, { "content": " Ok(())\n\n } else {\n\n Err(Error::DuplicatePort(self.name.clone(), portdef))\n\n }\n\n }\n\n}\n\n\n\nimpl Into<ast::ComponentDef> for Component {\n\n fn into(self) -> ast::ComponentDef {\n\n let (signature, cells, connections) = self.structure.into();\n\n ast::ComponentDef {\n\n name: self.name,\n\n signature,\n\n cells,\n\n connections,\n\n control: self.control,\n\n }\n\n }\n\n}\n\n\n\nimpl PrettyPrint for Component {\n\n fn prettify<'a>(&self, arena: &'a bumpalo::Bump) 
-> RcDoc<'a, ColorSpec> {\n\n let v: ast::ComponentDef = self.clone().into();\n\n let vref = arena.alloc(v);\n\n vref.prettify(&arena)\n\n }\n\n}\n", "file_path": "calyx/src/lang/component.rs", "rank": 69, "score": 40226.356653942086 }, { "content": " ) -> Result<()> {\n\n let portdef = portdef.into();\n\n if !self.signature.has_input(portdef.name.as_ref()) {\n\n self.structure.insert_input_port(&portdef);\n\n self.signature.inputs.push(portdef);\n\n Ok(())\n\n } else {\n\n Err(Error::DuplicatePort(self.name.clone(), portdef))\n\n }\n\n }\n\n\n\n /// Add a new output port to this component.\n\n pub fn add_output(\n\n &mut self,\n\n portdef: impl Into<ast::Portdef>,\n\n ) -> Result<()> {\n\n let portdef = portdef.into();\n\n if !self.signature.has_output(portdef.name.as_ref()) {\n\n self.structure.insert_output_port(&portdef);\n\n self.signature.outputs.push(portdef);\n", "file_path": "calyx/src/lang/component.rs", "rank": 70, "score": 40225.0074739994 }, { "content": " Action::Stop => return Ok(Action::Stop),\n\n };\n\n }\n\n Ok(Action::Continue)\n\n }\n\n}\n\n\n\nimpl Visitable for Control {\n\n fn visit(\n\n &mut self,\n\n visitor: &mut dyn Visitor,\n\n component: &mut Component,\n\n context: &Context,\n\n ) -> VisResult {\n\n match self {\n\n Control::Seq { data } => visitor\n\n .start_seq(data, component, context)?\n\n .and_then(|| data.stmts.visit(visitor, component, context))?\n\n .and_then(|| visitor.finish_seq(data, component, context))?,\n\n Control::Par { data } => visitor\n", "file_path": "calyx/src/passes/visitor.rs", "rank": 71, "score": 40201.88327156648 }, { "content": "// Inspired by this blog post: http://thume.ca/2019/04/18/writing-a-compiler-in-rust/\n\n\n\nuse crate::errors;\n\nuse crate::frontend::pretty_print::PrettyPrint;\n\nuse crate::lang::{ast::*, component::Component, context::Context};\n\n\n\npub enum Action {\n\n /// Continue AST traversal\n\n Continue,\n\n /// Stop AST traversal\n\n Stop,\n\n /// Change the current ast node. 
Implies ending\n\n /// the traversal for this branch of the AST\n\n Change(Control),\n\n}\n\n\n\nimpl Action {\n\n /// Monadic helper function that sequences actions\n\n /// that return a VisResult.\n\n /// If `self` is `Continue` or `Change`, return the result of running `f`.\n", "file_path": "calyx/src/passes/visitor.rs", "rank": 72, "score": 40200.76191472631 }, { "content": " let mut control = comp.control.clone();\n\n control.visit(self, &mut comp, context)?;\n\n // replace component control with the control we visited\n\n comp.control = control;\n\n Ok(Action::Continue)\n\n })?\n\n .and_then(|| self.finish(&mut comp, context))?;\n\n Ok(())\n\n })?;\n\n\n\n // Display intermediate futil program after running the pass.\n\n if context.debug_mode {\n\n println!(\"=============== {} ==============\", Self::name());\n\n println!(\"{}\", Self::description());\n\n context.pretty_print();\n\n println!(\"================================================\");\n\n }\n\n\n\n Ok(())\n\n }\n", "file_path": "calyx/src/passes/visitor.rs", "rank": 73, "score": 40199.71257332537 }, { "content": " .start_par(data, component, context)?\n\n .and_then(|| data.stmts.visit(visitor, component, context))?\n\n .and_then(|| visitor.finish_par(data, component, context))?,\n\n Control::If { data } => visitor\n\n .start_if(data, component, context)?\n\n .and_then(|| data.tbranch.visit(visitor, component, context))?\n\n .and_then(|| data.fbranch.visit(visitor, component, context))?\n\n .and_then(|| visitor.finish_if(data, component, context))?,\n\n Control::While { data } => visitor\n\n .start_while(data, component, context)?\n\n .and_then(|| data.body.visit(visitor, component, context))?\n\n .and_then(|| visitor.finish_while(data, component, context))?,\n\n Control::Print { data } => visitor\n\n .start_print(data, component, context)?\n\n .and_then(|| visitor.finish_print(data, component, context))?,\n\n Control::Enable { data } => visitor\n\n .start_enable(data, component, context)?\n\n 
.and_then(|| visitor.finish_enable(data, component, context))?,\n\n Control::Empty { data } => visitor\n\n .start_empty(data, component, context)?\n\n .and_then(|| visitor.finish_empty(data, component, context))?,\n\n }\n\n .apply_change(self)\n\n }\n\n}\n", "file_path": "calyx/src/passes/visitor.rs", "rank": 74, "score": 40198.89357211109 }, { "content": " /// Pass `Stop` through\n\n fn and_then<F>(self, mut other: F) -> VisResult\n\n where\n\n F: FnMut() -> VisResult,\n\n {\n\n match self {\n\n Action::Continue => other(),\n\n x => Ok(x),\n\n }\n\n }\n\n\n\n /// Applies the Change action if `self` is a Change action.\n\n /// Otherwise passes the action through unchanged\n\n fn apply_change(self, con: &mut Control) -> VisResult {\n\n match self {\n\n Action::Change(c) => {\n\n *con = c;\n\n Ok(Action::Continue)\n\n }\n\n x => Ok(x),\n", "file_path": "calyx/src/passes/visitor.rs", "rank": 75, "score": 40191.506079817715 }, { "content": " &mut self,\n\n _s: &Enable,\n\n _comp: &mut Component,\n\n _x: &Context,\n\n ) -> VisResult {\n\n Ok(Action::Continue)\n\n }\n\n\n\n fn finish_enable(\n\n &mut self,\n\n _s: &Enable,\n\n _comp: &mut Component,\n\n _x: &Context,\n\n ) -> VisResult {\n\n Ok(Action::Continue)\n\n }\n\n\n\n fn start_empty(\n\n &mut self,\n\n _s: &Empty,\n", "file_path": "calyx/src/passes/visitor.rs", "rank": 76, "score": 40190.23535664011 }, { "content": "\n\n fn start_print(\n\n &mut self,\n\n _s: &Print,\n\n _comp: &mut Component,\n\n _x: &Context,\n\n ) -> VisResult {\n\n Ok(Action::Continue)\n\n }\n\n\n\n fn finish_print(\n\n &mut self,\n\n _s: &Print,\n\n _comp: &mut Component,\n\n _x: &Context,\n\n ) -> VisResult {\n\n Ok(Action::Continue)\n\n }\n\n\n\n fn start_enable(\n", "file_path": "calyx/src/passes/visitor.rs", "rank": 77, "score": 40188.79247332929 }, { "content": " _comp: &mut Component,\n\n _x: &Context,\n\n ) -> VisResult {\n\n Ok(Action::Continue)\n\n }\n\n\n\n fn finish_empty(\n\n &mut self,\n\n _s: &Empty,\n\n _comp: &mut 
Component,\n\n _x: &Context,\n\n ) -> VisResult {\n\n Ok(Action::Continue)\n\n }\n\n}\n\n\n\n/** `Visitable` describes types that can be visited by things\n\nimplementing `Visitor`. This performs a recursive walk of the tree.\n\nIt calls `Visitor::start_*` on the way down, and `Visitor::finish_*`\n\non the way up. */\n", "file_path": "calyx/src/passes/visitor.rs", "rank": 78, "score": 40188.703248176265 }, { "content": "\n\n fn start(&mut self, _comp: &mut Component, _c: &Context) -> VisResult {\n\n Ok(Action::Continue)\n\n }\n\n\n\n fn finish(&mut self, _comp: &mut Component, _c: &Context) -> VisResult {\n\n Ok(Action::Continue)\n\n }\n\n\n\n fn start_seq(\n\n &mut self,\n\n _s: &Seq,\n\n _comp: &mut Component,\n\n _c: &Context,\n\n ) -> VisResult {\n\n Ok(Action::Continue)\n\n }\n\n\n\n fn finish_seq(\n\n &mut self,\n", "file_path": "calyx/src/passes/visitor.rs", "rank": 79, "score": 40186.12041364759 }, { "content": " _s: &Seq,\n\n _comp: &mut Component,\n\n _c: &Context,\n\n ) -> VisResult {\n\n Ok(Action::Continue)\n\n }\n\n\n\n fn start_par(\n\n &mut self,\n\n _s: &Par,\n\n _comp: &mut Component,\n\n _c: &Context,\n\n ) -> VisResult {\n\n Ok(Action::Continue)\n\n }\n\n\n\n fn finish_par(\n\n &mut self,\n\n _s: &Par,\n\n _comp: &mut Component,\n", "file_path": "calyx/src/passes/visitor.rs", "rank": 80, "score": 40185.98388349404 }, { "content": " _x: &Context,\n\n ) -> VisResult {\n\n Ok(Action::Continue)\n\n }\n\n\n\n fn start_if(\n\n &mut self,\n\n _s: &If,\n\n _comp: &mut Component,\n\n _c: &Context,\n\n ) -> VisResult {\n\n Ok(Action::Continue)\n\n }\n\n\n\n fn finish_if(\n\n &mut self,\n\n _s: &If,\n\n _comp: &mut Component,\n\n _x: &Context,\n\n ) -> VisResult {\n", "file_path": "calyx/src/passes/visitor.rs", "rank": 81, "score": 40185.8045246159 }, { "content": " Ok(Action::Continue)\n\n }\n\n\n\n fn start_while(\n\n &mut self,\n\n _s: &While,\n\n _comp: &mut Component,\n\n _c: &Context,\n\n ) -> VisResult {\n\n Ok(Action::Continue)\n\n }\n\n\n\n fn 
finish_while(\n\n &mut self,\n\n _s: &While,\n\n _comp: &mut Component,\n\n _x: &Context,\n\n ) -> VisResult {\n\n Ok(Action::Continue)\n\n }\n", "file_path": "calyx/src/passes/visitor.rs", "rank": 82, "score": 40185.4191823903 }, { "content": " }\n\n }\n\n}\n\n\n\npub type VisResult = Result<Action, errors::Error>;\n\n\n\n/// Trait that describes named things. Calling `do_pass` and `do_pass_default`\n\n/// require this to be implemented. This has to be a separate trait from `Visitor`\n\n/// because these methods don't recieve `self` which means that it is impossible\n\n/// to create dynamic trait objects.\n", "file_path": "calyx/src/passes/visitor.rs", "rank": 83, "score": 40177.5904629351 }, { "content": " params: &[u64],\n\n ) -> Result<Component> {\n\n let sig = self.library_context.resolve(id, params)?;\n\n Ok(Component::from_signature(name, sig))\n\n }\n\n\n\n /// Looks up the component for a component instance id.\n\n /// Does not provide mutable access to the Context.\n\n ///\n\n /// # Arguments\n\n /// * `id` - the identifier for the instance\n\n /// # Returns\n\n /// Returns the Component corresponding to `id` or an error.\n\n pub fn get_component(&self, id: &ast::Id) -> Result<Component> {\n\n match self.definitions.borrow().get(id) {\n\n Some(comp) => Ok(comp.clone()),\n\n None => Err(Error::UndefinedComponent(id.clone())),\n\n }\n\n }\n\n\n", "file_path": "calyx/src/lang/context.rs", "rank": 84, "score": 39892.45558983696 }, { "content": " /// Insert the component `comp` into `self`.\n\n pub fn insert_component(&self, comp: Component) {\n\n // It's possible that this method will be called inside the\n\n // `definitions_iter` function. In that case, the borrow will\n\n // fail and we temporarily move `comp` to `self.definitions.to_insert`.\n\n // When the iteration finishes, `definitions_iter` is responsible for\n\n // applying these changes. 
If we successfully borrow `self.definitions`\n\n // we can insert immediately.\n\n match self.definitions.try_borrow_mut() {\n\n Ok(mut defns) => {\n\n defns.insert(comp.name.clone(), comp);\n\n }\n\n Err(_) => self.definitions_to_insert.borrow_mut().push(comp),\n\n };\n\n }\n\n}\n\n\n\nimpl Into<ast::NamespaceDef> for Context {\n\n fn into(self) -> ast::NamespaceDef {\n\n let mut components: Vec<ast::ComponentDef> = vec![];\n", "file_path": "calyx/src/lang/context.rs", "rank": 85, "score": 39892.273080798026 }, { "content": "/// let comp = context.definitions.borrow_mut()[\"main\"];\n\n/// // insert_comp borrows context immmutably and uses borrow_mut()\n\n/// // internally to gain mutably\n\n/// context.insert_comp(new_comp); // <---- compiles fine, potentially run time error!\n\n/// // mutate comp here\n\n/// ...\n\n/// ```\n\n///\n\n/// `RefCell`s in essence let us give controlled\n\n/// mutable access to the context. However, we give up on some of Rust's compile-time safety guarantees\n\n/// so we have to make sure to enforce these ourselves. In particular, in `insert_component` we\n\n/// use `try_borrow_mut` to test if another mutable reference is alive. This will happen whenever\n\n/// we call this method from a pass because `definitions_iter` also borrows `definitions` mutably.\n\n/// If the borrow fails, then we put the new component\n\n/// in `definitions_to_insert` instead of putting it in the HashMap directly. After `definitions_iter`\n\n/// is done with it's mutable reference to `definitions`, then it inserts all the new components.\n\n#[derive(Debug, Clone)]\n\npub struct Context {\n\n /// Enable debugging output.\n\n pub debug_mode: bool,\n", "file_path": "calyx/src/lang/context.rs", "rank": 86, "score": 39889.16060670591 }, { "content": " /// Enable Verilator mode. 
This tells the backend to generate additional code for loading in memories.\n\n pub verilator_mode: bool,\n\n /// Force outputting in color.\n\n pub force_color: bool,\n\n /// Library containing primitive definitions.\n\n pub library_context: LibraryContext,\n\n /// Maps Ids to in-memory representation of the component.\n\n definitions: RefCell<HashMap<ast::Id, Component>>,\n\n /// Keeps track of components that we need to insert. We need\n\n /// this because `definitions_iter` allows multiple mutable\n\n /// references to `self.definitions` to be given away. If we\n\n /// insert components inside a call to `definitions_iter`, things\n\n /// will break.\n\n definitions_to_insert: RefCell<Vec<Component>>,\n\n /// Paths to the import statements. Used by the FuTIL pretty printer.\n\n imports: Vec<String>,\n\n}\n\n/// Add `go`/`done`/`clk` ports to a signature.\n", "file_path": "calyx/src/lang/context.rs", "rank": 87, "score": 39887.28794413992 }, { "content": " }\n\n\n\n // XXX(sam) maybe implement this as an iterator?\n\n /// Iterates over the context definitions, giving mutable access the components\n\n pub fn definitions_iter(\n\n &self,\n\n mut func: impl FnMut(&ast::Id, &mut Component) -> Result<()>,\n\n ) -> Result<()> {\n\n let mut definitions = self.definitions.borrow_mut();\n\n\n\n // do main iteration\n\n let ret = definitions\n\n .iter_mut()\n\n .map(|(id, comp)| func(id, comp))\n\n .collect();\n\n\n\n // if there are new definitions to insert, insert them now\n\n let mut defns_to_insert = self.definitions_to_insert.borrow_mut();\n\n for new_defn in defns_to_insert.drain(..) 
{\n\n definitions.insert(new_defn.name.clone(), new_defn);\n", "file_path": "calyx/src/lang/context.rs", "rank": 88, "score": 39887.22506102898 }, { "content": " verilator_mode: bool,\n\n force_color: bool,\n\n ) -> Result<Self> {\n\n // build hashmap for primitives in provided libraries\n\n let mut lib_definitions = HashMap::new();\n\n for def in libraries {\n\n for prim in &def.primitives {\n\n lib_definitions.insert(prim.name.clone(), prim.clone());\n\n }\n\n }\n\n let libctx = LibraryContext {\n\n definitions: lib_definitions,\n\n };\n\n\n\n // gather signatures from all components\n\n let mut signatures = HashMap::new();\n\n for comp in &namespace.components {\n\n signatures\n\n .insert(comp.name.clone(), extend_sig(comp.signature.clone()));\n\n }\n", "file_path": "calyx/src/lang/context.rs", "rank": 89, "score": 39884.581834377124 }, { "content": "\n\n let mut definitions = HashMap::new();\n\n for comp in namespace.components {\n\n let resolved_sigs = comp.resolve_primitives(&libctx)?;\n\n let ast::ComponentDef {\n\n name,\n\n signature,\n\n cells,\n\n connections,\n\n control,\n\n } = comp;\n\n let extended_sig = extend_sig(signature);\n\n let structure = StructureGraph::new(\n\n extended_sig.clone(),\n\n cells,\n\n connections,\n\n &signatures,\n\n &resolved_sigs,\n\n )?;\n\n definitions.insert(\n", "file_path": "calyx/src/lang/context.rs", "rank": 90, "score": 39882.97543856468 }, { "content": "/// references to the component (owned by the context) alive at the same time. We\n\n/// get around this restriction using `RefCell`s to give a mutable style interface\n\n/// to immutable references to the context.\n\n///\n\n/// `RefCell` is a Rust mechanism that allows an immutable reference to be turned into\n\n/// a mutable reference. 
For example if we assume that `definitions` doesn't use a `RefCell`,\n\n/// the following is disallowed by Rust:\n\n/// ```rust\n\n/// let mut context = Context::from_opts(&opts)?;\n\n/// let comp = &mut context.definitions[\"main\"];\n\n/// // insert_comp borrows context mutably\n\n/// context.insert_comp(new_comp); // <---- compile time error! can't have two mutable references to the same data\n\n/// // mutate comp here\n\n/// ...\n\n/// ```\n\n///\n\n/// With a `RefCell`, the code looks like this:\n\n///\n\n/// ```rust\n\n/// let context = Context::from_opts(&opts)?; // not declared as mutable\n", "file_path": "calyx/src/lang/context.rs", "rank": 91, "score": 39882.84818519652 }, { "content": " for comp in self.definitions.borrow().values() {\n\n components.push(comp.clone().into())\n\n }\n\n components.sort();\n\n ast::NamespaceDef {\n\n components,\n\n libraries: self.imports,\n\n }\n\n }\n\n}\n\n\n\n/// Map library signatures to \"real\" Futil signatures. Since library components\n\n/// can have parameters while futil components cannot, we define helpers methods\n\n/// to make this easier.\n\n#[derive(Debug, Clone)]\n\npub struct LibraryContext {\n\n pub definitions: HashMap<ast::Id, lib::Primitive>,\n\n}\n\n\n\nimpl LibraryContext {\n", "file_path": "calyx/src/lang/context.rs", "rank": 92, "score": 39882.238016863936 }, { "content": "use crate::errors::{Error, Result};\n\nuse crate::frontend::pretty_print::PrettyPrint;\n\nuse crate::lang::{\n\n ast, ast::Signature, component::Component, library::ast as lib,\n\n structure::StructureGraph,\n\n};\n\nuse pretty::{termcolor::ColorSpec, RcDoc};\n\nuse std::cell::RefCell;\n\nuse std::collections::HashMap;\n\n\n\n/// Represents an entire Futil program. 
We are keeping all of the components in a `RefCell<HashMap>`.\n\n/// We use the `RefCell` to provide our desired visitor interface\n\n/// where each visitor gets mutable access to it's own component as well as immutable\n\n/// access to the global context to allow looking up definitions and primitives. Mutable\n\n/// access to it's own component is desirable because the structure is represented with a graph\n\n/// and graphs are ill-suited for functional style interfaces.\n\n///\n\n/// However, we also need a way for visitors to add new component definitions to the context.\n\n/// We can't just give the visitor mutable access to the context, because we\n\n/// can't have mutable references to the context and mutable\n", "file_path": "calyx/src/lang/context.rs", "rank": 93, "score": 39882.11717059324 }, { "content": " }\n\n\n\n ret\n\n }\n\n\n\n /// Creates a concrete instance of a primitive component.\n\n /// Because primitive components can take in parameters, this\n\n /// function attempts to resolve supplied parameters with a\n\n /// primitive component to create a concrete component.\n\n ///\n\n /// # Arguments\n\n /// * `name` - the type of primitive component to instance\n\n /// * `id` - the identifier for the instance\n\n /// * `params` - parameters to pass to the primitive component definition\n\n /// # Returns\n\n /// Returns a concrete Component object or an error.\n\n pub fn instantiate_primitive<S: AsRef<str>>(\n\n &self,\n\n name: S,\n\n id: &ast::Id,\n", "file_path": "calyx/src/lang/context.rs", "rank": 94, "score": 39880.41868407516 }, { "content": " .map(|pd| pd.resolve(&id, &param_map))\n\n .collect();\n\n // resolve outputs\n\n let outputs_res: Result<Vec<ast::Portdef>> = prim\n\n .signature\n\n .outputs()\n\n .map(|pd| pd.resolve(&id, &param_map))\n\n .collect();\n\n let inputs = inputs_res?;\n\n let outputs = outputs_res?;\n\n Ok(ast::Signature { inputs, outputs })\n\n }\n\n None => Err(Error::UndefinedComponent(id.clone())),\n\n }\n\n 
}\n\n}\n\n\n\n/* =============== Context Printing ================ */\n\nimpl PrettyPrint for Context {\n\n fn prettify<'a>(&self, arena: &'a bumpalo::Bump) -> RcDoc<'a, ColorSpec> {\n\n let namespace: ast::NamespaceDef = self.clone().into();\n\n namespace.prettify(&arena)\n\n }\n\n}\n", "file_path": "calyx/src/lang/context.rs", "rank": 95, "score": 39877.34577030461 }, { "content": " /// Given the id of a library primitive and a list of values for the params,\n\n /// attempt to resolve a `ParamSignature` into a `Signature`\n\n pub fn resolve(\n\n &self,\n\n id: &ast::Id,\n\n params: &[u64],\n\n ) -> Result<ast::Signature> {\n\n match self.definitions.get(id) {\n\n Some(prim) => {\n\n // zip param ids with passed in params into hashmap\n\n let param_map: HashMap<&ast::Id, u64> = prim\n\n .params\n\n .iter()\n\n .zip(params)\n\n .map(|(id, &width)| (id, width))\n\n .collect();\n\n // resolve inputs\n\n let inputs_res: Result<Vec<ast::Portdef>> = prim\n\n .signature\n\n .inputs()\n", "file_path": "calyx/src/lang/context.rs", "rank": 96, "score": 39876.911063862906 }, { "content": " name.clone(),\n\n Component {\n\n name: name.clone(),\n\n signature: extended_sig,\n\n control,\n\n structure,\n\n resolved_sigs,\n\n },\n\n );\n\n }\n\n\n\n Ok(Context {\n\n debug_mode,\n\n verilator_mode,\n\n force_color,\n\n library_context: libctx,\n\n definitions: RefCell::new(definitions),\n\n definitions_to_insert: RefCell::new(vec![]),\n\n imports: namespace.libraries,\n\n })\n", "file_path": "calyx/src/lang/context.rs", "rank": 97, "score": 39873.685342448975 }, { "content": "/// Helper to construct portdef from str and u64.\n\nimpl From<(&str, u64)> for Portdef {\n\n fn from((name, width): (&str, u64)) -> Self {\n\n Portdef {\n\n name: name.into(),\n\n width,\n\n }\n\n }\n\n}\n\n\n\n/// Statement that refers to a port on a subcomponent.\n\n/// This is distinct from a `Portdef` which defines a port.\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]\n\npub enum Port 
{\n\n /// Refers to the port named `port` on the subcomponent\n\n /// `component`.\n\n Comp { component: Id, port: Id },\n\n\n\n /// Refers to the port named `port` on the component\n\n /// currently being defined.\n", "file_path": "calyx/src/lang/ast.rs", "rank": 98, "score": 39859.6680970467 }, { "content": " /// Port that connects the conditional check.\n\n pub port: Port,\n\n\n\n /// Modules that need to be enabled to send signal on `port`.\n\n pub cond: Id,\n\n\n\n /// Control for the loop body.\n\n pub body: Box<Control>,\n\n}\n\n\n\n/// Data for the `print` control statement.\n\n#[derive(Debug, Clone, Hash, PartialEq, Eq)]\n\npub struct Print {\n\n /// Name of the port to print.\n\n pub var: Port,\n\n}\n\n\n\n/// Data for the `enable` control statement.\n\n#[derive(Debug, Clone, Hash, PartialEq, Eq)]\n\npub struct Enable {\n", "file_path": "calyx/src/lang/ast.rs", "rank": 99, "score": 39858.9088756272 } ]
Rust
src/online/registry/github_registry.rs
SierraSoftworks/git-tool
961f0d7b2a9dfc58c2346571e09f4cf8b7851c01
use super::*; use crate::errors; use serde::Deserialize; pub struct GitHubRegistry; impl GitHubRegistry { async fn get(&self, core: &Core, url: &str) -> Result<reqwest::Response, errors::Error> { let uri: reqwest::Url = url.parse().map_err(|e| { errors::system_with_internal( &format!("Unable to parse GitHub API URL '{}'.", url), "Please report this error to us by opening a ticket in GitHub.", e, ) })?; #[allow(unused_mut)] let mut req = reqwest::Request::new(reqwest::Method::GET, uri); req.headers_mut().append( "User-Agent", version!("Git-Tool/").parse().map_err(|e| { errors::system_with_internal( &format!( "Unable to parse Git-Tool user agent header {}.", version!("Git-Tool/") ), "Please report this error to us by opening a ticket in GitHub.", e, ) })?, ); #[cfg(test)] { if let Ok(token) = std::env::var("GITHUB_TOKEN") { req.headers_mut().append( "Authorization", format!("token {}", token).parse().map_err(|e| { errors::system_with_internal( "Unable to parse GITHUB_TOKEN authorization header.", "Please report this error to us by opening a ticket in GitHub.", e, ) })?, ); } } core.http_client().request(req).await } } #[async_trait::async_trait] impl Registry for GitHubRegistry { async fn get_entries(&self, core: &Core) -> Result<Vec<String>, Error> { let resp = self.get(core, "https://api.github.com/repos/SierraSoftworks/git-tool/git/trees/main?recursive=true").await?; match resp.status() { http::StatusCode::OK => { let tree: GitHubTree = resp.json().await?; let mut entries: Vec<String> = Vec::new(); let prefix = "registry/"; let suffix = ".yaml"; for node in tree.tree { if node.node_type == "blob" && node.path.starts_with(prefix) && node.path.ends_with(suffix) { let len = node.path.len(); let name: String = node.path[prefix.len()..(len - suffix.len())].into(); entries.push(name); } } Ok(entries) } http::StatusCode::TOO_MANY_REQUESTS | http::StatusCode::FORBIDDEN => { let inner_error = errors::reqwest::ResponseError::with_body(resp).await; 
Err(errors::user_with_internal( "GitHub has rate limited requests from your IP address.", "Please wait until GitHub removes this rate limit before trying again.", inner_error, )) } status => { let inner_error = errors::reqwest::ResponseError::with_body(resp).await; Err(errors::system_with_internal( &format!("Received an HTTP {} response from GitHub when attempting to list items in the Git-Tool registry.", status), "Please read the error message below and decide if there is something you can do to fix the problem, or report it to us on GitHub.", inner_error)) } } } async fn get_entry(&self, core: &Core, id: &str) -> Result<Entry, Error> { let resp = self .get( core, &format!( "https://raw.githubusercontent.com/SierraSoftworks/git-tool/main/registry/{}.yaml", id ), ) .await?; match resp.status() { http::StatusCode::OK => { let body = resp.bytes().await?; let entity = serde_yaml::from_slice(&body)?; Ok(entity) }, http::StatusCode::NOT_FOUND => { Err(errors::user( &format!("Could not find {} in the Git-Tool registry.", id), "Please make sure that you've selected a configuration entry which exists in the registry. 
You can check this with `git-tool config list`.")) }, http::StatusCode::TOO_MANY_REQUESTS | http::StatusCode::FORBIDDEN => { let inner_error = errors::reqwest::ResponseError::with_body(resp).await; Err(errors::user_with_internal( "GitHub has rate limited requests from your IP address.", "Please wait until GitHub removes this rate limit before trying again.", inner_error)) }, status => { let inner_error = errors::reqwest::ResponseError::with_body(resp).await; Err(errors::system_with_internal( &format!("Received an HTTP {} response from GitHub when attempting to fetch /registry/{}.yaml.", status, id), "Please read the error message below and decide if there is something you can do to fix the problem, or report it to us on GitHub.", inner_error)) } } } } #[derive(Debug, Deserialize, Clone)] struct GitHubTree { pub tree: Vec<GitHubTreeNode>, pub truncated: bool, } #[derive(Debug, Deserialize, Clone)] struct GitHubTreeNode { #[serde(rename = "type")] pub node_type: String, pub path: String, } #[cfg(test)] mod tests { use super::*; #[tokio::test] async fn get_entries() { let core = Core::builder().build(); let registry = GitHubRegistry; let entries = registry.get_entries(&core).await.unwrap(); assert_ne!(entries.len(), 0); assert!(entries.iter().any(|i| i == "apps/bash")); } #[tokio::test] async fn get_entry() { let core = Core::builder().build(); let registry = GitHubRegistry; let entry = registry.get_entry(&core, "apps/bash").await.unwrap(); assert_eq!(entry.name, "Bash"); } }
use super::*; use crate::errors; use serde::Deserialize; pub struct GitHubRegistry; impl GitHubRegistry { async fn get(&self, core: &Core, url: &str) -> Result<reqwest::Response, errors::Error> { let uri: reqwest::Url = url.parse().map_err(|e| { errors::system_with_internal( &format!("Unable to parse GitHub API URL '{}'.", url), "Please report this error to us by opening a ticket in GitHub.", e, ) })?; #[allow(unused_mut)] let mut req = reqwest::Request::new(reqwest::Method::GET, uri); req.headers_mut().append( "User-Agent", version!("Git-Tool/").parse().map_err(|e| { errors::system_with_internal( &format!( "Unable to parse Git-Tool user agent header {}.", version!("Git-Tool/") ), "Please report this error to us by opening a ticket in GitHub.", e, ) })?, ); #[cfg(test)] { if let Ok(token) = std::env::var("GITHUB_TOKEN") { req.headers_mut().append( "Authorization", format!("token {}", token).parse().map_err(|e| { errors::system_with_internal( "Unable to parse GITHUB_TOKEN authorization header.", "Please report this error to us by opening a ticket in GitHub.", e, ) })?, ); } } core.http_client().request(req).await } } #[async_trait::async_trait] impl Registry for GitHubRegistry { async fn get_entries(&self, core: &Core) -> Result<Vec<String>, Error> { let resp = self.get(core, "https://api.github.com/repos/SierraSoftworks/git-tool/git/trees/main?recursive=true").await?; match resp.status() { http::StatusCode::OK => { let tree: GitHubTree = resp.json().awai
async fn get_entry(&self, core: &Core, id: &str) -> Result<Entry, Error> { let resp = self .get( core, &format!( "https://raw.githubusercontent.com/SierraSoftworks/git-tool/main/registry/{}.yaml", id ), ) .await?; match resp.status() { http::StatusCode::OK => { let body = resp.bytes().await?; let entity = serde_yaml::from_slice(&body)?; Ok(entity) }, http::StatusCode::NOT_FOUND => { Err(errors::user( &format!("Could not find {} in the Git-Tool registry.", id), "Please make sure that you've selected a configuration entry which exists in the registry. You can check this with `git-tool config list`.")) }, http::StatusCode::TOO_MANY_REQUESTS | http::StatusCode::FORBIDDEN => { let inner_error = errors::reqwest::ResponseError::with_body(resp).await; Err(errors::user_with_internal( "GitHub has rate limited requests from your IP address.", "Please wait until GitHub removes this rate limit before trying again.", inner_error)) }, status => { let inner_error = errors::reqwest::ResponseError::with_body(resp).await; Err(errors::system_with_internal( &format!("Received an HTTP {} response from GitHub when attempting to fetch /registry/{}.yaml.", status, id), "Please read the error message below and decide if there is something you can do to fix the problem, or report it to us on GitHub.", inner_error)) } } } } #[derive(Debug, Deserialize, Clone)] struct GitHubTree { pub tree: Vec<GitHubTreeNode>, pub truncated: bool, } #[derive(Debug, Deserialize, Clone)] struct GitHubTreeNode { #[serde(rename = "type")] pub node_type: String, pub path: String, } #[cfg(test)] mod tests { use super::*; #[tokio::test] async fn get_entries() { let core = Core::builder().build(); let registry = GitHubRegistry; let entries = registry.get_entries(&core).await.unwrap(); assert_ne!(entries.len(), 0); assert!(entries.iter().any(|i| i == "apps/bash")); } #[tokio::test] async fn get_entry() { let core = Core::builder().build(); let registry = GitHubRegistry; let entry = registry.get_entry(&core, 
"apps/bash").await.unwrap(); assert_eq!(entry.name, "Bash"); } }
t?; let mut entries: Vec<String> = Vec::new(); let prefix = "registry/"; let suffix = ".yaml"; for node in tree.tree { if node.node_type == "blob" && node.path.starts_with(prefix) && node.path.ends_with(suffix) { let len = node.path.len(); let name: String = node.path[prefix.len()..(len - suffix.len())].into(); entries.push(name); } } Ok(entries) } http::StatusCode::TOO_MANY_REQUESTS | http::StatusCode::FORBIDDEN => { let inner_error = errors::reqwest::ResponseError::with_body(resp).await; Err(errors::user_with_internal( "GitHub has rate limited requests from your IP address.", "Please wait until GitHub removes this rate limit before trying again.", inner_error, )) } status => { let inner_error = errors::reqwest::ResponseError::with_body(resp).await; Err(errors::system_with_internal( &format!("Received an HTTP {} response from GitHub when attempting to list items in the Git-Tool registry.", status), "Please read the error message below and decide if there is something you can do to fix the problem, or report it to us on GitHub.", inner_error)) } } }
function_block-function_prefixed
[ { "content": "pub fn render(tmpl: &str, context: Value) -> Result<String, errors::Error> {\n\n template(tmpl, context).map_err(|e| errors::user_with_internal(\n\n format!(\"We couldn't render your template '{}'.\", tmpl).as_str(),\n\n \"Check that your template follows the Go template syntax here: https://golang.org/pkg/text/template/\",\n\n errors::detailed_message(&e.to_string())))\n\n}\n\n\n", "file_path": "src/core/templates.rs", "rank": 0, "score": 209170.06460450432 }, { "content": "pub fn matches(value: &str, sequence: &str) -> bool {\n\n if sequence.len() > value.len() {\n\n return false;\n\n }\n\n\n\n let mut seq_iter = sequence.chars().peekable();\n\n for c in value.chars() {\n\n match seq_iter.peek() {\n\n Some(sc) => {\n\n if c.to_lowercase().eq(sc.to_lowercase()) {\n\n seq_iter.next();\n\n }\n\n }\n\n None => break,\n\n }\n\n }\n\n\n\n match seq_iter.peek() {\n\n Some(_) => false,\n\n None => true,\n", "file_path": "src/search/v1.rs", "rank": 1, "score": 188571.41053305293 }, { "content": "pub fn render_list<S: AsRef<str>>(\n\n items: Vec<S>,\n\n context: Value,\n\n) -> Result<Vec<String>, errors::Error> {\n\n let mut out = Vec::new();\n\n out.reserve(items.len());\n\n\n\n for item in items {\n\n let rendered = render(item.as_ref(), context.clone())?;\n\n out.push(rendered);\n\n }\n\n\n\n Ok(out)\n\n}\n\n\n", "file_path": "src/core/templates.rs", "rank": 2, "score": 179568.3953175831 }, { "content": "#[cfg(test)]\n\npub fn mock(data: &str) {\n\n let reader = mocks::MockInputReader::from(data);\n\n input.mock_safe(move || MockResult::Return(Box::new(reader.clone())))\n\n}\n\n\n\n#[cfg(test)]\n\npub mod mocks {\n\n use super::*;\n\n use std::sync::{Arc, RwLock};\n\n\n\n #[derive(Clone)]\n\n pub struct MockInputReader {\n\n readable_data: Arc<RwLock<String>>,\n\n position: Arc<RwLock<usize>>,\n\n }\n\n\n\n impl MockInputReader {\n\n fn read_n(&mut self, n: usize) -> std::io::Result<String> {\n\n self.readable_data\n\n .read()\n", "file_path": 
"src/console/input.rs", "rank": 5, "score": 156891.3030197139 }, { "content": "pub fn best_matches<T>(sequence: &str, values: T) -> Vec<T::Item>\n\nwhere\n\n T: IntoIterator,\n\n T::Item: AsRef<str> + Clone,\n\n{\n\n let matcher = SequenceMatcher::new(sequence);\n\n matcher.order_by(values, |v| v.to_owned())\n\n}\n\n\n", "file_path": "src/search/v2.rs", "rank": 6, "score": 151878.0595422309 }, { "content": "fn translate_os_name(name: &str) -> &str {\n\n match name {\n\n \"macos\" => \"darwin\",\n\n _ => name,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn is_compatible() {\n\n assert_eq!(\n\n EntryConfig {\n\n platform: \"any\".to_string(),\n\n ..Default::default()\n\n }\n\n .is_compatible(),\n\n true\n", "file_path": "src/online/registry/mod.rs", "rank": 7, "score": 147921.9953846424 }, { "content": "pub fn best_matches_by<'a, T, F, K>(sequence: &str, values: T, to_key: F) -> Vec<T::Item>\n\nwhere\n\n T: IntoIterator,\n\n T::Item: Clone + 'a,\n\n F: Fn(&T::Item) -> K,\n\n K: AsRef<str>,\n\n{\n\n let matcher = SequenceMatcher::new(sequence);\n\n matcher.order_by(values, to_key)\n\n}\n\n\n", "file_path": "src/search/v2.rs", "rank": 8, "score": 133916.87428483152 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct UserProfile {\n\n pub login: String,\n\n}\n\n\n", "file_path": "src/online/service/github.rs", "rank": 9, "score": 132639.64171558307 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct GitHubError {\n\n pub message: String,\n\n\n\n #[serde(default)]\n\n pub resource: String,\n\n\n\n #[serde(default)]\n\n pub code: String,\n\n\n\n #[serde(default)]\n\n pub field: String,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use mocktopus::mocking::*;\n\n\n\n #[tokio::test]\n\n async fn test_happy_path_user_repo() {\n", "file_path": "src/online/service/github.rs", "rank": 10, "score": 128995.73001204032 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct GitHubErrorResponse {\n\n 
#[serde(skip)]\n\n pub http_status_code: StatusCode,\n\n\n\n pub message: String,\n\n pub documentation_url: String,\n\n #[serde(default)]\n\n pub errors: Vec<GitHubError>,\n\n}\n\n\n\nimpl Into<errors::Error> for GitHubErrorResponse {\n\n fn into(self) -> errors::Error {\n\n match self.http_status_code {\n\n http::StatusCode::UNAUTHORIZED => {\n\n errors::user(\n\n \"You have not provided a valid authentication token for github.com.\",\n\n \"Please generate a valid Personal Access Token at https://github.com/settings/tokens (with the `repo` scope) and add it using `git-tool auth github.com`.\")\n\n },\n\n http::StatusCode::FORBIDDEN => {\n\n errors::user_with_internal(\n", "file_path": "src/online/service/github.rs", "rank": 11, "score": 125702.70608845903 }, { "content": "fn translate_platform(platform: &str) -> &str {\n\n match platform {\n\n \"macos\" => \"darwin\",\n\n x => x,\n\n }\n\n}\n\n\n", "file_path": "src/update/release.rs", "rank": 12, "score": 119628.37673528846 }, { "content": "fn translate_arch(arch: &str) -> &str {\n\n match arch {\n\n \"x86_64\" => \"amd64\",\n\n \"i686\" => \"386\",\n\n \"aarch64\" => \"arm64\",\n\n x => x,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_get_latest() {\n\n assert_eq!(Release::get_latest(vec![]), None);\n\n\n\n let releases = vec![\n\n Release {\n\n id: \"1\".to_string(),\n", "file_path": "src/update/release.rs", "rank": 13, "score": 119628.37673528846 }, { "content": "pub fn repo_context<'a>(config: &'a Config, repo: &'a Repo) -> Value {\n\n match config.get_service(&repo.get_domain()) {\n\n Some(service) => RepoWithService { repo, service }.into(),\n\n None => repo.into(),\n\n }\n\n}\n\n\n", "file_path": "src/core/templates.rs", "rank": 14, "score": 115854.520400287 }, { "content": "pub fn is_enabled() -> bool {\n\n ENABLED.read().map(|v| *v).unwrap_or_default()\n\n}\n\n\n", "file_path": "src/telemetry/mod.rs", "rank": 15, "score": 107215.33555576034 }, { "content": 
"pub fn get_launch_app<'a>(\n\n core: &'a Core,\n\n first: Option<&'a str>,\n\n second: Option<&'a str>,\n\n) -> LaunchTarget<'a> {\n\n match (first, second) {\n\n (Some(first), Some(second)) => {\n\n if let Some(app) = core.config().get_app(first) {\n\n LaunchTarget::AppAndTarget(app, second)\n\n } else if let Some(app) = core.config().get_app(second) {\n\n LaunchTarget::AppAndTarget(app, first)\n\n } else {\n\n LaunchTarget::Err(errors::user(\n\n format!(\"Could not find application with name '{}'.\", first).as_str(),\n\n format!(\"Make sure that you are using an application which is present in your configuration file, or install it with 'git-tool config add apps/{}'.\", first).as_str()))\n\n }\n\n }\n\n (Some(first), None) => {\n\n if let Some(app) = core.config().get_app(first) {\n\n LaunchTarget::App(app)\n", "file_path": "src/commands/helpers.rs", "rank": 16, "score": 104866.65654055579 }, { "content": "#[cfg_attr(test, mockable)]\n\nfn get_child_directories(from: &std::path::PathBuf, pattern: &str) -> Vec<std::path::PathBuf> {\n\n let depth = pattern.split(\"/\").count();\n\n\n\n get_directory_tree_to_depth(from, depth)\n\n}\n\n\n", "file_path": "src/core/resolver.rs", "rank": 17, "score": 103044.92379972339 }, { "content": "#[cfg(test)]\n\nfn score<T: AsRef<str>>(value: T, sequence: &str) -> Option<f32> {\n\n let matcher = SequenceMatcher::new(sequence);\n\n matcher.score(value)\n\n}\n\n\n", "file_path": "src/search/v2.rs", "rank": 18, "score": 101249.54711745655 }, { "content": "pub fn get_shells() -> Vec<Shell> {\n\n vec![\n\n Shell {\n\n name: \"powershell\",\n\n short_init: format!(\n\n r#\"Invoke-Expression (@(&\"{app}\" shell-init powershell --full) -join \"`n\")\"#,\n\n app = args().next().unwrap_or(\"git-tool\".to_string())\n\n ),\n\n long_init: format!(\n\n r#\"\n\nRegister-ArgumentCompleter -CommandName gt, git-tool, git-tool.exe -ScriptBlock {{\n\nparam([string]$commandName, [string]$wordToComplete, [int]$cursorPosition)\n\n\n\n&\"{app}\" 
complete --position $cursorPosition \"$wordToComplete\" | ForEach-Object {{\n\n [System.Management.Automation.CompletionResult]::new($_, $_, 'ParameterValue', $_)\n\n}}\n\n}} -Native\n\n \"#,\n\n app = args().next().unwrap_or(\"git-tool\".to_string())\n\n ),\n", "file_path": "src/completion/setup.rs", "rank": 19, "score": 100399.42219658528 }, { "content": "#[cfg(test)]\n\npub fn mock() -> mocks::MockOutput {\n\n let writer = mocks::MockOutput::default();\n\n let sacrificial_writer = writer.clone();\n\n output.mock_safe(move || MockResult::Return(Box::new(sacrificial_writer.clone())));\n\n writer\n\n}\n\n\n\n#[cfg(test)]\n\n#[allow(dead_code)]\n\npub mod mocks {\n\n use super::*;\n\n use std::{\n\n io::ErrorKind,\n\n sync::{Arc, Mutex},\n\n };\n\n\n\n #[derive(Clone)]\n\n pub struct MockOutput {\n\n written_data: Arc<Mutex<String>>,\n\n }\n", "file_path": "src/console/output.rs", "rank": 20, "score": 100399.42219658528 }, { "content": "pub fn set_enabled(enable: bool) {\n\n ENABLED.write().map(|mut v| *v = enable).unwrap_or_default()\n\n}\n\npub struct Session {\n\n raven: ClientInitGuard,\n\n}\n\n\n\nimpl Session {\n\n pub fn new() -> Self {\n\n let logger = sentry::integrations::log::SentryLogger::new();\n\n log::set_boxed_logger(Box::new(logger)).unwrap();\n\n log::set_max_level(log::LevelFilter::Debug);\n\n\n\n let raven = sentry::init((\n\n \"https://[email protected]/1486938\",\n\n sentry::ClientOptions {\n\n release: Some(version!(\"git-tool@v\").into()),\n\n default_integrations: true,\n\n attach_stacktrace: true,\n\n before_send: Some(Arc::new(|mut event| {\n", "file_path": "src/telemetry/mod.rs", "rank": 21, "score": 100399.42219658528 }, { "content": "struct RepoWithService<'a> {\n\n repo: &'a Repo,\n\n service: &'a Service,\n\n}\n\n\n\nimpl<'a> std::convert::Into<Value> for RepoWithService<'a> {\n\n fn into(self) -> Value {\n\n let service: Value = self.service.into();\n\n\n\n Value::Object(map! 
{\n\n \"Target\" => Value::Object(map!{\n\n \"Name\" => Value::String(self.repo.get_full_name()),\n\n \"Path\" => Value::String(String::from(self.repo.get_path().to_str().unwrap_or_default())),\n\n \"Exists\" => Value::Bool(self.repo.exists())\n\n }),\n\n \"Repo\" => Value::Object(map!{\n\n \"FullName\" => Value::String(self.repo.get_full_name()),\n\n \"Name\" => Value::String(self.repo.get_name()),\n\n \"Namespace\" => Value::String(self.repo.get_namespace()),\n\n \"Domain\" => Value::String(self.repo.get_domain()),\n", "file_path": "src/core/templates.rs", "rank": 22, "score": 96289.1009868557 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct NewRepo {\n\n pub name: String,\n\n pub private: bool,\n\n}\n\n\n", "file_path": "src/online/service/github.rs", "rank": 23, "score": 95971.36297309707 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct GitHubAsset {\n\n pub name: String,\n\n}\n\n\n\n#[cfg(test)]\n\npub mod mocks {\n\n use crate::core::HttpClient;\n\n\n\n pub fn mock_get_releases() {\n\n HttpClient::mock(vec![\n\n HttpClient::route(\n\n \"GET\",\n\n \"https://api.github.com/repos/SierraSoftworks/git-tool/releases\",\n\n 200,\n\n r#\"[\n\n {\n\n \"name\": \"Version 2.0.0\",\n\n \"tag_name\":\"v2.0.0\",\n\n \"body\": \"Example Release\",\n\n \"prerelease\": false,\n", "file_path": "src/update/github.rs", "rank": 24, "score": 95971.36297309707 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct GitHubRelease {\n\n pub name: String,\n\n pub tag_name: String,\n\n pub body: String,\n\n pub prerelease: bool,\n\n pub assets: Vec<GitHubAsset>,\n\n}\n\n\n", "file_path": "src/update/github.rs", "rank": 25, "score": 95971.36297309707 }, { "content": "#[async_trait::async_trait]\n\npub trait Registry: Send + Sync {\n\n async fn get_entries(&self, core: &Core) -> Result<Vec<String>, Error>;\n\n async fn get_entry(&self, core: &Core, id: &str) -> Result<Entry, Error>;\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone, Default)]\n\npub 
struct Entry {\n\n pub name: String,\n\n pub description: String,\n\n #[serde(default)]\n\n pub configs: Vec<EntryConfig>,\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Clone, Default)]\n\npub struct EntryConfig {\n\n pub platform: String,\n\n #[serde(default)]\n\n pub app: Option<EntryApp>,\n\n #[serde(default)]\n\n pub service: Option<EntryService>,\n", "file_path": "src/online/registry/mod.rs", "rank": 26, "score": 95118.58516765252 }, { "content": "#[cfg_attr(test, mockable)]\n\npub fn input() -> Box<dyn Read + Send> {\n\n Box::new(stdin())\n\n}\n\n\n", "file_path": "src/console/input.rs", "rank": 27, "score": 94591.9424029027 }, { "content": "#[cfg_attr(test, mockable)]\n\npub fn output() -> Box<dyn Write + Send> {\n\n Box::new(stdout())\n\n}\n\n\n", "file_path": "src/console/output.rs", "rank": 28, "score": 94591.9424029027 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct NewRepoResponse {\n\n pub id: u64,\n\n}\n\n\n", "file_path": "src/online/service/github.rs", "rank": 29, "score": 93636.82481559122 }, { "content": "fn has_whitespace<T: AsRef<str>>(value: T) -> bool {\n\n value\n\n .as_ref()\n\n .split_ascii_whitespace()\n\n .skip(1)\n\n .next()\n\n .is_some()\n\n}\n", "file_path": "src/completion/completer.rs", "rank": 30, "score": 93291.46358944691 }, { "content": "pub trait Target {\n\n fn get_name(&self) -> String;\n\n fn get_path(&self) -> std::path::PathBuf;\n\n fn exists(&self) -> bool;\n\n fn template_context(&self, config: &Config) -> Value;\n\n}\n", "file_path": "src/core/target.rs", "rank": 31, "score": 93267.7988830056 }, { "content": "pub fn commands() -> Vec<Arc<dyn CommandRunnable>> {\n\n vec![\n\n Arc::new(apps::AppsCommand {}),\n\n #[cfg(feature = \"auth\")]\n\n Arc::new(auth::AuthCommand {}),\n\n Arc::new(clone::CloneCommand {}),\n\n Arc::new(complete::CompleteCommand {}),\n\n Arc::new(config::ConfigCommand {}),\n\n Arc::new(fix::FixCommand {}),\n\n Arc::new(info::InfoCommand {}),\n\n Arc::new(ignore::IgnoreCommand {}),\n\n 
Arc::new(list::ListCommand {}),\n\n Arc::new(new::NewCommand {}),\n\n Arc::new(open::OpenCommand {}),\n\n Arc::new(scratch::ScratchCommand {}),\n\n Arc::new(services::ServicesCommand {}),\n\n Arc::new(setup::SetupCommand {}),\n\n Arc::new(shell_init::ShellInitCommand {}),\n\n Arc::new(update::UpdateCommand {}),\n\n Arc::new(switch::SwitchCommand {}),\n\n ]\n\n}\n", "file_path": "src/commands/mod.rs", "rank": 32, "score": 92644.95825243903 }, { "content": "pub fn get_dev_dir() -> std::path::PathBuf {\n\n get_repo_root().join(\"test\").join(\"devdir\")\n\n}\n\n\n", "file_path": "src/test/dirs.rs", "rank": 33, "score": 92562.9909277783 }, { "content": "pub fn get_repo_root() -> std::path::PathBuf {\n\n std::path::PathBuf::from(file!())\n\n .canonicalize()\n\n .unwrap()\n\n .parent()\n\n .and_then(|f| f.parent())\n\n .and_then(|f| f.parent())\n\n .unwrap()\n\n .to_path_buf()\n\n}\n\n\n", "file_path": "src/test/dirs.rs", "rank": 34, "score": 92562.9909277783 }, { "content": "pub fn default_commands() -> Vec<Arc<dyn CommandRunnable>> {\n\n commands()\n\n}\n\n\n", "file_path": "src/commands/mod.rs", "rank": 35, "score": 90809.16973337886 }, { "content": "#[allow(dead_code)]\n\npub fn services() -> Vec<Arc<dyn OnlineService>> {\n\n vec![Arc::new(github::GitHubService::default())]\n\n}\n", "file_path": "src/online/service/mod.rs", "rank": 36, "score": 90809.16973337886 }, { "content": "#[cfg_attr(test, mockable)]\n\nfn repo_from_relative_path<'a>(\n\n config: &'a Config,\n\n relative_path: &std::path::PathBuf,\n\n fallback_to_default: bool,\n\n) -> Result<Repo, Error> {\n\n if !relative_path.is_relative() {\n\n return Err(errors::system(\n\n &format!(\"The path '{}' used to resolve a repo was not relative.\", relative_path.display()),\n\n \"Please report this issue to us on GitHub, including the command you ran, so that we can troubleshoot the problem.\"));\n\n }\n\n\n\n let svc = service_from_relative_path(config, relative_path)?;\n\n let name_length = 
svc.get_pattern().split_terminator(\"/\").count() + 1;\n\n let mut name_parts: Vec<String> = relative_path\n\n .components()\n\n .take(name_length)\n\n .map(|c| c.as_os_str().to_str().unwrap().to_string())\n\n .collect();\n\n\n\n let mut true_path = relative_path.to_path_buf();\n", "file_path": "src/core/resolver.rs", "rank": 37, "score": 89970.20596615729 }, { "content": "#[cfg_attr(test, mockable)]\n\nfn service_from_relative_path<'a>(\n\n config: &'a Config,\n\n relative_path: &std::path::PathBuf,\n\n) -> Result<&'a Service, Error> {\n\n if !relative_path.is_relative() {\n\n return Err(errors::system(\n\n &format!(\"The path '{}' used to resolve a repo was not relative.\", relative_path.display()),\n\n \"Please report this issue to us on GitHub, including the command you ran, so that we can troubleshoot the problem.\"));\n\n }\n\n\n\n let mut components = relative_path.components();\n\n match components.next() {\n\n Some(std::path::Component::Normal(name)) => {\n\n match config.get_service(name.to_str().unwrap()) {\n\n Some(svc) => Ok(svc),\n\n None => config.get_default_service().ok_or(errors::user(\n\n \"No services configured for use with Git Tool.\",\n\n \"Make sure that you have registered a service in your git-tool config using `git-tool config add services/NAME`.\"\n\n ))\n\n }\n\n },\n\n _ => Err(errors::user(\n\n &format!(\"The path '{}' used to resolve a repo did not start with a service domain name.\", relative_path.display()),\n\n \"Make sure that your repository starts with the name of a service, such as 'github.com/sierrasoftworks/git-tool'.\"))\n\n }\n\n}\n\n\n", "file_path": "src/core/resolver.rs", "rank": 38, "score": 89970.20596615729 }, { "content": "#[cfg_attr(test, mockable)]\n\nfn get_directory_tree_to_depth(from: &std::path::PathBuf, depth: usize) -> Vec<std::path::PathBuf> {\n\n if depth == 0 {\n\n return vec![from.clone()];\n\n }\n\n\n\n from.read_dir()\n\n .map(|dirs| {\n\n dirs.map(|dir| match dir {\n\n Ok(d) => match d.file_type() 
{\n\n Ok(ft) => {\n\n if ft.is_dir() {\n\n Some(d.path())\n\n } else {\n\n None\n\n }\n\n }\n\n Err(_) => None,\n\n },\n\n Err(_) => None,\n\n })\n", "file_path": "src/core/resolver.rs", "rank": 39, "score": 88281.45885050498 }, { "content": "#[async_trait::async_trait]\n\npub trait Source: Default + Send + Sync {\n\n async fn get_releases(&self, core: &Core) -> Result<Vec<Release>, errors::Error>;\n\n async fn get_binary<W: io::Write + Send>(\n\n &self,\n\n core: &Core,\n\n release: &Release,\n\n variant: &ReleaseVariant,\n\n into: &mut W,\n\n ) -> Result<(), errors::Error>;\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, Eq, PartialEq)]\n\npub enum UpdatePhase {\n\n #[serde(rename = \"no-update\")]\n\n NoUpdate,\n\n #[serde(rename = \"prepare\")]\n\n Prepare,\n\n #[serde(rename = \"replace\")]\n\n Replace,\n\n #[serde(rename = \"cleanup\")]\n", "file_path": "src/update/api.rs", "rank": 47, "score": 83867.40232984249 }, { "content": "pub fn to_native_path<T: Into<PathBuf>>(path: T) -> std::path::PathBuf {\n\n let mut output = std::path::PathBuf::new();\n\n let input: PathBuf = path.into();\n\n\n\n output.extend(input.components().flat_map(|c| {\n\n match c {\n\n std::path::Component::Normal(n) => n\n\n .to_str()\n\n .unwrap()\n\n .split(\"/\")\n\n .map(|p| std::path::Component::Normal(p.as_ref()))\n\n .collect(),\n\n _ => vec![c],\n\n }\n\n }));\n\n\n\n output\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/fs/mod.rs", "rank": 48, "score": 79048.68055043212 }, { "content": "struct SequenceMatcher<'a> {\n\n pattern: &'a str,\n\n}\n\n\n\nimpl<'a> SequenceMatcher<'a> {\n\n pub fn new(pattern: &'a str) -> Self {\n\n Self { pattern }\n\n }\n\n\n\n pub fn order_by<'b, T, F, K>(&self, values: T, to_key: F) -> Vec<T::Item>\n\n where\n\n T: IntoIterator,\n\n T::Item: Clone + 'b,\n\n F: Fn(&T::Item) -> K,\n\n K: AsRef<str>,\n\n {\n\n if self.pattern.is_empty() {\n\n return values.into_iter().collect();\n\n }\n\n\n", "file_path": "src/search/v2.rs", "rank": 49, "score": 
59599.16911392681 }, { "content": "struct GitIgnoreFileSection {\n\n prologue: String,\n\n languages: Vec<String>,\n\n content: String,\n\n}\n\n\n\nimpl std::convert::Into<String> for GitIgnoreFileSection {\n\n fn into(self) -> String {\n\n if self.languages.is_empty() {\n\n return self.prologue;\n\n }\n\n\n\n format!(\"{}\\n## -------- Managed by Git Tool -------- ##\\n## Add any custom rules above this block ##\\n## ------------------------------------- ##\\n## @languages: {}\\n{}\", self.prologue, self.languages.join(\",\"), self.content)\n\n }\n\n}\n\n\n\nimpl GitIgnoreFileSection {\n\n fn parse(input: &str) -> Option<GitIgnoreFileSection> {\n\n let mut has_section = false;\n\n let mut in_header = true;\n", "file_path": "src/online/gitignore.rs", "rank": 50, "score": 58948.68158284406 }, { "content": "#[async_trait]\n\npub trait Task {\n\n async fn apply_repo(&self, core: &Core, repo: &core::Repo) -> Result<(), core::Error>;\n\n async fn apply_scratchpad(\n\n &self,\n\n core: &Core,\n\n scratch: &core::Scratchpad,\n\n ) -> Result<(), core::Error>;\n\n}\n\n\n\n#[cfg(test)]\n\npub struct TestTask {\n\n ran_repo: Mutex<Option<core::Repo>>,\n\n ran_scratchpad: Mutex<Option<core::Scratchpad>>,\n\n error: bool,\n\n}\n\n\n\n#[cfg(test)]\n\nimpl Default for TestTask {\n\n fn default() -> Self {\n\n Self {\n", "file_path": "src/tasks/mod.rs", "rank": 51, "score": 56582.451520732226 }, { "content": "#[test]\n\nfn test_dev_dir() {\n\n assert!(get_dev_dir().exists());\n\n assert!(get_dev_dir().join(\"github.com\").exists());\n\n}\n", "file_path": "src/test/dirs.rs", "rank": 52, "score": 55988.97976899029 }, { "content": "### Registry\n\n\n\nGit-Tool's registry is simply a folder hosted on [GitHub](https://github.com/SierraSoftworks/git-tool/tree/main/registry). Specifically, it's the `registry` folder in the Git-Tool repo. 
Within this folder are folders for `apps` and `services` to help keep things organized.\n\n\n\nFor those who prefer a visual representation of what the registry looks like.\n\n\n\n```text\n\n.\n\n└── registry/\n\n ├── apps/\n\n │ ├── bash.yaml\n\n→ │ └── your-app.yaml\n\n └── services/\n\n ├── github.yaml\n\n→ └── your-service.yaml\n\n```\n\n\n\n### Example Template\n\n\n\nHere is an example of what a registry template might look like and you are welcome to use it as the basis for your own. Keep reading for more information on what each field does and how to use them \\(or just wing it, if you're already familiar with how Git-Tool's [apps](apps.md) and [services](services.md) are defined\\).\n\n\n\n{% hint style=\"warning\" %}\n\nWe usually avoid bundling apps and services into a single file, but if you've got a compelling reason to do so - then we can certainly make an exception. _The example below includes both apps and services to show how to use them, not because it's a good idea._\n\n{% endhint %}\n\n\n\n```yaml\n", "file_path": "docs/config/registry.md", "rank": 53, "score": 54156.3612180805 }, { "content": "#### `description`\n\n\n\nThe description is used to explain to humans what your template will add and why that might be of use to them. If possible, use [plain English](https://en.wikipedia.org/wiki/Plain_English) and assume that the reader will not be familiar with the tool or service you are adding.\n\n\n\n```yaml\n\ndescription: |\n\n Launches the PowerShell Core shell as your current user.\n\n PowerShell Core must be installed on your platform before use.\n\n\n\n https://github.com/PowerShell/PowerShell/releases\n\n```\n\n\n\n#### `version`\n\n\n\nThe version is used to show humans when you have updated this template and it should follow [SemVer](https://semver.org) conventions. 
Currently Git-Tool doesn't keep track of this field, but in future we may add support for updating the items you have installed from the registry using this field.\n\n\n\n```yaml\n\nversion: 1.0.0\n\n```\n\n\n\n#### `configs`\n\n\n\nThis is where the heart of the template fits in. The `configs` field is a list \\(array\\) of config templates which Git-Tool will apply to your [config](overview.md) file. These templates can either be for an [app](apps.md) or a [service](services.md) and require that you specify the `platform` that they support. Keep reading for details on what fields you use within each config.\n\n\n\n```yaml\n\nconfigs:\n\n - platform: any\n\n app:\n\n name: shell\n\n command: pwsh\n\n```\n\n\n\n**configs.\\*.platform**\n\n\n\nWhen describing a config template, you need to provide the platform that it supports. This allows Git-Tool to apply the right template in situations where different platforms require different configuration.\n\n\n\nThe list of supported platform types includes:\n\n\n\n* `any` will apply to any platform.\n\n* `windows` will only apply to Windows platforms.\n\n* `linux` will only apply to Linux platforms.\n\n* `darwin` will only apply to Mac OS X platforms.\n\n\n\n```yaml\n\nconfigs:\n\n - platform: linux\n\n```\n\n\n", "file_path": "docs/config/registry.md", "rank": 54, "score": 54153.874161103464 }, { "content": "# yaml-language-server: $schema=https://schemas.sierrasoftworks.com/git-tool/v1/template.schema.json\n\nname: Demo\n\ndescription: This is an example of how to create a config template\n\nversion: 1.0.0\n\nconfigs:\n\n # Your config should include either a service (like this)...\n\n - platform: any\n\n service:\n\n domain: github.com\n\n website: \"https://{{ .Service.Domain }}/{{ .Repo.FullName }}\"\n\n httpUrl: \"https://{{ .Service.Domain }}/{{ .Repo.FullName }}.git\"\n\n gitUrl: \"git@{{ .Service.Domain }}:{{ .Repo.FullName }}.git\"\n\n pattern: \"*/*\"\n\n\n\n # Or an app (like this) but usually not both.\n\n - 
platform: windows\n\n app:\n\n name: shell\n\n command: powershell.exe\n\n\n\n # You can also add platform specific versions of each app\n\n - platform: linux\n\n app:\n\n name: shell\n\n command: pwsh\n\n```\n\n\n\n### Template Structure\n\n\n\nRegistry templates are `yaml` files \\(with the `.yaml` extension\\) which Git-Tool will use to update your local config. They have a bit of metadata to explain to humans what they do, but the most important part is the list of `configs` which tell Git-Tool how to modify your local [config](overview.md) file.\n\n\n\n{% hint style=\"success\" %}\n\nWe publish a [JSONSchema](https://json-schema.org) schema for Git-Tool templates which your editor can use to give you autocomplete and automatic validation. To include it, just add the following to the top of your template.\n\n\n\n```yaml\n\n# yaml-language-server: $schema=https://schemas.sierrasoftworks.com/git-tool/v1/template.schema.json\n\n```\n\n{% endhint %}\n\n\n\n#### `name`\n\n\n\nThis is the human readable name you wish to give to this template. It doesn't need to match the names given to [apps](apps.md) or [services](services.md) contained within, but it usually should be pretty close.\n\n\n\n```yaml\n\nname: PowerShell Core\n\n```\n\n\n", "file_path": "docs/config/registry.md", "rank": 55, "score": 54153.83702149738 }, { "content": "---\n\ndescription: >-\n\n Quickly configure Git-Tool by installing configuration snippets maintained by\n\n the community.\n\n---\n\n\n\n# Registry\n\n\n\nWriting your own [service](services.md) and [application](apps.md) entries can sometimes be a bit more work than we're feeling like and since Git-Tool is meant to make your life easier, not harder, we figured it would be a good idea to try and simplify this part for you as well.\n\n\n\nTo solve the problem, we added a central registry of config templates which you can search through and install with the [`gt config add`](../commands/config.md#config-add) command. 
This makes the process of setting up your Git-Tool config as easy as doing the following:\n\n\n\n```bash\n\n# Get a list of all the apps and services available to me\n\ngt config list\n\n\n\n# Add my favourite apps\n\ngt config add apps/powershell\n\ngt config add apps/powershell-admin\n\ngt config add apps/vscode\n\ngt config add apps/visualstudio\n\n\n\n# Add the services I use\n\ngt config add services/github\n\ngt config add services/azure-devops\n\n```\n\n\n\n## Browse\n\n\n\nTo get the latest list of apps and services in the registry, you can always use [`gt config list`](../commands/config.md#config-list) straight from your command line. You can also [browse the templates on GitHub](https://github.com/SierraSoftworks/git-tool/tree/main/registry).\n\n\n\n## Contributing\n\n\n\nThanks for choosing to contribute to the Git-Tool community :heart:! We'd like to make this as easy as possible, so keep reading for We're so happy that you're considering contributing an app or service config to Git-Tool's registry. In theory, all you need to do is write a `yaml` file and submit a PR to our \\[GitHub repo\\]\\[git-tool\\] to get it added to the `registry` folder. 
We have some automated tests in place which should help ensure that what you are submitting is valid and those run locally as part of the standard Git-Tool test suite \\(run with `cargo test` if you have Rust installed\\).\n\n\n", "file_path": "docs/config/registry.md", "rank": 56, "score": 54152.41221125572 }, { "content": "### Creating a PR\n\n\n\nThe easiest way to get started adding a new [app](apps.md) or [service](services.md) to the registry is by using the GitHub web editor to create your template and submit a PR.\n\n\n\n* [Add a new service →](https://github.com/SierraSoftworks/git-tool/new/main/registry/services)\n\n* [Add a new app →](https://github.com/SierraSoftworks/git-tool/new/main/registry/apps)\n\n\n\nFill in the name of your app or service \\(this is the name people will use to install it, so keep it short but descriptive\\) and add your template. Once you're done, create a new PR for your change and we'll get to reviewing it for you!\n\n\n\n#### Automated Testing\n\n\n\nOur automated test suite on GitHub will check your PR to make sure that your template can be loaded by Git-Tool correctly and will warn you if there are any problems.\n\n\n\nYou can also run this same test suite locally if you have `rust` installed on your machine by cloning the Git-Tool repo and running `cargo test` in it. If you already have Git-Tool set up, this is as easy as:\n\n\n\n```bash\n\n# Replace this with your Git-Tool fork, if you have one\n\ngt o github.com/SierraSoftworks/git-tool\n\ncargo test\n\n```\n\n\n", "file_path": "docs/config/registry.md", "rank": 57, "score": 54152.18134671795 }, { "content": "**configs.\\*.app**\n\n\n\nWhen creating a config template which adds an [app](apps.md), you will use the `app` field to provide an application definition as you would in your normal [config file](apps.md). 
All of the normal [app](apps.md) fields are supported.\n\n\n\n::: warning If you specify the `app` field, you will not be able to provide the `service` field in the same entry. Add a new item to the `configs` array if you need to do this. :::\n\n\n\n```yaml\n\nconfigs:\n\n - app:\n\n name: shell\n\n command: bash\n\n args: [] # Optional\n\n environment: [] # Optional\n\n```\n\n\n\n**configs.\\*.service**\n\n\n\nWhen creating a config template which adds a [service](services.md), you will use the `service` field to provide a service definition as you would in your normal [config file](services.md). All of the normal [service](services.md) fields are supported.\n\n\n\n{% hint style=\"warning\" %}\n\nIf you specify the `app` field, you will not be able to provide the `service` field in the same entry. Add a new item to the `configs` array if you need to do this.\n\n{% endhint %}\n\n\n\n```yaml\n\nconfigs:\n\n - service:\n\n domain: github.com\n\n website: \"https://{{ .Service.Domain }}/{{ .Repo.FullName }}\"\n\n httpUrl: \"https://{{ .Service.Domain }}/{{ .Repo.FullName }}.git\"\n\n gitUrl: \"git@{{ .Service.Domain }}:{{ .Repo.FullName }}.git\"\n\n pattern: \"*/*\"\n\n```\n\n\n", "file_path": "docs/config/registry.md", "rank": 58, "score": 54149.91926429745 }, { "content": "#[async_trait]\n\npub trait CommandRunnable: Command {\n\n async fn run(&self, core: &Core, matches: &ArgMatches) -> Result<i32, errors::Error>;\n\n async fn complete(&self, core: &Core, completer: &Completer, matches: &ArgMatches);\n\n}\n\n\n", "file_path": "src/commands/mod.rs", "rank": 59, "score": 52819.318322148 }, { "content": "pub trait Command: Send + Sync {\n\n fn name(&self) -> String;\n\n fn app<'a>(&self) -> App<'a>;\n\n}\n\n\n", "file_path": "src/commands/mod.rs", "rank": 60, "score": 51657.80853828366 }, { "content": "#[async_trait]\n\npub trait OnlineService: Send + Sync {\n\n fn handles(&self, service: &Service) -> bool;\n\n async fn ensure_created(&self, core: &Core, repo: &Repo) 
-> Result<(), Error>;\n\n}\n\n\n", "file_path": "src/online/service/mod.rs", "rank": 61, "score": 49526.32706311605 }, { "content": "use super::{Config, Error, HttpClient, KeyChain, Launcher, Resolver};\n\nuse std::{io::Write, sync::Arc};\n\n\n\n#[cfg(test)]\n\nuse mocktopus::macros::*;\n\n\n\n#[cfg_attr(test, mockable)]\n\npub struct Core {\n\n config: Arc<Config>,\n\n launcher: Arc<Launcher>,\n\n resolver: Arc<Resolver>,\n\n keychain: Arc<KeyChain>,\n\n http_client: Arc<HttpClient>,\n\n}\n\n\n\n#[cfg_attr(test, mockable)]\n\nimpl Core {\n\n pub fn builder() -> CoreBuilder {\n\n let config = Arc::new(Config::default());\n\n CoreBuilder { config }\n", "file_path": "src/core/core.rs", "rank": 62, "score": 48465.83043683965 }, { "content": " }\n\n\n\n pub fn http_client(&self) -> &HttpClient {\n\n &self.http_client\n\n }\n\n}\n\n\n\npub struct CoreBuilder {\n\n config: Arc<Config>,\n\n}\n\n\n\nimpl std::convert::Into<Core> for CoreBuilder {\n\n fn into(self) -> Core {\n\n self.build()\n\n }\n\n}\n\n\n\nimpl CoreBuilder {\n\n pub fn build(self) -> Core {\n\n Core {\n", "file_path": "src/core/core.rs", "rank": 63, "score": 48458.701998607336 }, { "content": " config: self.config.clone(),\n\n launcher: Arc::new(Launcher::from(self.config.clone())),\n\n resolver: Arc::new(Resolver::from(self.config.clone())),\n\n keychain: Arc::new(KeyChain::from(self.config.clone())),\n\n http_client: Arc::new(HttpClient::from(self.config.clone())),\n\n }\n\n }\n\n\n\n pub fn with_config(self, config: &Config) -> Self {\n\n let c = Arc::new(config.clone());\n\n\n\n Self { config: c.clone() }\n\n }\n\n\n\n pub fn with_config_file(self, cfg_file: &str) -> Result<Self, Error> {\n\n let cfg = Config::from_file(&std::path::PathBuf::from(cfg_file))?;\n\n\n\n Ok(self.with_config(&cfg))\n\n }\n\n}\n", "file_path": "src/core/core.rs", "rank": 64, "score": 48453.530020161066 }, { "content": " }\n\n\n\n pub fn config(&self) -> &Config {\n\n &self.config\n\n }\n\n\n\n pub fn keychain(&self) -> 
&KeyChain {\n\n &self.keychain\n\n }\n\n\n\n pub fn launcher(&self) -> &Launcher {\n\n &self.launcher\n\n }\n\n\n\n pub fn resolver(&self) -> &Resolver {\n\n &self.resolver\n\n }\n\n\n\n pub fn output(&self) -> Box<dyn Write + Send> {\n\n crate::console::output::output()\n", "file_path": "src/core/core.rs", "rank": 65, "score": 48448.88186674818 }, { "content": "use super::*;\n\nuse crate::{errors, fs::to_native_path};\n\nuse std::{fs::read_dir, fs::read_to_string, path::PathBuf};\n\n\n\npub struct FileRegistry {\n\n path: PathBuf,\n\n}\n\n\n\nimpl FileRegistry {\n\n #[allow(dead_code)]\n\n fn new(path: PathBuf) -> Self {\n\n Self { path }\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Registry for FileRegistry {\n\n async fn get_entries(&self, _core: &Core) -> Result<Vec<String>, Error> {\n\n let mut entries = Vec::new();\n\n\n", "file_path": "src/online/registry/file_registry.rs", "rank": 66, "score": 47501.59085445164 }, { "content": " ),\n\n \"Check that the registry entry is valid YAML and matches the registry entry schema.\",\n\n err,\n\n )\n\n })?)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::test::get_repo_root;\n\n\n\n #[tokio::test]\n\n async fn get_entries() {\n\n let registry = FileRegistry::new(get_repo_root().join(\"registry\"));\n\n let core = Core::builder().build();\n\n\n\n let entries = registry.get_entries(&core).await.unwrap();\n\n assert_ne!(entries.len(), 0);\n", "file_path": "src/online/registry/file_registry.rs", "rank": 67, "score": 47489.67293338412 }, { "content": " }\n\n\n\n async fn get_entry(&self, _core: &Core, id: &str) -> Result<Entry, Error> {\n\n let path = self.path.join(to_native_path(format!(\"{}.yaml\", id)));\n\n let contents = read_to_string(&path).map_err(|err| {\n\n errors::user_with_internal(\n\n &format!(\n\n \"Could not read the local filesystem registry entry '{}' due to an OS-level error.\",\n\n path.display()\n\n ),\n\n \"Check that the file exists and that Git-Tool has read 
access to it.\",\n\n err,\n\n )\n\n })?;\n\n\n\n Ok(serde_yaml::from_str(&contents).map_err(|err| {\n\n errors::user_with_internal(\n\n &format!(\n\n \"Could not deserialize the registry entry '{}' due to a YAML parser error.\",\n\n id\n", "file_path": "src/online/registry/file_registry.rs", "rank": 68, "score": 47489.64133175488 }, { "content": " async fn validate_entry(registry: &FileRegistry, name: &str) -> Result<bool, Error> {\n\n let core = Core::builder().build();\n\n let entry = registry.get_entry(&core, name).await?;\n\n let mut valid = true;\n\n\n\n let is_app = name.starts_with(\"apps/\");\n\n let is_service = name.starts_with(\"services/\");\n\n\n\n if !name.is_ascii() {\n\n println!(\"- {} has a non-ascii ID\", name);\n\n valid = false;\n\n }\n\n\n\n if entry.name.is_empty() {\n\n println!(\"- {} has an empty name field\", name);\n\n valid = false;\n\n }\n\n\n\n if entry.description.is_empty() {\n\n println!(\"- {} has an empty description field\", name);\n", "file_path": "src/online/registry/file_registry.rs", "rank": 69, "score": 47487.25423161681 }, { "content": " assert!(entries.iter().any(|i| i == \"apps/bash\"));\n\n }\n\n\n\n #[tokio::test]\n\n async fn get_entry() {\n\n let registry = FileRegistry::new(get_repo_root().join(\"registry\"));\n\n let core = Core::builder().build();\n\n\n\n let entry = registry.get_entry(&core, \"apps/bash\").await.unwrap();\n\n assert_eq!(entry.name, \"Bash\");\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod registry_compliance {\n\n use super::*;\n\n use crate::test::get_repo_root;\n\n\n\n #[tokio::test]\n\n async fn registry_validation() {\n", "file_path": "src/online/registry/file_registry.rs", "rank": 70, "score": 47487.14405808692 }, { "content": " let registry = FileRegistry::new(get_repo_root().join(\"registry\"));\n\n let core = Core::builder().build();\n\n\n\n let mut valid = true;\n\n for entry in registry.get_entries(&core).await.unwrap() {\n\n println!(\"Validating {}\", entry);\n\n match 
validate_entry(&registry, &entry).await {\n\n Ok(v) => {\n\n valid = valid && v;\n\n }\n\n Err(e) => {\n\n println!(\"{}\", e.message());\n\n valid = false;\n\n }\n\n }\n\n }\n\n\n\n assert!(valid, \"all registry entries should be valid\");\n\n }\n\n\n", "file_path": "src/online/registry/file_registry.rs", "rank": 71, "score": 47482.6917136743 }, { "content": " for entry in read_dir(&self.path).map_err(|err| {\n\n errors::user_with_internal(\n\n &format!(\n\n \"Could not enumerate the directories within the local filesystem registry folder '{}' due to an OS-level error.\",\n\n self.path.display()\n\n ),\n\n \"Check that the directory exists and that Git-Tool has read access to it.\",\n\n err,\n\n )\n\n })? {\n\n let type_entry = entry.map_err(|err| errors::user_with_internal(\n\n &format!(\"Could not enumerate the directories within the local filesystem registry folder '{}' due to an OS-level error.\", self.path.display()),\n\n \"Check that the directory exists and that Git-Tool has read access to it and its children.\",\n\n err\n\n ))?;\n\n\n\n if !type_entry.path().is_dir() {\n\n continue;\n\n }\n\n\n", "file_path": "src/online/registry/file_registry.rs", "rank": 72, "score": 47478.68014392419 }, { "content": " for entry in read_dir(type_entry.path()).map_err(|err| {\n\n errors::user_with_internal(\n\n &format!(\n\n \"Could not enumerate the files within the local filesystem registry folder '{}' due to an OS-level error.\",\n\n type_entry.path().display()\n\n ),\n\n \"Check that the directory exists and that Git-Tool has read access to it.\",\n\n err,\n\n )\n\n })? 
{\n\n let file_entry =\n\n entry.map_err(|err| {\n\n errors::user_with_internal(\n\n &format!(\n\n \"Could not enumerate the files within the local filesystem registry folder '{}' due to an OS-level error.\",\n\n type_entry.path().display()\n\n ),\n\n \"Check that the directory exists and that Git-Tool has read access to it and the files within.\",\n\n err,\n\n )\n", "file_path": "src/online/registry/file_registry.rs", "rank": 73, "score": 47478.63820713313 }, { "content": " }\n\n }\n\n }\n\n }\n\n\n\n Ok(valid)\n\n }\n\n\n\n fn valid_service_pattern(pattern: &str) -> bool {\n\n let mut expecting_slash = false;\n\n for c in pattern.chars() {\n\n if expecting_slash && c != '/' {\n\n return false;\n\n }\n\n\n\n if !expecting_slash && c != '*' {\n\n return false;\n\n }\n\n\n\n expecting_slash = !expecting_slash;\n\n }\n\n\n\n true\n\n }\n\n}\n", "file_path": "src/online/registry/file_registry.rs", "rank": 74, "score": 47473.502816071734 }, { "content": " } else if !valid_service_pattern(&svc.pattern) {\n\n println!(\"- {}#{} has a service entry with an invalid pattern, it should match the regex: /^\\\\*(\\\\/\\\\*)*$/\", name, &config.platform);\n\n valid = false;\n\n }\n\n\n\n if svc.website.is_empty() {\n\n println!(\n\n \"- {}#{} has a service entry which is missing its website template\",\n\n name, &config.platform\n\n );\n\n valid = false;\n\n }\n\n\n\n if svc.http_url.is_empty() {\n\n println!(\n\n \"- {}#{} has a service entry which is missing its Git+HTTP template\",\n\n name, &config.platform\n\n );\n\n valid = false;\n\n }\n", "file_path": "src/online/registry/file_registry.rs", "rank": 75, "score": 47472.37527733615 }, { "content": " }\n\n\n\n if let Err(err) = test_service.get_git_url(&test_repo) {\n\n println!(\n\n \"- {}#{} could not render the Git+SSH URL for a repository: {}\",\n\n name,\n\n &config.platform,\n\n err.message()\n\n );\n\n valid = false;\n\n }\n\n\n\n if let Err(err) = test_service.get_http_url(&test_repo) {\n\n println!(\n\n \"- {}#{} 
could not render the Git+HTTP URL for a repository: {}\",\n\n name,\n\n &config.platform,\n\n err.message()\n\n );\n\n valid = false;\n", "file_path": "src/online/registry/file_registry.rs", "rank": 76, "score": 47470.928816965374 }, { "content": "\n\n if svc.git_url.is_empty() {\n\n println!(\n\n \"- {}#{} has a service entry which is missing its Git+SSH URL template\",\n\n name, &config.platform\n\n );\n\n valid = false;\n\n }\n\n\n\n if valid {\n\n let test_service: Service = svc.into();\n\n\n\n if let Err(err) = test_service.get_website(&test_repo) {\n\n println!(\n\n \"- {}#{} could not render the website URL for a repository: {}\",\n\n name,\n\n &config.platform,\n\n err.message()\n\n );\n\n valid = false;\n", "file_path": "src/online/registry/file_registry.rs", "rank": 77, "score": 47470.677263670164 }, { "content": " })?;\n\n\n\n if file_entry\n\n .file_name()\n\n .to_str()\n\n .map(|s| s.ends_with(\".yaml\"))\n\n .unwrap_or_default()\n\n {\n\n if let Some(file_name) = PathBuf::from(file_entry.file_name()).file_stem() {\n\n entries.push(format!(\n\n \"{}/{}\",\n\n PathBuf::from(type_entry.file_name()).display(),\n\n PathBuf::from(file_name).display()\n\n ));\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(entries)\n", "file_path": "src/online/registry/file_registry.rs", "rank": 78, "score": 47469.27922976297 }, { "content": " );\n\n valid = false;\n\n }\n\n }\n\n\n\n if let Some(svc) = config.service {\n\n if svc.domain.is_empty() {\n\n println!(\n\n \"- {}#{} has a service entry which is missing its domain\",\n\n name, &config.platform\n\n );\n\n valid = false;\n\n }\n\n\n\n if svc.pattern.is_empty() {\n\n println!(\n\n \"- {}#{} has a service entry which is missing its pattern\",\n\n name, &config.platform\n\n );\n\n valid = false;\n", "file_path": "src/online/registry/file_registry.rs", "rank": 79, "score": 47465.61592296582 }, { "content": " valid = false;\n\n }\n\n\n\n let test_repo = Repo::new(\n\n \"example.com/test/repo\",\n\n 
PathBuf::from(\"/dev/example.com/test/repo\"),\n\n );\n\n\n\n for config in entry.configs {\n\n if config.platform.is_empty() {\n\n println!(\n\n \"- {} has a config which is missing the platform field\",\n\n name\n\n );\n\n valid = false;\n\n }\n\n\n\n if is_app && config.app.is_none() {\n\n println!(\"- {} is in the apps/ namespace but has a configuration which is missing an app setting\", name);\n\n valid = false;\n", "file_path": "src/online/registry/file_registry.rs", "rank": 80, "score": 47465.61592296582 }, { "content": " }\n\n\n\n if is_service && config.service.is_none() {\n\n println!(\"- {} is in the services/ namespace but has a configuration which is missing a service setting\", name);\n\n valid = false;\n\n }\n\n\n\n if let Some(app) = config.app {\n\n if app.name.is_empty() {\n\n println!(\n\n \"- {}#{} has an app entry which is missing its name\",\n\n name, &config.platform\n\n );\n\n valid = false;\n\n }\n\n\n\n if app.command.is_empty() {\n\n println!(\n\n \"- {}#{} has an app entry which is missing its command\",\n\n name, &config.platform\n", "file_path": "src/online/registry/file_registry.rs", "rank": 81, "score": 47465.61592296582 }, { "content": "use super::release::*;\n\nuse crate::{core::Core, errors};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::io;\n\nuse std::path::PathBuf;\n\n\n\n#[async_trait::async_trait]\n", "file_path": "src/update/api.rs", "rank": 82, "score": 40693.18069252576 }, { "content": " }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::core::*;\n\n use mocktopus::mocking::*;\n\n use tempfile::tempdir;\n\n\n\n #[tokio::test]\n\n async fn run() {\n\n let cmd = OpenCommand {};\n\n\n\n let args = cmd.app().get_matches_from(vec![\"open\", \"test-app\", \"repo\"]);\n\n\n\n let cfg = Config::from_str(\n\n \"\n\ndirectory: /dev\n", "file_path": "src/commands/open.rs", "rank": 83, "score": 40678.8889124437 }, { "content": " Cleanup,\n\n}\n\n\n\nimpl Default for UpdatePhase {\n\n fn default() -> 
Self {\n\n UpdatePhase::NoUpdate\n\n }\n\n}\n\n\n\nimpl ToString for UpdatePhase {\n\n fn to_string(&self) -> String {\n\n match self {\n\n UpdatePhase::NoUpdate => \"no-update\",\n\n UpdatePhase::Prepare => \"prepare\",\n\n UpdatePhase::Replace => \"replace\",\n\n UpdatePhase::Cleanup => \"cleanup\",\n\n }\n\n .to_string()\n\n }\n\n}\n", "file_path": "src/update/api.rs", "rank": 84, "score": 40678.871774221305 }, { "content": "\n\n#[derive(Debug, Default, Serialize, Deserialize, Eq, PartialEq)]\n\npub struct UpdateState {\n\n #[serde(rename = \"app\", default, skip_serializing_if = \"Option::is_none\")]\n\n pub target_application: Option<PathBuf>,\n\n\n\n #[serde(rename = \"update\", default, skip_serializing_if = \"Option::is_none\")]\n\n pub temporary_application: Option<PathBuf>,\n\n\n\n pub phase: UpdatePhase,\n\n}\n\n\n\nimpl UpdateState {\n\n pub fn for_phase(&self, phase: UpdatePhase) -> Self {\n\n UpdateState {\n\n target_application: self.target_application.clone(),\n\n temporary_application: self.temporary_application.clone(),\n\n phase,\n\n }\n\n }\n", "file_path": "src/update/api.rs", "rank": 85, "score": 40678.81493846484 }, { "content": "use super::Command;\n\nuse super::*;\n\nuse crate::core::Target;\n\nuse crate::tasks::*;\n\nuse clap::{App, Arg, ArgMatches};\n\n\n\npub struct OpenCommand {}\n\n\n\nimpl Command for OpenCommand {\n\n fn name(&self) -> String {\n\n String::from(\"open\")\n\n }\n\n\n\n fn app<'a>(&self) -> App<'a> {\n\n App::new(self.name().as_str())\n\n .version(\"1.0\")\n\n .visible_aliases(&vec![\"o\", \"run\"])\n\n .about(\"opens a repository using an application defined in your config\")\n\n .long_about(\"This command launches an application defined in your configuration within the specified repository. You can specify any combination of alias, app and repo. Aliases take precedence over repos, which take precedence over apps. When specifying an app, it should appear before the repo/alias parameter. 
If you are already inside a repository, you can specify only an app and it will launch in the context of the current repo.\n\n \n", "file_path": "src/commands/open.rs", "rank": 86, "score": 40678.477125999525 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_serialize() {\n\n assert_eq!(\n\n serde_json::to_string(&UpdateState {\n\n target_application: Some(PathBuf::from(\"/bin/git-tool\")),\n\n temporary_application: Some(PathBuf::from(\"/tmp/git-tool-update\")),\n\n phase: UpdatePhase::Replace\n\n })\n\n .unwrap(),\n\n r#\"{\"app\":\"/bin/git-tool\",\"update\":\"/tmp/git-tool-update\",\"phase\":\"replace\"}\"#\n\n );\n\n\n\n assert_eq!(\n\n serde_json::to_string(&UpdateState {\n", "file_path": "src/update/api.rs", "rank": 87, "score": 40677.01733466777 }, { "content": " helpers::LaunchTarget::Err(err) => {\n\n return Err(err)\n\n },\n\n helpers::LaunchTarget::None => {\n\n return Err(errors::user(\n\n \"You did not specify the name of a repository to use.\",\n\n \"Remember to specify a repository name like this: 'git-tool open github.com/sierrasoftworks/git-tool'.\"))\n\n }\n\n };\n\n\n\n if !repo.exists() {\n\n match sequence![GitClone {}].apply_repo(core, &repo).await {\n\n Ok(()) => {}\n\n Err(_) if matches.is_present(\"create\") => {\n\n sequence![\n\n GitInit {},\n\n GitRemote { name: \"origin\" },\n\n GitCheckout { branch: \"main\" },\n\n CreateRemote {\n\n enabled: !matches.is_present(\"no-create-remote\")\n", "file_path": "src/commands/open.rs", "rank": 88, "score": 40673.946230651585 }, { "content": " target_application: None,\n\n temporary_application: Some(PathBuf::from(\"/tmp/git-tool-update\")),\n\n phase: UpdatePhase::Cleanup\n\n })\n\n .unwrap(),\n\n r#\"{\"update\":\"/tmp/git-tool-update\",\"phase\":\"cleanup\"}\"#\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_deserialize() {\n\n let update = UpdateState {\n\n target_application: None,\n\n temporary_application: 
Some(PathBuf::from(\"/tmp/git-tool-update\")),\n\n phase: UpdatePhase::Cleanup,\n\n };\n\n\n\n let deserialized: UpdateState =\n\n serde_json::from_str(r#\"{\"update\":\"/tmp/git-tool-update\",\"phase\":\"cleanup\"}\"#).unwrap();\n\n assert_eq!(deserialized, update);\n", "file_path": "src/update/api.rs", "rank": 89, "score": 40673.728205072744 }, { "content": " async fn run(&self, core: &Core, matches: &ArgMatches) -> Result<i32, errors::Error> {\n\n if core.config().get_config_file().is_none() {\n\n warn!(\"No configuration file has been loaded, continuing with defaults.\");\n\n writeln!(core.output(),\"Hi! It looks like you haven't set up a Git-Tool config file yet. Try running `git-tool setup` to get started or make sure you've set the GITTOOL_CONFIG environment variable.\\n\")?;\n\n }\n\n\n\n let (app, repo) = match helpers::get_launch_app(core, matches.value_of(\"app\"), matches.value_of(\"repo\")) {\n\n helpers::LaunchTarget::AppAndTarget(app, target) => {\n\n (app, core.resolver().get_best_repo(target)?)\n\n },\n\n helpers::LaunchTarget::App(app) => {\n\n (app, core.resolver().get_current_repo()?)\n\n },\n\n helpers::LaunchTarget::Target(target) => {\n\n let app = core.config().get_default_app().ok_or(errors::user(\n\n \"No default application available.\",\n\n \"Make sure that you add an app to your config file using 'git-tool config add apps/bash' or similar.\"))?;\n\n\n\n (app, core.resolver().get_best_repo(target)?)\n\n },\n", "file_path": "src/commands/open.rs", "rank": 90, "score": 40673.65840530413 }, { "content": " }\n\n\n\n #[test]\n\n fn test_to_string() {\n\n assert_eq!(UpdatePhase::Prepare.to_string(), \"prepare\");\n\n assert_eq!(UpdatePhase::Replace.to_string(), \"replace\");\n\n assert_eq!(UpdatePhase::Cleanup.to_string(), \"cleanup\");\n\n assert_eq!(UpdatePhase::NoUpdate.to_string(), \"no-update\");\n\n }\n\n\n\n #[test]\n\n fn test_for_phase() {\n\n let update = UpdateState {\n\n target_application: 
Some(PathBuf::from(\"/bin/git-tool\")),\n\n temporary_application: Some(PathBuf::from(\"/tmp/git-tool-update\")),\n\n phase: UpdatePhase::Replace,\n\n };\n\n\n\n let new_update = update.for_phase(UpdatePhase::Cleanup);\n\n assert_eq!(new_update.target_application, update.target_application);\n", "file_path": "src/update/api.rs", "rank": 91, "score": 40670.63273563918 }, { "content": " assert_eq!(\n\n new_update.temporary_application,\n\n update.temporary_application\n\n );\n\n assert_eq!(\n\n update.phase,\n\n UpdatePhase::Replace,\n\n \"the old update entry should not be modified\"\n\n );\n\n assert_eq!(\n\n new_update.phase,\n\n UpdatePhase::Cleanup,\n\n \"the new update entry should have the correct phase\"\n\n );\n\n }\n\n}\n", "file_path": "src/update/api.rs", "rank": 92, "score": 40670.63273563918 }, { "content": "New applications can be configured either by making changes to your configuration, or by using the `git-tool config add` command to install them from the GitHub registry. 
For example, you can use `gt config add apps/bash` to configure `bash` as an available app.\")\n\n .arg(Arg::new(\"app\")\n\n .about(\"The name of the application to launch.\")\n\n .index(1))\n\n .arg(Arg::new(\"repo\")\n\n .about(\"The name of the repository to open.\")\n\n .index(2))\n\n .arg(Arg::new(\"create\")\n\n .long(\"create\")\n\n .short('c')\n\n .about(\"create the repository if it does not exist.\"))\n\n .arg(Arg::new(\"no-create-remote\")\n\n .long(\"no-create-remote\")\n\n .short('R')\n\n .about(\"prevent the creation of a remote repository (on supported services)\"))\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl CommandRunnable for OpenCommand {\n", "file_path": "src/commands/open.rs", "rank": 93, "score": 40667.53432898238 }, { "content": " }\n\n ]\n\n .apply_repo(core, &repo)\n\n .await?;\n\n }\n\n Err(e) => return Err(e),\n\n }\n\n }\n\n\n\n let status = core.launcher().run(app, &repo).await?;\n\n Ok(status)\n\n }\n\n\n\n async fn complete(&self, core: &Core, completer: &Completer, _matches: &ArgMatches) {\n\n completer.offer(\"--create\");\n\n completer.offer(\"--no-create-remote\");\n\n completer.offer_many(core.config().get_aliases().map(|(a, _)| a));\n\n completer.offer_many(core.config().get_apps().map(|a| a.get_name()));\n\n\n\n let default_svc = core\n", "file_path": "src/commands/open.rs", "rank": 94, "score": 40663.99669385014 }, { "content": " );\n\n\n\n MockResult::Return(Box::pin(async move { Ok(5) }))\n\n });\n\n\n\n match cmd.run(&core, &args).await {\n\n Ok(status) => {\n\n assert_eq!(\n\n status, 5,\n\n \"the status code of the child app should be forwarded\"\n\n );\n\n }\n\n Err(err) => panic!(\"{}\", err.message()),\n\n }\n\n }\n\n}\n", "file_path": "src/commands/open.rs", "rank": 95, "score": 40662.46006787745 }, { "content": " .config()\n\n .get_default_service()\n\n .map(|s| s.get_domain())\n\n .unwrap_or_default();\n\n\n\n match core.resolver().get_repos() {\n\n Ok(repos) => {\n\n completer.offer_many(\n\n repos\n\n .iter()\n\n 
.filter(|r| r.get_domain() == default_svc)\n\n .map(|r| r.get_full_name()),\n\n );\n\n completer.offer_many(\n\n repos\n\n .iter()\n\n .map(|r| format!(\"{}/{}\", r.get_domain(), r.get_full_name())),\n\n );\n\n }\n\n _ => {}\n", "file_path": "src/commands/open.rs", "rank": 96, "score": 40658.133649762945 }, { "content": "\n\napps:\n\n - name: test-app\n\n command: test\n\n args:\n\n - '{{ .Target.Name }}'\n\n\n\nfeatures:\n\n http_transport: true\n\n\",\n\n )\n\n .unwrap();\n\n\n\n let temp = tempdir().unwrap();\n\n let core = Core::builder().with_config(&cfg).build();\n\n\n\n let temp_path = temp.path().to_owned();\n\n Resolver::get_best_repo.mock_safe(move |_, name| {\n\n assert_eq!(\n\n name, \"repo\",\n", "file_path": "src/commands/open.rs", "rank": 97, "score": 40655.70245706994 }, { "content": " \"it should be called with the name of the repo to be cloned\"\n\n );\n\n\n\n MockResult::Return(Ok(Repo::new(\n\n \"github.com/git-fixtures/basic\",\n\n temp_path.join(\"repo\").into(),\n\n )))\n\n });\n\n\n\n Launcher::run.mock_safe(move |_, app, target| {\n\n assert_eq!(\n\n app.get_name(),\n\n \"test-app\",\n\n \"it should launch the correct app\"\n\n );\n\n\n\n assert_eq!(\n\n target.get_path(),\n\n temp.path().join(\"repo\"),\n\n \"the target should be launched in the correct directory\"\n", "file_path": "src/commands/open.rs", "rank": 98, "score": 40654.76934264418 }, { "content": "use super::{system_with_internal, user, user_with_internal, Error};\n\nuse http::{uri::InvalidUri, StatusCode};\n\nuse std::{convert, fmt::Debug};\n\n\n\nimpl convert::From<InvalidUri> for Error {\n\n fn from(err: InvalidUri) -> Self {\n\n user_with_internal(\n\n \"We could not parse the URL.\",\n\n \"Please make sure that the URLs you are using are well formed and try this operation again.\",\n\n err\n\n )\n\n }\n\n}\n\n\n\nimpl convert::From<reqwest::Error> for Error {\n\n fn from(err: reqwest::Error) -> Self {\n\n if err.is_redirect() {\n\n user_with_internal(\n\n \"We could not 
complete a web request to due to a redirect loop.\",\n\n \"This is likely due to a problem with the remote server, please try again later and report the problem to us on GitHub if the issue persists.\", \n", "file_path": "src/errors/reqwest.rs", "rank": 99, "score": 40628.72549360261 } ]
Rust
onion_lib/src/api_protocol/messages/mod.rs
leonbeckmann/voip-onion-routing
06e86d957c01cf83e7095ff966c24a0fd99a2ff0
use std::convert::{TryFrom, TryInto}; use std::net::IpAddr; /* * Onion Message Header [size: u16, type: u16] * Direction: Incoming, Outgoing */ #[derive(Debug, PartialEq)] pub struct OnionMessageHeader { pub size: u16, pub msg_type: u16, } impl OnionMessageHeader { pub fn new(size: u16, msg_type: u16) -> Self { assert!(size as usize >= Self::hdr_size()); Self { size, msg_type } } pub const fn hdr_size() -> usize { 4 } pub fn to_be_vec(&self) -> Vec<u8> { let mut v = vec![]; v.append(&mut self.size.to_be_bytes().to_vec()); v.append(&mut self.msg_type.to_be_bytes().to_vec()); v } } impl TryFrom<&[u8; Self::hdr_size()]> for OnionMessageHeader { type Error = anyhow::Error; fn try_from(raw: &[u8; Self::hdr_size()]) -> Result<Self, Self::Error> { let hdr = Self { size: u16::from_be_bytes(raw[0..2].try_into().unwrap()), msg_type: u16::from_be_bytes(raw[2..4].try_into().unwrap()), }; if (hdr.size as usize) >= OnionMessageHeader::hdr_size() { Ok(hdr) } else { Err(anyhow::Error::msg( "Given packet size in OnionMessageHeader less than sizeof OnionMessageHeader", )) } } } /* * Onion Tunnel Build [reserved: u15, ip_version: u1, onion_port: u16, ip_addr: u32/u128, key: [u8]] * Direction: Incoming */ #[derive(Clone, Debug, PartialEq)] pub struct OnionTunnelBuild { _reserved_v: u16, pub onion_port: u16, pub ip: IpAddr, pub host_key: Vec<u8>, } impl OnionTunnelBuild { pub fn new(ip: IpAddr, onion_port: u16, host_key: Vec<u8>) -> Self { Self { _reserved_v: if ip.is_ipv6() { 1 } else { 0 }, onion_port, ip, host_key, } } pub fn to_be_vec(&self) -> Vec<u8> { let mut v = vec![]; v.append(&mut self._reserved_v.to_be_bytes().to_vec()); v.append(&mut self.onion_port.to_be_bytes().to_vec()); match self.ip { IpAddr::V4(v4) => { v.append(&mut v4.octets().to_vec()); } IpAddr::V6(v6) => { v.append(&mut v6.octets().to_vec()); } } v.extend(&self.host_key); v } } impl TryFrom<Vec<u8>> for Box<OnionTunnelBuild> { type Error = anyhow::Error; fn try_from(raw: Vec<u8>) -> Result<Self, Self::Error> { 
if raw.len() < 4 { return Err(anyhow::Error::msg( "Cannot parse OnionTunnelBuild: Invalid number of bytes", )); } let reserved_v = u16::from_be_bytes(raw[0..2].try_into().unwrap()); let onion_port = u16::from_be_bytes(raw[2..4].try_into().unwrap()); let (ip, host_key) = match (1 & reserved_v) == 1 { true => { if raw.len() < 20 { return Err(anyhow::Error::msg( "Cannot parse OnionTunnelBuild: Invalid number of bytes", )); } let mut ip_buf = [0u8; 16]; ip_buf.copy_from_slice(&raw[4..20]); (IpAddr::from(ip_buf), raw[20..].to_vec()) } false => { if raw.len() < 8 { return Err(anyhow::Error::msg( "Cannot parse OnionTunnelBuild: Invalid number of bytes", )); } let mut ip_buf = [0u8; 4]; ip_buf.copy_from_slice(&raw[4..8]); (IpAddr::from(ip_buf), raw[8..].to_vec()) } }; Ok(Box::new(OnionTunnelBuild { _reserved_v: reserved_v, onion_port, ip, host_key, })) } } /* * Onion Tunnel Ready [tunnel_id: u32, host_key: [u8]] * Direction: Outgoing */ #[derive(Debug, PartialEq)] pub struct OnionTunnelReady { pub tunnel_id: u32, pub host_key: Vec<u8>, } impl OnionTunnelReady { pub fn new(tunnel_id: u32, host_key: Vec<u8>) -> Self { Self { tunnel_id, host_key, } } pub fn to_be_vec(&self) -> Vec<u8> { let mut v = vec![]; v.append(&mut self.tunnel_id.to_be_bytes().to_vec()); v.extend(&self.host_key); v } } impl TryFrom<Vec<u8>> for Box<OnionTunnelReady> { type Error = anyhow::Error; fn try_from(raw: Vec<u8>) -> Result<Self, Self::Error> { if raw.len() < 4 { return Err(anyhow::Error::msg( "Cannot parse OnionTunnelReady: Invalid number of bytes", )); } Ok(Box::new(OnionTunnelReady { tunnel_id: u32::from_be_bytes(raw[0..4].try_into().unwrap()), host_key: raw[4..].to_vec(), })) } } /* * Onion Tunnel Incoming [tunnel_id: u32] * Direction: Outgoing */ #[derive(Debug, PartialEq)] pub struct OnionTunnelIncoming { pub tunnel_id: u32, } impl OnionTunnelIncoming { pub fn new(tunnel_id: u32) -> Self { Self { tunnel_id } } pub fn to_be_vec(&self) -> Vec<u8> { self.tunnel_id.to_be_bytes().to_vec() } const 
fn packet_size() -> usize { 4 } } impl TryFrom<Vec<u8>> for OnionTunnelIncoming { type Error = anyhow::Error; fn try_from(raw: Vec<u8>) -> Result<Self, Self::Error> { if raw.len() != Self::packet_size() { Err(anyhow::Error::msg( "Cannot parse OnionTunnelIncoming: Invalid number of bytes", )) } else { Ok(Self { tunnel_id: u32::from_be_bytes(raw[0..4].try_into().unwrap()), }) } } } /* * Onion Tunnel Destroy [tunnel_id: u32] * Direction: Incoming */ #[derive(Debug, PartialEq)] pub struct OnionTunnelDestroy { pub tunnel_id: u32, } impl OnionTunnelDestroy { pub fn new(tunnel_id: u32) -> Self { Self { tunnel_id } } pub fn to_be_vec(&self) -> Vec<u8> { self.tunnel_id.to_be_bytes().to_vec() } const fn packet_size() -> usize { 4 } } impl TryFrom<Vec<u8>> for OnionTunnelDestroy { type Error = anyhow::Error; fn try_from(raw: Vec<u8>) -> Result<Self, Self::Error> { if raw.len() != Self::packet_size() { Err(anyhow::Error::msg( "Cannot parse OnionTunnelDestroy: Invalid number of bytes", )) } else { Ok(Self { tunnel_id: u32::from_be_bytes(raw[0..4].try_into().unwrap()), }) } } } /* * Onion Tunnel Data [tunnel_id: u32, data: Vec<u8>] * Direction: Incoming, Outgoing */ #[derive(Debug, PartialEq)] pub struct OnionTunnelData { pub tunnel_id: u32, pub data: Vec<u8>, } impl OnionTunnelData { pub fn new(tunnel_id: u32, data: Vec<u8>) -> Self { Self { tunnel_id, data } } pub fn to_be_vec(&self) -> Vec<u8> { let mut v = vec![]; v.append(&mut self.tunnel_id.to_be_bytes().to_vec()); v.extend(&self.data); v } } impl TryFrom<Vec<u8>> for Box<OnionTunnelData> { type Error = anyhow::Error; fn try_from(raw: Vec<u8>) -> Result<Self, Self::Error> { if raw.len() < 4 { Err(anyhow::Error::msg( "Cannot parse OnionTunnelData: Invalid number of bytes", )) } else { Ok(Box::new(OnionTunnelData { tunnel_id: u32::from_be_bytes(raw[0..4].try_into().unwrap()), data: raw[4..].to_vec(), })) } } } /* * Onion Tunnel Error [request_type: u16, reserved: u16, tunnel_id: u32] * Direction: Outgoing */ #[derive(Debug, 
PartialEq)] pub struct OnionError { pub request_type: u16, _reserved: u16, pub tunnel_id: u32, } impl OnionError { pub fn new(request_type: u16, tunnel_id: u32) -> Self { Self { request_type, _reserved: 0, tunnel_id, } } pub fn to_be_vec(&self) -> Vec<u8> { let mut v = vec![]; v.append(&mut self.request_type.to_be_bytes().to_vec()); v.append(&mut self._reserved.to_be_bytes().to_vec()); v.append(&mut self.tunnel_id.to_be_bytes().to_vec()); v } const fn packet_size() -> usize { 8 } } impl TryFrom<Vec<u8>> for OnionError { type Error = anyhow::Error; fn try_from(raw: Vec<u8>) -> Result<Self, Self::Error> { if raw.len() != Self::packet_size() { Err(anyhow::Error::msg( "Cannot parse OnionError: Invalid number of bytes", )) } else { Ok(Self { request_type: u16::from_be_bytes(raw[0..2].try_into().unwrap()), _reserved: 0, tunnel_id: u32::from_be_bytes(raw[4..8].try_into().unwrap()), }) } } } /* * Onion Tunnel Data [cover_size: 16, reserved: u16] * Direction: Incoming */ #[derive(Debug, PartialEq)] pub struct OnionCover { pub cover_size: u16, _reserved: u16, } impl OnionCover { pub fn new(cover_size: u16) -> Self { Self { cover_size, _reserved: 0, } } pub fn to_be_vec(&self) -> Vec<u8> { let mut v = vec![]; v.append(&mut self.cover_size.to_be_bytes().to_vec()); v.append(&mut self._reserved.to_be_bytes().to_vec()); v } const fn packet_size() -> usize { 4 } } impl TryFrom<Vec<u8>> for OnionCover { type Error = anyhow::Error; fn try_from(raw: Vec<u8>) -> Result<Self, Self::Error> { if raw.len() != Self::packet_size() { Err(anyhow::Error::msg( "Cannot parse OnionCover: Invalid number of bytes", )) } else { Ok(Self { cover_size: u16::from_be_bytes(raw[0..2].try_into().unwrap()), _reserved: 0, }) } } } #[cfg(test)] mod tests { use crate::api_protocol::messages::{ OnionCover, OnionError, OnionMessageHeader, OnionTunnelBuild, OnionTunnelData, OnionTunnelDestroy, OnionTunnelIncoming, OnionTunnelReady, }; use crate::api_protocol::{ONION_TUNNEL_BUILD, ONION_TUNNEL_DATA}; use 
std::convert::{TryFrom, TryInto}; use std::net::IpAddr; use std::str::FromStr; #[test] fn unit_test_only_messages() { let hdr = OnionMessageHeader::new(25, ONION_TUNNEL_DATA); let mut hdr_raw: [u8; 4] = hdr.to_be_vec().try_into().unwrap(); let hdr2 = OnionMessageHeader::try_from(&hdr_raw).unwrap(); assert_eq!(hdr, hdr2); hdr_raw[0] = 0; hdr_raw[1] = 2; OnionMessageHeader::try_from(&hdr_raw).unwrap_err(); let ip_addr = IpAddr::from_str("127.0.0.1").unwrap(); let build_v4 = OnionTunnelBuild::new(ip_addr, 1234, "key".as_bytes().to_vec()); let build2 = Box::<OnionTunnelBuild>::try_from(build_v4.to_be_vec()).unwrap(); assert_eq!(Box::new(build_v4.clone()), build2); let ip_addr = IpAddr::from_str("::1").unwrap(); let build_v6 = OnionTunnelBuild::new(ip_addr, 1234, "key".as_bytes().to_vec()); let build2 = Box::<OnionTunnelBuild>::try_from(build_v6.to_be_vec()).unwrap(); assert_eq!(Box::new(build_v6.clone()), build2); Box::<OnionTunnelBuild>::try_from(vec![0, 1, 2]).unwrap_err(); let mut build_v4_invalid = build_v4.to_be_vec(); build_v4_invalid.truncate(7); let mut build_v6_invalid = build_v6.to_be_vec(); build_v6_invalid.truncate(19); Box::<OnionTunnelBuild>::try_from(build_v4_invalid).unwrap_err(); Box::<OnionTunnelBuild>::try_from(build_v6_invalid).unwrap_err(); let ready = OnionTunnelReady::new(1025, "key".as_bytes().to_vec()); let ready2 = Box::<OnionTunnelReady>::try_from(ready.to_be_vec()).unwrap(); assert_eq!(Box::new(ready), ready2); Box::<OnionTunnelReady>::try_from(vec![0, 1, 2]).unwrap_err(); let incoming = OnionTunnelIncoming::new(1025); let incoming2 = OnionTunnelIncoming::try_from(incoming.to_be_vec()).unwrap(); assert_eq!(incoming, incoming2); OnionTunnelIncoming::try_from(vec![0, 1, 2]).unwrap_err(); let destroy = OnionTunnelDestroy::new(1025); let destroy2 = OnionTunnelDestroy::try_from(destroy.to_be_vec()).unwrap(); assert_eq!(destroy, destroy2); let data = OnionTunnelData::new(1025, "Data".as_bytes().to_vec()); let data2 = 
Box::<OnionTunnelData>::try_from(data.to_be_vec()).unwrap(); assert_eq!(Box::new(data), data2); Box::<OnionTunnelData>::try_from(vec![0, 1, 2]).unwrap_err(); let error = OnionError::new(ONION_TUNNEL_BUILD, 0); let error2 = OnionError::try_from(error.to_be_vec()).unwrap(); assert_eq!(error, error2); OnionError::try_from(vec![0, 1, 2, 3, 4, 5, 6, 7, 8]).unwrap_err(); let cover = OnionCover::new(1025); let cover2 = OnionCover::try_from(cover.to_be_vec()).unwrap(); assert_eq!(cover, cover2); OnionCover::try_from(vec![0, 1, 2, 3, 4]).unwrap_err(); } }
use std::convert::{TryFrom, TryInto}; use std::net::IpAddr; /* * Onion Message Header [size: u16, type: u16] * Direction: Incoming, Outgoing */ #[derive(Debug, PartialEq)] pub struct OnionMessageHeader { pub size: u16, pub msg_type: u16, } impl OnionMessageHeader { pub fn new(size: u16, msg_type: u16) -> Self { assert!(size as usize >= Self::hdr_size()); Self { size, msg_type } } pub const fn hdr_size() -> usize { 4 } pub fn to_be_vec(&self) -> Vec<u8> { let mut v = vec![]; v.append(&mut self.size.to_be_bytes().to_vec()); v.append(&mut self.msg_type.to_be_bytes().to_vec()); v } } impl TryFrom<&[u8; Self::hdr_size()]> for OnionMessageHeader { type Error = anyhow::Error; fn try_from(raw: &[u8; Self::hdr_size()]) -> Result<Self, Self::Error> { let hdr = Self { size: u16::from_be_bytes(raw[0..2].try_into().unwrap()), msg_type: u16::from_be_bytes(raw[2..4].try_into().unwrap()), }; if (hdr.size as usize) >= OnionMessageHeader::hdr_size() { Ok(hdr) } else { Err(anyhow::Error::msg( "Given packet size in OnionMessageHeader less than sizeof OnionMessageHeader", )) } } } /* * Onion Tunnel Build [reserved: u15, ip_version: u1, onion_port: u16, ip_addr: u32/u128, key: [u8]] * Direction: Incoming */ #[derive(Clone, Debug, PartialEq)] pub struct OnionTunnelBuild { _reserved_v: u16, pub onion_port: u16, pub ip: IpAddr, pub host_key: Vec<u8>, } impl OnionTunnelBuild { pub fn new(ip: IpAddr, onion_port: u16, host_key: Vec<u8>) -> Self { Self { _reserved_v: if ip.is_ipv6() { 1 } else { 0 }, onion_port, ip, host_key, } } pub fn to_be_vec(&self) -> Vec<u8> { let mut v = vec![]; v.append(&mut self._reserved_v.to_be_bytes().to_vec()); v.append(&mut self.onion_port.to_be_bytes().to_vec()); match self.ip { IpAddr::V4(v4) => { v.append(&mut v4.octets().to_vec()); } IpAddr::V6(v6) => { v.append(&mut v6.octets().to_vec()); } } v.extend(&self.host_key); v } } impl TryFrom<Vec<u8>> for Box<OnionTunnelBuild> { type Error = anyhow::Error; fn try_from(raw: Vec<u8>) -> Result<Self, Self::Error> { 
if raw.len() < 4 { return Err(anyhow::Error::msg( "Cannot parse OnionTunnelBuild: Invalid number of bytes", )); } let reserved_v = u16::from_be_bytes(raw[0..2].try_into().unwrap()); let onion_port = u16::from_be_bytes(raw[2..4].try_into().unwrap()); let (ip, host_key) = match (1 & reserved_v) == 1 { true => { if raw.len() < 20 { return Err(anyhow::Error::msg( "Cannot parse OnionTunnelBuild: Invalid number of bytes", )); } let mut ip_buf = [0u8; 16]; ip_buf.copy_from_slice(&raw[4..20]); (IpAddr::from(ip_buf), raw[20..].to_vec()) } false => { if raw.len() < 8 { return Err(anyhow::Error::msg( "Cannot parse OnionTunnelBuild: Invalid number of bytes", )); } let mut ip_buf = [0u8; 4]; ip_buf.copy_from_slice(&raw[4..8]); (IpAddr::from(ip_buf), raw[8..].to_vec()) } }; Ok(Box::new(OnionTunnelBuild { _reserved_v: reserved_v, onion_port, ip, host_key, })) } } /* * Onion Tunnel Ready [tunnel_id: u32, host_key: [u8]] * Direction: Outgoing */ #[derive(Debug, PartialEq)] pub struct OnionTunnelReady { pub tunnel_id: u32, pub host_key: Vec<u8>, } impl OnionTunnelReady { pub fn new(tunnel_id: u32, host_key: Vec<u8>) -> Self { Self { tunnel_id, host_key, } } pub fn to_be_vec(&self) -> Vec<u8> { let mut v = vec![]; v.append(&mut self.tunnel_id.to_be_bytes().to_vec()); v.extend(&self.host_key); v } } impl TryFrom<Vec<u8>> for Box<OnionTunnelReady> { type Error = anyhow::Error; fn try_from(raw: Vec<u8>) -> Result<Self, Self::Error> { if raw.len() < 4 { return Err(anyhow::Error::msg( "Cannot parse OnionTunnelReady: Invalid number of bytes", )); } Ok(Box::new(OnionTunnelReady { tunnel_id: u32::from_be_bytes(raw[0..4].try_into().unwrap()), host_key: raw[4..].to_vec(), })) } } /* * Onion Tunnel Incoming [tunnel_id: u32] * Direction: Outgoing */ #[derive(Debug, PartialEq)] pub struct OnionTunnelIncoming { pub tunnel_id: u32, } impl OnionTunnelIncoming { pub fn new(tunnel_id: u32) -> Self { Self { tunnel_id } } pub fn to_be_vec(&self) -> Vec<u8> { self.tunnel_id.to_be_bytes().to_vec() } const 
fn packet_size() -> usize { 4 } } impl TryFrom<Vec<u8>> for OnionTunnelIncoming { type Error = anyhow::Error; fn try_from(raw: Vec<u8>) -> Result<Self, Self::Error> { if raw.len() != Self::packet_size() { Err(anyhow::Error::msg( "Cannot parse OnionTunnelIncoming: Invalid number of bytes", )) } else { Ok(Self { tunnel_id: u32::from_be_bytes(raw[0..4].try_into().unwrap()), }) } } } /* * Onion Tunnel Destroy [tunnel_id: u32] * Direction: Incoming */ #[derive(Debug, PartialEq)] pub struct OnionTunnelDestroy { pub tunnel_id: u32, } impl OnionTunnelDestroy { pub fn new(tunnel_id: u32) -> Self { Self { tunnel_id } } pub fn to_be_vec(&self) -> Vec<u8> { self.tunnel_id.to_be_bytes().to_vec() } const fn packet_size() -> usize { 4 } } impl TryFrom<Vec<u8>> for OnionTunnelDestroy { type Error = anyhow::Error; fn try_from(raw: Vec<u8>) -> Result<Self, Self::Error> { if raw.len() != Self::packet_size() { Err(anyhow::Error::msg( "Cannot parse OnionTunnelDestroy: Invalid number of bytes", )) } else { Ok(Self { tunnel_id: u32::from_be_bytes(raw[0..4].try_into().unwrap()), }) } } } /* * Onion Tunnel Data [tunnel_id: u32, data: Vec<u8>] * Direction: Incoming, Outgoing */ #[derive(Debug, PartialEq)] pub struct OnionTunnelData { pub tunnel_id: u32, pub data: Vec<u8>, } impl OnionTunnelData { pub fn new(tunnel_id: u32, data: Vec<u8>) -> Self { Self { tunnel_id, data } } pub fn to_be_vec(&self) -> Vec<u8> { let mut v = vec![]; v.append(&mut self.tunnel_id.to_be_bytes().to_vec()); v.extend(&self.data); v } } impl TryFrom<Vec<u8>> for Box<OnionTunnelData> { type Error = anyhow::Error; fn try_from(raw: Vec<u8>) -> Result<Self, Self::Error> { if raw.len() < 4 { Err(anyhow::Error::msg( "Cannot parse OnionTunnelData: Invalid number of bytes", )) } else { Ok(Box::new(OnionTunnelData { tunnel_id: u32::from_be_bytes(raw[0..4].try_into().unwrap()), data: raw[4..].to_vec(), })) } } } /* * Onion Tunnel Error [request_type: u16, reserved: u16, tunnel_id: u32] * Direction: Outgoing */ #[derive(Debug, 
PartialEq)] pub struct OnionError { pub request_type: u16, _reserved: u16, pub tunnel_id: u32, } impl OnionError { pub fn new(request_type: u16, tunnel_id: u32) -> Self { Self { request_type, _reserved: 0, tunnel_id, } } pub fn to_be_vec(&self) -> Vec<u8> { let mut v = vec![]; v.append(&mut self.request_type.to_be_bytes().to_vec()); v.append(&mut self._reserved.to_be_bytes().to_vec()); v.append(&mut self.tunnel_id.to_be_bytes().to_vec()); v } const fn packet_size() -> usize { 8 } } impl TryFrom<Vec<u8>> for OnionError { type Error = anyhow::Error; fn try_from(raw: Vec<u8>) -> Result<Self, Self::Error> { if raw.len() != Self::packet_size() { Err(anyhow::Error::msg( "Cannot parse OnionError: Invalid number of bytes", )) } else { Ok(Self { request_type: u16::from_be_bytes(raw[0..2].try_into().unwrap()), _reserved: 0, tunnel_id: u32::from_be_bytes(raw[4..8].try_into().unwrap()), }) } } } /* * Onion Tunnel Data [cover_size: 16, reserved: u16] * Direction: Incoming */ #[derive(Debug, PartialEq)] pub struct OnionCover { pub cover_size: u16, _reserved: u16, } impl OnionCover { pub fn new(cover_size: u16) -> Self { Self { cover_size, _reserved: 0, } } pub fn to_be_vec(&self) -> Vec<u8> { let mut v = vec![]; v.append(&mut self.cover_size.to_be_bytes().to_vec()); v.append(&mut self._reserved.to_be_bytes().to_vec()); v } const fn packet_size() -> usize { 4 } } impl TryFrom<Vec<u8>> for OnionCover { type Error = anyhow::Error; fn try_from(raw: Vec<u8>) -> Result<Self, Self::Error> { if raw.len() != Self::packet_size() { Err(anyhow::Error::msg( "Cannot parse OnionCover: Invalid number of bytes", )) } else { Ok(Self { cover_size: u16::from_be_bytes(raw[0..2].try_into().unwrap()), _reserved: 0, }) } } } #[cfg(test)] mod tests { use crate::api_protocol::messages::{ OnionCover, OnionError, OnionMessageHeader, OnionTunnelBuild, OnionTunnelData, OnionTunnelDestroy, OnionTunnelIncoming, OnionTunnelReady, }; use crate::api_protocol::{ONION_TUNNEL_BUILD, ONION_TUNNEL_DATA}; use 
std::convert::{TryFrom, TryInto}; use std::net::IpAddr; use std::str::FromStr; #[test]
}
fn unit_test_only_messages() { let hdr = OnionMessageHeader::new(25, ONION_TUNNEL_DATA); let mut hdr_raw: [u8; 4] = hdr.to_be_vec().try_into().unwrap(); let hdr2 = OnionMessageHeader::try_from(&hdr_raw).unwrap(); assert_eq!(hdr, hdr2); hdr_raw[0] = 0; hdr_raw[1] = 2; OnionMessageHeader::try_from(&hdr_raw).unwrap_err(); let ip_addr = IpAddr::from_str("127.0.0.1").unwrap(); let build_v4 = OnionTunnelBuild::new(ip_addr, 1234, "key".as_bytes().to_vec()); let build2 = Box::<OnionTunnelBuild>::try_from(build_v4.to_be_vec()).unwrap(); assert_eq!(Box::new(build_v4.clone()), build2); let ip_addr = IpAddr::from_str("::1").unwrap(); let build_v6 = OnionTunnelBuild::new(ip_addr, 1234, "key".as_bytes().to_vec()); let build2 = Box::<OnionTunnelBuild>::try_from(build_v6.to_be_vec()).unwrap(); assert_eq!(Box::new(build_v6.clone()), build2); Box::<OnionTunnelBuild>::try_from(vec![0, 1, 2]).unwrap_err(); let mut build_v4_invalid = build_v4.to_be_vec(); build_v4_invalid.truncate(7); let mut build_v6_invalid = build_v6.to_be_vec(); build_v6_invalid.truncate(19); Box::<OnionTunnelBuild>::try_from(build_v4_invalid).unwrap_err(); Box::<OnionTunnelBuild>::try_from(build_v6_invalid).unwrap_err(); let ready = OnionTunnelReady::new(1025, "key".as_bytes().to_vec()); let ready2 = Box::<OnionTunnelReady>::try_from(ready.to_be_vec()).unwrap(); assert_eq!(Box::new(ready), ready2); Box::<OnionTunnelReady>::try_from(vec![0, 1, 2]).unwrap_err(); let incoming = OnionTunnelIncoming::new(1025); let incoming2 = OnionTunnelIncoming::try_from(incoming.to_be_vec()).unwrap(); assert_eq!(incoming, incoming2); OnionTunnelIncoming::try_from(vec![0, 1, 2]).unwrap_err(); let destroy = OnionTunnelDestroy::new(1025); let destroy2 = OnionTunnelDestroy::try_from(destroy.to_be_vec()).unwrap(); assert_eq!(destroy, destroy2); let data = OnionTunnelData::new(1025, "Data".as_bytes().to_vec()); let data2 = Box::<OnionTunnelData>::try_from(data.to_be_vec()).unwrap(); assert_eq!(Box::new(data), data2); 
Box::<OnionTunnelData>::try_from(vec![0, 1, 2]).unwrap_err(); let error = OnionError::new(ONION_TUNNEL_BUILD, 0); let error2 = OnionError::try_from(error.to_be_vec()).unwrap(); assert_eq!(error, error2); OnionError::try_from(vec![0, 1, 2, 3, 4, 5, 6, 7, 8]).unwrap_err(); let cover = OnionCover::new(1025); let cover2 = OnionCover::try_from(cover.to_be_vec()).unwrap(); assert_eq!(cover, cover2); OnionCover::try_from(vec![0, 1, 2, 3, 4]).unwrap_err(); }
function_block-full_function
[ { "content": "fn write_msg(msg_type: u16, data: Vec<u8>, stream: &mut TcpStream) {\n\n let hdr = OnionMessageHeader::new(\n\n (data.len() + OnionMessageHeader::hdr_size()) as u16,\n\n msg_type,\n\n )\n\n .to_be_vec();\n\n\n\n stream.write_all(hdr.as_slice()).unwrap();\n\n stream.write_all(data.as_slice()).unwrap();\n\n}\n\n\n", "file_path": "onion_tests/tests/integration.rs", "rank": 0, "score": 286168.2152436187 }, { "content": "fn read_msg(stream: &mut TcpStream) -> (OnionMessageHeader, Vec<u8>) {\n\n let mut buf = [0u8; OnionMessageHeader::hdr_size()];\n\n stream.read_exact(&mut buf).unwrap();\n\n\n\n // parse buf the onion_msg_hdr\n\n let hdr = OnionMessageHeader::try_from(&buf).unwrap();\n\n\n\n // read remaining message into buf without the hdr\n\n let mut buf = vec![0u8; hdr.size as usize - OnionMessageHeader::hdr_size()];\n\n stream.read_exact(&mut buf).unwrap();\n\n\n\n (hdr, buf)\n\n}\n\n\n", "file_path": "onion_tests/tests/integration.rs", "rank": 1, "score": 240842.91686234178 }, { "content": "fn serialize(message: FrameDataType) -> Vec<u8> {\n\n let mut buf = vec![AUTH_PLACEHOLDER; AUTH_SIZE];\n\n let mut frame_data = FrameData::new();\n\n let mut data = message.write_to_bytes().unwrap();\n\n assert!(data.len() <= FRAME_DATA_CONTENT_SIZE);\n\n frame_data.set_data_size(data.len() as u32);\n\n let padding_size = FRAME_DATA_CONTENT_SIZE - data.len();\n\n let mut padding: Vec<_> = (0..padding_size).map(|_| rand::random::<u8>()).collect();\n\n data.append(&mut padding);\n\n assert_eq!(data.len(), FRAME_DATA_CONTENT_SIZE);\n\n frame_data.set_data(data.into());\n\n let mut frame_data_serialized = frame_data.write_to_bytes().unwrap();\n\n assert_eq!(frame_data_serialized.len(), SERIALIZED_FRAME_DATA_SIZE);\n\n buf.append(&mut frame_data_serialized);\n\n assert_eq!(buf.len(), FRAME_DATA_SIZE);\n\n buf\n\n}\n\n\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 2, "score": 227595.40291796223 }, { "content": "fn 
deserialize(raw: &[u8], tunnel_id: TunnelId) -> Result<FrameDataType, ProtocolError> {\n\n if raw.len() != FRAME_DATA_SIZE {\n\n log::warn!(\n\n \"Tunnel={:?}: Received packet with invalid frame data size. Disconnect\",\n\n tunnel_id\n\n );\n\n return Err(ProtocolError::InvalidPacketLength);\n\n }\n\n\n\n let (_auth_tag, raw) = raw.split_at(AUTH_SIZE);\n\n let frame_data = match FrameData::parse_from_bytes(raw) {\n\n Ok(data) => data,\n\n Err(_) => {\n\n log::warn!(\"Tunnel={:?}: Cannot parse frame data\", tunnel_id);\n\n return Err(ProtocolError::ProtobufError);\n\n }\n\n };\n\n let raw_content = frame_data.data;\n\n let (data, _padding) = raw_content.split_at((frame_data.data_size) as usize);\n\n match FrameDataType::parse_from_bytes(data) {\n\n Ok(data) => Ok(data),\n\n Err(_) => {\n\n log::warn!(\"Tunnel={:?}: Cannot parse frame data type\", tunnel_id);\n\n Err(ProtocolError::ProtobufError)\n\n }\n\n }\n\n}\n\n\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 3, "score": 200718.17817972676 }, { "content": "type SequenceNumber = u32;\n\n\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 4, "score": 177789.3020756193 }, { "content": "pub fn decrypt_no_pad(\n\n t: Cipher,\n\n key: &[u8],\n\n iv: Option<&[u8]>,\n\n data: &[u8],\n\n) -> Result<Vec<u8>, ErrorStack> {\n\n let mut c = Crypter::new(t, Mode::Decrypt, key, iv)?;\n\n c.pad(false);\n\n let mut out = vec![0; data.len() + t.block_size()];\n\n let count = c.update(data, &mut out)?;\n\n let rest = c.finalize(&mut out[count..])?;\n\n out.truncate(count + rest);\n\n Ok(out)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::sync::Arc;\n\n\n\n use openssl::rsa::Rsa;\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/crypto/mod.rs", "rank": 5, "score": 143518.62327918 }, { "content": "type IV = Bytes;\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 6, "score": 134724.5370555451 }, { "content": 
"#[derive(Debug, Clone)]\n\nstruct SequenceNumberContext {\n\n outgoing: SequenceNumber,\n\n newest_received: SequenceNumber,\n\n used_seq_nrs: HashSet<SequenceNumber>,\n\n}\n\n\n\nimpl SequenceNumberContext {\n\n fn new() -> Self {\n\n Self {\n\n outgoing: 0,\n\n newest_received: 0,\n\n used_seq_nrs: HashSet::new(),\n\n }\n\n }\n\n\n\n fn get_next_seq_nr(&mut self) -> SequenceNumber {\n\n self.outgoing += 1;\n\n self.outgoing\n\n }\n\n\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 7, "score": 134643.10845254472 }, { "content": "type IV = Bytes;\n\n\n\npub(super) struct InitiatorStateMachine {\n\n tunnel_result_tx: Option<oneshot::Sender<TunnelResult>>, // signal the listener completion\n\n event_tx: Sender<FsmEvent>, // only for cloning purpose to pass the sender to the handshake fsm\n\n endpoint_codec: Arc<Mutex<Box<dyn P2pCodec + Send>>>,\n\n listener_tx: Sender<IncomingEventMessage>,\n\n tunnel_id: TunnelId,\n\n hops: Vec<Peer>,\n\n fsm_lock: Arc<(Mutex<FsmLockState>, Notify)>,\n\n local_crypto_config: Arc<HandshakeCryptoConfig>,\n\n handshake_timeout: Duration,\n\n frame_id_manager: Arc<RwLock<FrameIdManager>>,\n\n tunnel_update_ref: Option<FrameId>,\n\n cover_only: bool,\n\n}\n\n\n\nimpl InitiatorStateMachine {\n\n #[allow(clippy::too_many_arguments)]\n\n pub fn new(\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/fsm/mod.rs", "rank": 8, "score": 130112.45192086895 }, { "content": "type FrameId = u32;\n\npub type ConnectionId = u64;\n", "file_path": "onion_lib/src/p2p_protocol/mod.rs", "rank": 9, "score": 125848.77837953769 }, { "content": "pub fn run_peer<P: AsRef<Path> + Debug>(config_file: P) {\n\n let runtime = tokio::runtime::Runtime::new().unwrap();\n\n runtime.block_on(run_peer_async(config_file))\n\n}\n\n\n\nasync fn run_peer_async<P: AsRef<Path> + Debug>(config_file: P) {\n\n // parse config file\n\n log::debug!(\"Parse config file from {:?}\", config_file);\n\n let config = match 
OnionConfiguration::parse_from_file(config_file) {\n\n Ok(config) => {\n\n log::info!(\"Peer configuration: {:?}\", config);\n\n config\n\n }\n\n Err(e) => {\n\n log::error!(\"Cannot parse config file: {}\", e);\n\n return;\n\n }\n\n };\n\n\n\n let api_interface = Arc::new(api_protocol::ApiInterface::new());\n", "file_path": "onion_lib/src/lib.rs", "rank": 10, "score": 114323.07257943382 }, { "content": "enum IncomingEventMessage {\n\n TunnelCompletion,\n\n CoverTunnelCompletion,\n\n TunnelUpdate(FrameId),\n\n Data(Vec<u8>),\n\n Downgraded,\n\n}\n\n\n\npub(crate) type Peer = (SocketAddr, Vec<u8>);\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/mod.rs", "rank": 11, "score": 112642.95610665427 }, { "content": "#[test]\n\nfn integration_test() {\n\n // enable logging\n\n env::set_var(\"RUST_LOG\", \"info\");\n\n #[cfg(not(coverage))]\n\n env_logger::init();\n\n #[cfg(coverage)]\n\n env_logger::Builder::new()\n\n .target(env_logger::Target::Stdout)\n\n .parse_filters(\"trace\")\n\n .try_init()\n\n .unwrap();\n\n\n\n log::info!(\"Starting integration test\");\n\n\n\n // run alice and bob and hop1\n\n let dir = TempDir::new().unwrap();\n\n\n\n let config_file_alice = dir.path().join(\"alice.config\");\n\n let config_file_bob = dir.path().join(\"bob.config\");\n\n let config_file_hop1 = dir.path().join(\"hop1.config\");\n", "file_path": "onion_tests/tests/integration.rs", "rank": 12, "score": 104737.35261074532 }, { "content": "#[async_trait]\n\npub trait P2pCodec {\n\n /*\n\n * Send data to the previous peer (for endpoints, previous == next)\n\n * Used for sending handshake data to or back from hops or sending application data from endpoints\n\n */\n\n async fn write(&mut self, data: DataType) -> Result<(), ProtocolError>;\n\n\n\n /*\n\n * Process incoming encrypted data.\n\n *\n\n * If self is an intermediate hop, the data are processed and transferred to the next hop after\n\n * checked for magic close number.\n\n * If self is an endpoint, the data are 
returned as IncomingData if no endpoint\n\n */\n\n async fn process_data(\n\n &mut self,\n\n d: Direction,\n\n data: Bytes,\n\n iv: IV,\n\n ) -> Result<ProcessedData, ProtocolError>;\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 13, "score": 104080.39471973492 }, { "content": "pub trait PeerType {\n\n const INIT_STATE: HandshakeState;\n\n fn tunnel_update_reference(&self) -> FrameId;\n\n}\n\n\n\nimpl PeerType for Client {\n\n const INIT_STATE: HandshakeState = HandshakeState::Start;\n\n\n\n fn tunnel_update_reference(&self) -> FrameId {\n\n self.tunnel_update_ref\n\n }\n\n}\n\n\n\nimpl Client {\n\n pub fn new(tunnel_update_ref: FrameId) -> Client {\n\n Client { tunnel_update_ref }\n\n }\n\n}\n\n\n\nimpl PeerType for Server {\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/fsm/handshake_fsm.rs", "rank": 14, "score": 102292.1965588801 }, { "content": "fn encrypt_no_pad(\n\n t: Cipher,\n\n key: &[u8],\n\n iv: Option<&[u8]>,\n\n data: &[u8],\n\n) -> Result<Vec<u8>, ErrorStack> {\n\n let mut c = Crypter::new(t, Mode::Encrypt, key, iv)?;\n\n c.pad(false);\n\n let mut out = vec![0; data.len() + t.block_size()];\n\n let count = c.update(data, &mut out)?;\n\n let rest = c.finalize(&mut out[count..])?;\n\n out.truncate(count + rest);\n\n Ok(out)\n\n}\n\n\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/crypto/mod.rs", "rank": 15, "score": 101734.78455373678 }, { "content": "type IsCoverOnly = bool;\n\n\n\n#[derive(Debug)]\n\npub enum FsmEvent {\n\n Init, // Start the FSM\n\n Close, // Close the Tunnel (connected -> downgraded)\n\n Shutdown, // Shutdown\n\n Send(Vec<u8>), // Send Data via the Tunnel\n\n Cover(Vec<u8>), // Send cover traffic via Tunnel\n\n IncomingFrame((Bytes, Direction, IV)), // Received data frame\n\n HandshakeResult(Result<(IsTargetEndpoint, IsCoverOnly, Option<FrameId>), ProtocolError>), // HandshakeResult from handshake fsm\n\n}\n", "file_path": 
"onion_lib/src/p2p_protocol/onion_tunnel/fsm/mod.rs", "rank": 16, "score": 100900.6850121053 }, { "content": "#[allow(clippy::too_many_arguments)]\n\nfn run_peer(\n\n p2p_port: &str,\n\n onion_api_addr: &str,\n\n rps_api_addr: &str,\n\n config_file: PathBuf,\n\n key_file: &Path,\n\n priv_key_file: &Path,\n\n cert_file: &Path,\n\n pki_cert_file: &Path,\n\n round_time: Duration,\n\n) {\n\n // write to config file\n\n let mut config = Ini::new();\n\n config\n\n .with_general_section()\n\n .set(\"hostkey\", key_file.to_str().unwrap());\n\n config\n\n .with_section(Some(\"onion\"))\n\n .set(\"p2p_port\", p2p_port)\n\n .set(\"p2p_hostname\", \"127.0.0.1\")\n", "file_path": "onion_tests/tests/integration.rs", "rank": 17, "score": 99953.31543323351 }, { "content": "type IsTargetEndpoint = bool;\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/fsm/mod.rs", "rank": 18, "score": 98444.17046813425 }, { "content": "fn run_rps_api(\n\n addr: SocketAddr,\n\n port_hop1: u16,\n\n port_hop2: u16,\n\n key_hop1: Vec<u8>,\n\n key_hop2: Vec<u8>,\n\n) -> anyhow::Result<()> {\n\n let listener = TcpListener::bind(addr)?;\n\n\n\n let ip = IpAddr::from_str(\"127.0.0.1\").unwrap();\n\n let port = 1234;\n\n let mut port_map_hop1 = HashMap::new();\n\n port_map_hop1.insert(ONION_PORT, port_hop1);\n\n let mut port_map_hop2 = HashMap::new();\n\n port_map_hop2.insert(ONION_PORT, port_hop2);\n\n\n\n let hop1 = RpsPeer::new(ip, port, port_map_hop1, key_hop1);\n\n let hop2 = RpsPeer::new(ip, port, port_map_hop2, key_hop2);\n\n\n\n let hops = vec![hop1, hop2];\n", "file_path": "onion_tests/tests/integration.rs", "rank": 19, "score": 97615.7193804886 }, { "content": "fn main() {\n\n // println!(\"cargo:rerun-if-changed=src/p2p_protocol/messages/p2p_messages.proto\");\n\n std::fs::create_dir_all(GEN_OUT_DIR)\n\n .unwrap_or_else(|_| panic!(\"could not create or find directory {}\", GEN_OUT_DIR));\n\n\n\n protoc_rust::Codegen::new()\n\n .out_dir(GEN_OUT_DIR)\n\n 
.input(Path::new(PROTO_FILE_PATHS))\n\n .customize(Customize {\n\n carllerche_bytes_for_bytes: Some(true),\n\n carllerche_bytes_for_string: Some(false),\n\n ..Default::default()\n\n })\n\n .run()\n\n .expect(\"protoc\");\n\n}\n", "file_path": "onion_lib/build.rs", "rank": 20, "score": 95910.3950489719 }, { "content": "type Listeners = Arc<Mutex<HashSet<ConnectionId>>>;\n\n\n\n#[derive(PartialEq)]\n\npub(crate) struct UpdateInformation {\n\n pub listener: ConnectionId,\n\n pub target: Peer,\n\n}\n\n\n\nimpl UpdateInformation {\n\n pub fn new(listener: ConnectionId, target: Peer) -> UpdateInformation {\n\n UpdateInformation { listener, target }\n\n }\n\n}\n\n\n\n#[derive(PartialEq)]\n\npub(crate) enum TunnelType {\n\n Initiator(UpdateInformation),\n\n NonInitiator,\n\n}\n\n\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/mod.rs", "rank": 21, "score": 87697.89270880692 }, { "content": "type AvailableForNextRound = bool;\n\n\n\npub(crate) struct TunnelManager {\n\n tunnel_registry: HashMap<TunnelId, (OnionTunnel, AvailableForNextRound)>,\n\n links: HashMap<TunnelId, TunnelId>, // old_tunnel_ids to new_tunnel_ids\n\n reverse_links: HashMap<TunnelId, TunnelId>, // new_tunnel_ids to old_tunnel_ids\n\n}\n\n\n\nimpl TunnelManager {\n\n pub(crate) fn new() -> TunnelManager {\n\n TunnelManager {\n\n tunnel_registry: HashMap::new(),\n\n links: HashMap::new(),\n\n reverse_links: HashMap::new(),\n\n }\n\n }\n\n\n\n pub(crate) fn get_id() -> TunnelId {\n\n ID_COUNTER.fetch_add(1, Ordering::Relaxed)\n\n }\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/tunnel_manager.rs", "rank": 22, "score": 83617.9383756009 }, { "content": "fn connect(addr: &'static str) -> Option<TcpStream> {\n\n for _ in 1..6 {\n\n sleep(Duration::from_millis(500));\n\n if let Ok(s) = TcpStream::connect(addr) {\n\n return Some(s);\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "onion_tests/tests/integration.rs", "rank": 23, "score": 82757.20673497317 }, { "content": "struct 
RoundSynchronizer {\n\n update_notify: Arc<(Mutex<NotifyState>, Notify, Notify)>,\n\n new_notify: Arc<Mutex<VecDeque<Registration>>>,\n\n cover_notify: Arc<(Mutex<NotifyState>, Notify, Notify)>,\n\n round_time: Duration,\n\n registration_window: Duration,\n\n tunnel_registration_counter: Arc<Mutex<u8>>,\n\n round_cover_tunnel: Arc<Mutex<Option<TunnelId>>>,\n\n local_addr: String,\n\n blocklist: Arc<RwLock<Blocklist>>,\n\n}\n\n\n\nimpl RoundSynchronizer {\n\n fn new(\n\n round_time: Duration,\n\n build_window: Duration,\n\n host: &str,\n\n port: u16,\n\n blocklist: Arc<RwLock<Blocklist>>,\n\n ) -> RoundSynchronizer {\n", "file_path": "onion_lib/src/p2p_protocol/mod.rs", "rank": 24, "score": 81951.94996588302 }, { "content": "#[derive(Debug)]\n\nstruct UdpChannel {\n\n raw_incoming: std::sync::mpsc::Sender<Vec<u8>>,\n\n ssl_stream: Option<SslStream<UdpSocketWrapper>>, // This is None during connection establishment\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct DtlsSocketLayer {\n\n socket: Arc<UdpSocket>,\n\n dtls_config: Arc<DtlsConfig>,\n\n blocklist: Arc<RwLock<Blocklist>>,\n\n connection_sockets: Arc<Mutex<HashMap<SocketAddr, (Instant, UdpChannel)>>>,\n\n incoming_forwarding_worker: JoinHandle<()>,\n\n outgoing_forwarding_worker: JoinHandle<()>,\n\n outgoing_channel: Sender<(Instant, SocketAddr, Vec<u8>)>,\n\n loopback_channel: (Sender<Vec<u8>>, Mutex<Receiver<Vec<u8>>>),\n\n}\n\n\n\nimpl DtlsSocketLayer {\n\n async fn accept_channel(\n\n connection_sockets_mutex: Arc<Mutex<HashMap<SocketAddr, (Instant, UdpChannel)>>>,\n", "file_path": "onion_lib/src/p2p_protocol/dtls_connections/mod.rs", "rank": 25, "score": 78050.97138812541 }, { "content": "type ConnectionId = u64;\n\n\n\nasync fn handle_incoming_event(\n\n e: IncomingEvent,\n\n p2p_interface: Weak<P2pInterface>,\n\n connection_id: ConnectionId,\n\n) -> Option<OutgoingEvent> {\n\n // first we have to upgrade the p2p interface reference to communicate with p2p layer\n\n match p2p_interface.upgrade() {\n\n 
Some(interface) => match e {\n\n IncomingEvent::TunnelBuild(onion_build) => {\n\n log::debug!(\n\n \"TunnelBuild request from connection {:?}: {:?}\",\n\n connection_id,\n\n onion_build\n\n );\n\n match interface\n\n .build_tunnel(\n\n (onion_build.ip, onion_build.onion_port).into(),\n\n onion_build.host_key,\n", "file_path": "onion_lib/src/api_protocol/mod.rs", "rank": 26, "score": 76552.75792865083 }, { "content": "#[derive(Debug)]\n\nstruct UdpSocketWrapper {\n\n socket: Arc<UdpSocket>,\n\n remote_addr: SocketAddr,\n\n incoming_message: std::sync::mpsc::Receiver<Vec<u8>>, // channel for incoming UDP frames\n\n}\n\n\n\nimpl Read for UdpSocketWrapper {\n\n fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {\n\n match self.incoming_message.try_recv() {\n\n Err(std::sync::mpsc::TryRecvError::Disconnected) => {\n\n /*log::debug!(\n\n \"UdpSocketWrapper from {} to {}: incoming_message channel is disconnected\",\n\n self.socket.local_addr().unwrap(),\n\n self.remote_addr\n\n );*/\n\n Err(ErrorKind::ConnectionReset.into())\n\n }\n\n Err(std::sync::mpsc::TryRecvError::Empty) => Err(ErrorKind::WouldBlock.into()),\n\n Ok(data) => {\n\n log::trace!(\n", "file_path": "onion_lib/src/p2p_protocol/dtls_connections/mod.rs", "rank": 27, "score": 76277.42974805232 }, { "content": "fn get_id() -> ConnectionId {\n\n ID_COUNTER.fetch_add(1, Ordering::Relaxed)\n\n}\n\n\n\npub struct Connection {\n\n pub(super) internal_id: ConnectionId,\n\n write_tx: Sender<OutgoingEvent>,\n\n}\n\n\n\nasync fn read_event<T>(rx: &mut ReadHalf<T>) -> anyhow::Result<IncomingEvent>\n\nwhere\n\n T: AsyncRead,\n\n{\n\n // read message header\n\n let mut buf = [0u8; OnionMessageHeader::hdr_size()];\n\n rx.read_exact(&mut buf).await?;\n\n\n\n // parse buf the onion_msg_hdr\n\n let hdr = OnionMessageHeader::try_from(&buf)?;\n\n\n", "file_path": "onion_lib/src/api_protocol/api_connection/mod.rs", "rank": 28, "score": 70120.81514357188 }, { "content": "type Registration = Arc<(Mutex<NotifyState>, 
Notify)>;\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub enum Direction {\n\n Forward,\n\n Backward,\n\n}\n\n\n", "file_path": "onion_lib/src/p2p_protocol/mod.rs", "rank": 29, "score": 67058.45804000802 }, { "content": " async fn process_data(\n\n &mut self,\n\n d: Direction,\n\n data: Bytes,\n\n iv: IV,\n\n ) -> Result<ProcessedData, ProtocolError> {\n\n log::trace!(\n\n \"Tunnel={:?}: Process incoming data at intermediate hop\",\n\n self.tunnel_id\n\n );\n\n if data.len() != FRAME_DATA_SIZE {\n\n // disconnect immediately due to size error\n\n log::warn!(\n\n \"Tunnel={:?}: Received packet with invalid payload size. Disconnect\",\n\n self.tunnel_id\n\n );\n\n return Err(ProtocolError::InvalidPacketLength);\n\n }\n\n\n\n // expect incoming data backward or forward, apply encryption or decryption and delegate to next hop\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 30, "score": 63451.16883756012 }, { "content": " CryptoContext, HandshakeCryptoConfig, HandshakeCryptoContext,\n\n };\n\n use crate::p2p_protocol::onion_tunnel::fsm::ProtocolError;\n\n use crate::p2p_protocol::onion_tunnel::message_codec::{\n\n deserialize, serialize, SequenceNumberContext, COVER_CHUNK_SIZE, FRAME_DATA_SIZE,\n\n FRAME_SIZE, PAYLOAD_CHUNK_SIZE,\n\n };\n\n use openssl::rsa::Rsa;\n\n use protobuf::Message;\n\n use std::sync::Arc;\n\n\n\n #[test]\n\n fn unit_frame_size() {\n\n let mut data_vec = vec![];\n\n let iv = vec![0; 16];\n\n let cover_chunk = vec![0; COVER_CHUNK_SIZE];\n\n let payload_chunk = vec![0; PAYLOAD_CHUNK_SIZE];\n\n let mut frame = TunnelFrame::new();\n\n frame.set_iv(iv.into());\n\n frame.set_frame_id(1);\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 31, "score": 63451.11115486351 }, { "content": " FrameDataType_oneof_message::handshake_data(data) => assert!(data.has_routing()),\n\n _ => panic!(\"Expected RoutingInformation\"),\n\n };\n\n data_vec.push(data);\n\n\n\n for raw_data in 
data_vec.into_iter() {\n\n assert_eq!(raw_data.len(), FRAME_DATA_SIZE);\n\n frame.set_data(raw_data.into());\n\n let data = frame.write_to_bytes().unwrap();\n\n assert_eq!(data.len(), FRAME_SIZE);\n\n }\n\n }\n\n\n\n #[test]\n\n fn unit_test_sequence_number() {\n\n let mut context = SequenceNumberContext::new();\n\n\n\n // outgoing\n\n assert_eq!(context.get_next_seq_nr(), 1);\n\n assert_eq!(context.get_next_seq_nr(), 2);\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 32, "score": 63450.34464270967 }, { "content": " match deserialize(&data, 1).unwrap().message.unwrap() {\n\n FrameDataType_oneof_message::handshake_data(data) => assert!(data.has_server_hello()),\n\n _ => panic!(\"Expected ServerHello\"),\n\n };\n\n data_vec.push(data);\n\n\n\n // routing, test maximum size\n\n let mut routing = RoutingInformation::new();\n\n routing.set_cover_only(true);\n\n routing.set_tunnel_update_reference(0xffffffff);\n\n routing.set_is_endpoint(true);\n\n routing.set_next_hop_addr(vec![0; 16].into());\n\n routing.set_next_hop_port(0xffff_u32);\n\n let response = cc1.sign(cc2.get_challenge());\n\n routing.set_challenge_response(response.into());\n\n handshake.set_routing(routing);\n\n let mut data = FrameDataType::new();\n\n data.set_handshake_data(handshake);\n\n let data = serialize(data);\n\n match deserialize(&data, 1).unwrap().message.unwrap() {\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 33, "score": 63448.74256987712 }, { "content": " }\n\n Ok(())\n\n }\n\n\n\n async fn process_data(\n\n &mut self,\n\n _d: Direction,\n\n data: Bytes,\n\n iv: IV,\n\n ) -> Result<ProcessedData, ProtocolError> {\n\n // expect incoming data, handshake or encrypted handshake, process and return\n\n log::trace!(\n\n \"Tunnel={:?}: Process incoming data at target hop\",\n\n self.tunnel_id\n\n );\n\n\n\n // if the crypto_context has already been set, we expect encrypted data\n\n let mut data = data.to_vec();\n\n if let 
Some(cc) = &mut self.crypto_context {\n\n // decrypt using keys and iv\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 34, "score": 63448.566535572194 }, { "content": " .as_mut()\n\n .unwrap()\n\n .encrypt(None, &raw_data, true)?;\n\n\n\n log::debug!(\n\n \"Tunnel={:?}: Send encrypted close to prev hop {:?}\",\n\n self.tunnel_id,\n\n self.prev_hop\n\n );\n\n\n\n assert_eq!(raw_data.len(), FRAME_DATA_SIZE);\n\n vec![(iv, raw_data)]\n\n }\n\n DataType::AppData(data) => {\n\n // fragmentation\n\n let mut chunks = vec![];\n\n for data_chunk in data.chunks(PAYLOAD_CHUNK_SIZE) {\n\n let mut app_data = ApplicationData::new();\n\n app_data.set_sequence_number(self.seq_nr_context.get_next_seq_nr());\n\n app_data.set_data(Bytes::copy_from_slice(data_chunk));\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 35, "score": 63448.278436022905 }, { "content": "\n\npub enum ProcessedData {\n\n TransferredToNextHop,\n\n HandshakeData(HandshakeData),\n\n IncomingData(Vec<u8>),\n\n IncomingCover(Vec<u8>, bool),\n\n ReceivedClose,\n\n}\n\n\n\npub enum DataType {\n\n ForwardFrameId(FrameId),\n\n Close,\n\n AppData(Vec<u8>),\n\n Cover(Vec<u8>, bool),\n\n ClientHello(ClientHello),\n\n ServerHello(ServerHello),\n\n RoutingInformation(RoutingInformation),\n\n}\n\n\n\nconst FRAME_SIZE: usize = 1024;\n\nconst FRAME_DATA_SIZE: usize = 998;\n\nconst SERIALIZED_FRAME_DATA_SIZE: usize = 982;\n\nconst FRAME_DATA_CONTENT_SIZE: usize = 974;\n\nconst PAYLOAD_CHUNK_SIZE: usize = 963;\n\nconst COVER_CHUNK_SIZE: usize = 958;\n\n\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 36, "score": 63448.2690141975 }, { "content": " log::warn!(\"Close action not supported for intermediate hop codec\");\n\n }\n\n\n\n fn as_any(&mut self) -> &mut dyn Any {\n\n self\n\n }\n\n\n\n async fn process_forward_frame_id(&mut self, _id: FrameId) -> Result<(), ProtocolError> {\n\n log::warn!(\"Setting forward 
frame_id not supported for this codec\");\n\n Err(ProtocolError::UnsupportedAction)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::p2p_protocol::messages::p2p_messages::{\n\n ApplicationData, ClientHello, Close, CoverTraffic, EncryptedServerHelloData, FrameDataType,\n\n FrameDataType_oneof_message, HandshakeData, RoutingInformation, ServerHello, TunnelFrame,\n\n };\n\n use crate::p2p_protocol::onion_tunnel::crypto::{\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 37, "score": 63447.64300456074 }, { "content": " assert_eq!(raw_data.len(), FRAME_DATA_SIZE);\n\n vec![(iv, raw_data)]\n\n }\n\n DataType::AppData(data) => {\n\n // fragmentation\n\n let mut chunks = vec![];\n\n for data_chunk in data.chunks(PAYLOAD_CHUNK_SIZE) {\n\n let mut app_data = ApplicationData::new();\n\n app_data.set_sequence_number(self.seq_nr_context.get_next_seq_nr());\n\n app_data.set_data(Bytes::copy_from_slice(data_chunk));\n\n let mut data = FrameDataType::new();\n\n data.set_app_data(app_data);\n\n let mut raw_data = serialize(data);\n\n\n\n // Unencrypted data transfer is not allowed\n\n assert!(!self.crypto_contexts.is_empty());\n\n\n\n // layered encryption via iv and keys using the crypto contexts\n\n let mut iv: Option<Vec<u8>> = None;\n\n for (i, cc) in self.crypto_contexts.iter_mut().rev().enumerate() {\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 38, "score": 63447.35991056461 }, { "content": "use std::{net::SocketAddr, sync::Arc};\n\n\n\nuse crate::p2p_protocol::dtls_connections::DtlsSocketLayer;\n\nuse crate::p2p_protocol::messages::p2p_messages::{\n\n ApplicationData, ApplicationData_oneof_message, ClientHello, Close, CoverTraffic, FrameData,\n\n FrameDataType, FrameDataType_oneof_message, HandshakeData, RoutingInformation, ServerHello,\n\n TunnelFrame,\n\n};\n\nuse crate::p2p_protocol::onion_tunnel::crypto::{CryptoContext, AUTH_PLACEHOLDER, IV_SIZE};\n\nuse 
crate::p2p_protocol::onion_tunnel::fsm::ProtocolError;\n\nuse crate::p2p_protocol::{Direction, FrameId, TunnelId, CLIENT_HELLO_FORWARD_ID};\n\nuse async_trait::async_trait;\n\nuse bytes::Bytes;\n\nuse std::any::Any;\n\nuse std::collections::HashSet;\n\nuse tokio::sync::RwLock;\n\n\n\nuse super::crypto::AUTH_SIZE;\n\nuse crate::p2p_protocol::onion_tunnel::frame_id_manager::FrameIdManager;\n\nuse protobuf::Message;\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 39, "score": 63447.09985958691 }, { "content": " data.set_handshake_data(handshake);\n\n let mut data = serialize(data);\n\n\n\n // Unencrypted routing transfer is not allowed\n\n assert!(!self.crypto_contexts.is_empty());\n\n\n\n // encrypt via iv and keys using the crypto contexts\n\n let mut iv: Option<Vec<u8>> = None;\n\n for (i, cc) in self.crypto_contexts.iter_mut().rev().enumerate() {\n\n let (iv_, data_) = cc.encrypt(iv.as_deref(), &data, i == 0)?;\n\n iv = Some(iv_);\n\n data = data_;\n\n }\n\n assert_eq!(data.len(), FRAME_DATA_SIZE);\n\n vec![(iv, data)]\n\n }\n\n _ => {\n\n log::warn!(\n\n \"Tunnel={:?}: Invalid write action in initiator codec\",\n\n self.tunnel_id\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 40, "score": 63446.76012716352 }, { "content": " _ => panic!(\"Expected ApplicationData\"),\n\n };\n\n data_vec.push(data);\n\n\n\n // data\n\n app_data.set_data(payload_chunk.into());\n\n let mut data = FrameDataType::new();\n\n data.set_app_data(app_data);\n\n let data = serialize(data);\n\n match deserialize(&data, 1).unwrap().message.unwrap() {\n\n FrameDataType_oneof_message::app_data(data) => assert!(data.has_data()),\n\n _ => panic!(\"Expected ApplicationData\"),\n\n };\n\n data_vec.push(data);\n\n\n\n // test handshake data\n\n let key = Rsa::generate(4096).unwrap();\n\n let pub_key = Rsa::public_key_from_pem(key.public_key_to_pem().unwrap().as_ref()).unwrap();\n\n let priv_key =\n\n 
Rsa::private_key_from_pem(key.private_key_to_pem().unwrap().as_ref()).unwrap();\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 41, "score": 63446.33527458956 }, { "content": " self.tunnel_id\n\n );\n\n Ok(ProcessedData::HandshakeData(data))\n\n }\n\n FrameDataType_oneof_message::app_data(message) => {\n\n log::trace!(\n\n \"Tunnel={:?}: Initiator receives application data with sequence_number={:?}\",\n\n self.tunnel_id,\n\n message.sequence_number\n\n );\n\n if let Err(e) = self\n\n .seq_nr_context\n\n .verify_incoming_seq_nr(message.sequence_number)\n\n {\n\n return Err(e);\n\n }\n\n return match message.message {\n\n None => Err(ProtocolError::EmptyMessage),\n\n Some(data) => match data {\n\n ApplicationData_oneof_message::data(data) => {\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 42, "score": 63445.74198684548 }, { "content": " for data_chunk in data.chunks(COVER_CHUNK_SIZE) {\n\n let mut app_data = ApplicationData::new();\n\n let mut cover_packet = CoverTraffic::new();\n\n cover_packet.set_data(Bytes::copy_from_slice(data_chunk));\n\n cover_packet.set_mirrored(false);\n\n app_data.set_sequence_number(self.seq_nr_context.get_next_seq_nr());\n\n app_data.set_cover_traffic(cover_packet);\n\n let mut data = FrameDataType::new();\n\n data.set_app_data(app_data);\n\n let mut raw_data = serialize(data);\n\n\n\n // Unencrypted data transfer is not allowed\n\n assert!(!self.crypto_contexts.is_empty());\n\n\n\n // layered encryption via iv and keys using the crypto contexts\n\n let mut iv: Option<Vec<u8>> = None;\n\n for (i, cc) in self.crypto_contexts.iter_mut().rev().enumerate() {\n\n let (iv_, data_) = cc.encrypt(iv.as_deref(), &raw_data, i == 0)?;\n\n iv = Some(iv_);\n\n raw_data = data_;\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 43, "score": 63445.516878676164 }, { "content": " let mut iv: Option<Vec<u8>> = None;\n\n for (i, cc) in 
self.crypto_contexts.iter_mut().rev().enumerate() {\n\n let (iv_, data_) = cc.encrypt(iv.as_deref(), &data, i == 0)?;\n\n iv = Some(iv_);\n\n data = data_;\n\n }\n\n assert_eq!(data.len(), FRAME_DATA_SIZE);\n\n vec![(iv, data)]\n\n }\n\n DataType::RoutingInformation(data) => {\n\n log::debug!(\n\n \"Tunnel={:?}: Send RoutingInformation={:?} to next hop {:?}\",\n\n self.tunnel_id,\n\n data,\n\n self.next_hop\n\n );\n\n\n\n let mut handshake = HandshakeData::new();\n\n handshake.set_routing(data);\n\n let mut data = FrameDataType::new();\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 44, "score": 63445.23046408657 }, { "content": " frame.set_frame_id(self.forward_frame_id);\n\n\n\n let iv_data_chunks = match data {\n\n DataType::ForwardFrameId(id) => {\n\n let mut data = FrameDataType::new();\n\n data.set_forward_frame_id(id);\n\n let mut data = serialize(data);\n\n\n\n // Unencrypted frameID transfer is not allowed\n\n assert!(!self.crypto_contexts.is_empty());\n\n\n\n // layered encryption via iv and keys using the crypto contexts\n\n let mut iv: Option<Vec<u8>> = None;\n\n for (i, cc) in self.crypto_contexts.iter_mut().rev().enumerate() {\n\n let (iv_, data_) = cc.encrypt(iv.as_deref(), &data, i == 0)?;\n\n iv = Some(iv_);\n\n data = data_;\n\n }\n\n assert_eq!(data.len(), FRAME_DATA_SIZE);\n\n vec![(iv, data)]\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 45, "score": 63445.108350307724 }, { "content": " self.prev_hop\n\n );\n\n chunks\n\n }\n\n DataType::Cover(data, _) => {\n\n // mirror received cover data frame\n\n let mut chunks = vec![];\n\n for data_chunk in data.chunks(COVER_CHUNK_SIZE) {\n\n let mut app_data = ApplicationData::new();\n\n let mut cover_packet = CoverTraffic::new();\n\n cover_packet.set_data(Bytes::copy_from_slice(data_chunk));\n\n cover_packet.set_mirrored(true);\n\n app_data.set_sequence_number(self.seq_nr_context.get_next_seq_nr());\n\n 
app_data.set_cover_traffic(cover_packet);\n\n let mut data = FrameDataType::new();\n\n data.set_app_data(app_data);\n\n let raw_data = serialize(data);\n\n\n\n // Unencrypted cover transfer is not allowed\n\n assert!(self.crypto_context.is_some());\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 46, "score": 63444.832443278865 }, { "content": "\n\n // test app data\n\n let mut app_data = ApplicationData::new();\n\n app_data.set_sequence_number(1);\n\n\n\n // close\n\n app_data.set_close(Close::new());\n\n let mut data = FrameDataType::new();\n\n data.set_app_data(app_data.clone());\n\n let data = serialize(data);\n\n match deserialize(&data, 1).unwrap().message.unwrap() {\n\n FrameDataType_oneof_message::app_data(data) => assert!(data.has_close()),\n\n _ => panic!(\"Expected ApplicationData\"),\n\n };\n\n data_vec.push(data);\n\n\n\n // cover not mirrored\n\n let mut cover = CoverTraffic::new();\n\n cover.set_data(cover_chunk.clone().into());\n\n cover.set_mirrored(true);\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 47, "score": 63444.77266021144 }, { "content": " );\n\n if let Err(e) = self\n\n .seq_nr_context\n\n .verify_incoming_seq_nr(message.sequence_number)\n\n {\n\n return Err(e);\n\n }\n\n return match message.message {\n\n None => Err(ProtocolError::EmptyMessage),\n\n Some(data) => match data {\n\n ApplicationData_oneof_message::data(data) => {\n\n Ok(ProcessedData::IncomingData(data.to_vec()))\n\n }\n\n ApplicationData_oneof_message::cover_traffic(cover) => Ok(\n\n ProcessedData::IncomingCover(cover.data.to_vec(), cover.mirrored),\n\n ),\n\n ApplicationData_oneof_message::close(_) => {\n\n log::trace!(\"Tunnel={:?}: Target receives close\", self.tunnel_id);\n\n Ok(ProcessedData::ReceivedClose)\n\n }\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 48, "score": 63444.62421908206 }, { "content": " \"Cannot write frame via initiator codec: 
{:?}\",\n\n e\n\n )));\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n async fn process_data(\n\n &mut self,\n\n _d: Direction,\n\n data: Bytes,\n\n iv: IV,\n\n ) -> Result<ProcessedData, ProtocolError> {\n\n // expected incoming data or incoming handshake, process and return\n\n let mut iv = iv.to_vec();\n\n\n\n log::trace!(\n\n \"Tunnel={:?}: Process incoming data at initiator hop\",\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 49, "score": 63444.550664801085 }, { "content": " log::trace!(\"Tunnel={:?}: Decrypt incoming data\", self.tunnel_id);\n\n let (_iv, data_) = cc.decrypt(&iv, &data, true)?;\n\n data = data_;\n\n }\n\n\n\n let msg = deserialize(data.as_ref(), self.tunnel_id)?;\n\n if let Some(msg) = msg.message {\n\n match msg {\n\n FrameDataType_oneof_message::handshake_data(data) => {\n\n log::trace!(\n\n \"Tunnel={:?}: Target receives handshake data\",\n\n self.tunnel_id\n\n );\n\n Ok(ProcessedData::HandshakeData(data))\n\n }\n\n FrameDataType_oneof_message::app_data(message) => {\n\n log::trace!(\n\n \"Tunnel={:?}: Target receives application data with sequence_number={:?}\",\n\n self.tunnel_id,\n\n message.sequence_number\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 50, "score": 63444.32038199301 }, { "content": " \"Tunnel={:?}: Send ServerHello=({:?}) to/via prev hop {:?}\",\n\n self.tunnel_id,\n\n server_hello,\n\n self.prev_hop\n\n );\n\n\n\n // prepare frame\n\n let mut iv = vec![0; IV_SIZE];\n\n openssl::rand::rand_bytes(&mut iv).expect(\"Failed to generated random IV\");\n\n let mut handshake = HandshakeData::new();\n\n handshake.set_server_hello(server_hello);\n\n let mut data = FrameDataType::new();\n\n data.set_handshake_data(handshake);\n\n let raw_data = serialize(data);\n\n assert_eq!(raw_data.len(), FRAME_DATA_SIZE);\n\n vec![(iv, raw_data)]\n\n }\n\n _ => {\n\n log::warn!(\n\n \"Tunnel={:?}: Invalid write action in target codec\",\n", "file_path": 
"onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 51, "score": 63444.19926977907 }, { "content": " },\n\n };\n\n }\n\n _ => Err(ProtocolError::UnexpectedMessageType),\n\n }\n\n } else {\n\n // empty\n\n Err(ProtocolError::UnexpectedMessageType)\n\n }\n\n }\n\n\n\n async fn close(&mut self) {\n\n log::debug!(\n\n \"Tunnel={:?}: Send close at target endpoint to initiator\",\n\n self.tunnel_id\n\n );\n\n let _ = self.write(DataType::Close).await;\n\n }\n\n\n\n fn as_any(&mut self) -> &mut dyn Any {\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 52, "score": 63443.69883239397 }, { "content": " self.tunnel_id\n\n );\n\n // decrypt from next hop to target using iv and cc\n\n let mut dec_data = data.to_vec();\n\n if !self.crypto_contexts.is_empty() {\n\n log::trace!(\"Tunnel={:?}: Decrypt incoming data\", self.tunnel_id);\n\n for (i, cc) in self.crypto_contexts.iter_mut().rev().enumerate().rev() {\n\n let (iv_, data_) = cc.decrypt(&iv, &dec_data, i == 0)?;\n\n iv = iv_;\n\n dec_data = data_;\n\n }\n\n }\n\n\n\n // deserialize data\n\n let msg = deserialize(dec_data.as_ref(), self.tunnel_id)?;\n\n if let Some(msg) = msg.message {\n\n match msg {\n\n FrameDataType_oneof_message::handshake_data(data) => {\n\n log::trace!(\n\n \"Tunnel={:?}: Initiator receives handshake data\",\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 53, "score": 63443.34119754677 }, { "content": "\n\n let (iv, raw_data) = self\n\n .crypto_context\n\n .as_mut()\n\n .unwrap()\n\n .encrypt(None, &raw_data, true)?;\n\n\n\n assert_eq!(raw_data.len(), FRAME_DATA_SIZE);\n\n chunks.push((iv, raw_data));\n\n }\n\n log::debug!(\n\n \"Tunnel={:?}: Send encrypted application data ({:?} fragment(s)) to prev hop {:?}\",\n\n self.tunnel_id,\n\n chunks.len(),\n\n self.prev_hop\n\n );\n\n chunks\n\n }\n\n DataType::ServerHello(server_hello) => {\n\n log::debug!(\n", "file_path": 
"onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 54, "score": 63442.12267035796 }, { "content": " Ok(ProcessedData::IncomingData(data.to_vec()))\n\n }\n\n ApplicationData_oneof_message::cover_traffic(cover) => Ok(\n\n ProcessedData::IncomingCover(cover.data.to_vec(), cover.mirrored),\n\n ),\n\n ApplicationData_oneof_message::close(_) => {\n\n // initiator received close, deconstruct tunnel\n\n log::trace!(\n\n \"Tunnel={:?}: Initiator receives close\",\n\n self.tunnel_id\n\n );\n\n Ok(ProcessedData::ReceivedClose)\n\n }\n\n },\n\n };\n\n }\n\n _ => Err(ProtocolError::UnexpectedMessageType),\n\n }\n\n } else {\n\n // empty\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 55, "score": 63442.1102702257 }, { "content": " let mut data = FrameDataType::new();\n\n data.set_app_data(app_data);\n\n let raw_data = serialize(data);\n\n\n\n // Unencrypted data transfer is not allowed\n\n assert!(self.crypto_context.is_some());\n\n\n\n let (iv, raw_data) = self\n\n .crypto_context\n\n .as_mut()\n\n .unwrap()\n\n .encrypt(None, &raw_data, true)?;\n\n\n\n assert_eq!(raw_data.len(), FRAME_DATA_SIZE);\n\n chunks.push((iv, raw_data));\n\n }\n\n log::debug!(\n\n \"Tunnel={:?}: Send encrypted application data ({:?} fragment(s)) to prev hop {:?}\",\n\n self.tunnel_id,\n\n chunks.len(),\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 56, "score": 63441.73311362327 }, { "content": " let (iv_, data_) = cc.encrypt(iv.as_deref(), &raw_data, i == 0)?;\n\n iv = Some(iv_);\n\n raw_data = data_;\n\n }\n\n assert_eq!(raw_data.len(), FRAME_DATA_SIZE);\n\n chunks.push((iv, raw_data));\n\n }\n\n\n\n log::debug!(\n\n \"Tunnel={:?}: Send encrypted application data ({:?} fragment(s)) to next hop {:?}.\",\n\n self.tunnel_id,\n\n chunks.len(),\n\n self.next_hop\n\n );\n\n\n\n chunks\n\n }\n\n DataType::Cover(data, _) => {\n\n // fragmentation\n\n let mut chunks = vec![];\n", "file_path": 
"onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 57, "score": 63441.116639773645 }, { "content": " let mut frame = TunnelFrame::new();\n\n let (frame, addr) = match d {\n\n Direction::Forward => {\n\n log::debug!(\"Tunnel={:?}: Hop receives a forward message, decrypt the payload and pass it to the next hop {:?}\", self.tunnel_id, self.next_hop);\n\n // decrypt using iv and key\n\n // An intermediate hop is the final decrypting hop in two cases:\n\n // 1. server_hello was not forwarded so far: Then we will forward an unencrypted client_hello message\n\n // 2. forward_frame_id is zero: After server_hello is received by the server, the server sends us the forward_frame_id\n\n let (iv, decrypted_data) = self.crypto_context.decrypt(\n\n &iv,\n\n &data,\n\n !self.server_hello_forwarded\n\n || self.forward_frame_id == CLIENT_HELLO_FORWARD_ID,\n\n )?;\n\n\n\n if self.server_hello_forwarded && self.forward_frame_id == CLIENT_HELLO_FORWARD_ID {\n\n // expect frame id\n\n return match deserialize(&decrypted_data, self.tunnel_id) {\n\n Ok(data) => match data.message {\n\n None => Err(ProtocolError::EmptyFrameId),\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 58, "score": 63441.07439577981 }, { "content": "\n\n let config = Arc::new(HandshakeCryptoConfig::new(pub_key, priv_key));\n\n let mut cc1 = HandshakeCryptoContext::new(config.clone());\n\n let mut cc2 = HandshakeCryptoContext::new(config);\n\n let shared_secret = cc1.finish_ecdh(cc2.get_public_key().as_ref()).unwrap();\n\n let _cc_a = CryptoContext::new(shared_secret.clone(), true);\n\n let mut cc_b = CryptoContext::new(shared_secret, false);\n\n let mut handshake = HandshakeData::new();\n\n\n\n // client hello\n\n let mut client_hello = ClientHello::new();\n\n client_hello.set_backward_frame_id(0xffffffff);\n\n client_hello.set_ecdh_public_key(cc1.get_public_key().into());\n\n handshake.set_client_hello(client_hello);\n\n let mut data = FrameDataType::new();\n\n 
data.set_handshake_data(handshake.clone());\n\n let data = serialize(data);\n\n match deserialize(&data, 1).unwrap().message.unwrap() {\n\n FrameDataType_oneof_message::handshake_data(data) => assert!(data.has_client_hello()),\n\n _ => panic!(\"Expected ClientHello\"),\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 59, "score": 63440.775947784736 }, { "content": " }\n\n DataType::Close => {\n\n // send close similar as app_data without fragmentation\n\n let mut app_data = ApplicationData::new();\n\n app_data.set_sequence_number(self.seq_nr_context.get_next_seq_nr());\n\n app_data.set_close(Close::new());\n\n let mut data = FrameDataType::new();\n\n data.set_app_data(app_data);\n\n let mut raw_data = serialize(data);\n\n\n\n // Unencrypted close is not allowed\n\n assert!(!self.crypto_contexts.is_empty());\n\n\n\n // layered encryption via iv and keys using the crypto contexts\n\n let mut iv: Option<Vec<u8>> = None;\n\n for (i, cc) in self.crypto_contexts.iter_mut().rev().enumerate() {\n\n let (iv_, data_) = cc.encrypt(iv.as_deref(), &raw_data, i == 0)?;\n\n iv = Some(iv_);\n\n raw_data = data_;\n\n }\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 60, "score": 63440.767060042264 }, { "content": " );\n\n return Err(ProtocolError::CodecUnsupportedAction);\n\n }\n\n };\n\n\n\n // write fragmented frames\n\n for (iv, data) in iv_data_chunks {\n\n let iv = iv.unwrap_or_else(|| {\n\n let mut iv = vec![0; IV_SIZE];\n\n openssl::rand::rand_bytes(&mut iv).expect(\"Failed to generated random IV\");\n\n iv\n\n });\n\n assert_eq!(iv.len(), IV_SIZE);\n\n frame.set_iv(iv.into());\n\n frame.set_data(data.into());\n\n let data = frame.write_to_bytes().unwrap();\n\n assert_eq!(data.len(), FRAME_SIZE);\n\n // write to stream\n\n if let Err(e) = self.socket.send_to(data.as_ref(), self.next_hop).await {\n\n return Err(ProtocolError::IOError(format!(\n", "file_path": 
"onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 61, "score": 63440.66651452809 }, { "content": " app_data.set_cover_traffic(cover);\n\n let mut data = FrameDataType::new();\n\n data.set_app_data(app_data.clone());\n\n let data = serialize(data);\n\n match deserialize(&data, 1).unwrap().message.unwrap() {\n\n FrameDataType_oneof_message::app_data(data) => assert!(data.has_cover_traffic()),\n\n _ => panic!(\"Expected ApplicationData\"),\n\n };\n\n data_vec.push(data);\n\n\n\n // cover mirrored\n\n let mut cover = CoverTraffic::new();\n\n cover.set_data(cover_chunk.into());\n\n cover.set_mirrored(false);\n\n app_data.set_cover_traffic(cover);\n\n let mut data = FrameDataType::new();\n\n data.set_app_data(app_data.clone());\n\n let data = serialize(data);\n\n match deserialize(&data, 1).unwrap().message.unwrap() {\n\n FrameDataType_oneof_message::app_data(data) => assert!(data.has_cover_traffic()),\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 62, "score": 63440.572970268215 }, { "content": " .await\n\n .new_frame_id(self.tunnel_id, Direction::Backward);\n\n }\n\n client_hello.set_backward_frame_id(self.next_hop_backward_frame_id);\n\n\n\n log::debug!(\n\n \"Tunnel={:?}: Send ClientHello={:?} to next hop {:?}\",\n\n self.tunnel_id,\n\n client_hello,\n\n self.next_hop\n\n );\n\n\n\n // prepare frame\n\n let mut handshake = HandshakeData::new();\n\n handshake.set_client_hello(client_hello);\n\n let mut data = FrameDataType::new();\n\n data.set_handshake_data(handshake);\n\n let mut data = serialize(data);\n\n\n\n // encrypt via iv and keys using the crypto contexts\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 63, "score": 63440.4662872328 }, { "content": " Err(ProtocolError::UnexpectedMessageType)\n\n }\n\n }\n\n\n\n async fn close(&mut self) {\n\n log::debug!(\n\n \"Tunnel={:?}: At initiator send close to target\",\n\n self.tunnel_id,\n\n );\n\n let _ = 
self.write(DataType::Close).await;\n\n }\n\n\n\n fn as_any(&mut self) -> &mut dyn Any {\n\n self\n\n }\n\n\n\n async fn process_forward_frame_id(&mut self, id: FrameId) -> Result<(), ProtocolError> {\n\n if self.forward_frame_id == CLIENT_HELLO_FORWARD_ID {\n\n log::trace!(\n\n \"Tunnel={:?}: Set forward_frame_id = {:?}\",\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 64, "score": 63439.999456447076 }, { "content": "// make target to intermediate\n\nimpl IntermediateHopCodec {\n\n pub fn from(target: &mut TargetEndpoint, next_hop: SocketAddr) -> Self {\n\n Self {\n\n socket: target.socket.clone(),\n\n next_hop,\n\n prev_hop: target.prev_hop,\n\n tunnel_id: target.tunnel_id,\n\n forward_frame_id: CLIENT_HELLO_FORWARD_ID,\n\n backward_frame_id: target.backward_frame_id,\n\n crypto_context: target.crypto_context.take().unwrap(),\n\n server_hello_forwarded: false,\n\n }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl P2pCodec for InitiatorEndpoint {\n\n async fn write(&mut self, data: DataType) -> Result<(), ProtocolError> {\n\n let mut frame = TunnelFrame::new();\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 65, "score": 63439.579443867064 }, { "content": " return Err(ProtocolError::EmptyFrameId);\n\n } else {\n\n frame.set_frame_id(self.backward_frame_id);\n\n }\n\n\n\n let iv_data_bytes = match data {\n\n DataType::Close => {\n\n // send close to initiator endpoint similar to appData\n\n let mut app_data = ApplicationData::new();\n\n app_data.set_sequence_number(self.seq_nr_context.get_next_seq_nr());\n\n app_data.set_close(Close::new());\n\n let mut data = FrameDataType::new();\n\n data.set_app_data(app_data);\n\n let raw_data = serialize(data);\n\n\n\n // // Unencrypted close transfer is not allowed\n\n assert!(self.crypto_context.is_some());\n\n\n\n let (iv, raw_data) = self\n\n .crypto_context\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 66, "score": 
63438.810866830565 }, { "content": " fn verify_incoming_seq_nr(&mut self, seq_nr: SequenceNumber) -> Result<(), ProtocolError> {\n\n // reject packets that are too old, window is 20 sequence numbers in the past\n\n if self.newest_received >= 20 && self.newest_received - 20 > seq_nr {\n\n return Err(ProtocolError::ExpiredSequenceNumber);\n\n }\n\n\n\n // reject packets with reused sequence number\n\n if !self.used_seq_nrs.insert(seq_nr) {\n\n return Err(ProtocolError::ReusedSequenceNumber);\n\n }\n\n\n\n // update newest received\n\n if self.newest_received < seq_nr {\n\n self.newest_received = seq_nr;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n/**\n\n * P2pCodec responsible for encryption, message padding and writing messages to the socket\n\n */\n\n\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 67, "score": 63438.71053015603 }, { "content": " self.tunnel_id\n\n );\n\n return Err(ProtocolError::CodecUnsupportedAction);\n\n }\n\n };\n\n\n\n for (iv, data) in iv_data_bytes {\n\n assert_eq!(iv.len(), IV_SIZE);\n\n frame.set_iv(iv.into());\n\n frame.set_data(data.into());\n\n let data = frame.write_to_bytes().unwrap();\n\n assert_eq!(data.len(), FRAME_SIZE);\n\n\n\n // write to stream\n\n if let Err(e) = self.socket.send_to(data.as_ref(), self.prev_hop).await {\n\n return Err(ProtocolError::IOError(format!(\n\n \"Cannot write frame via target codec: {:?}\",\n\n e\n\n )));\n\n }\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 68, "score": 63438.63489559284 }, { "content": " Err(_) => Err(ProtocolError::EmptyFrameId),\n\n };\n\n }\n\n\n\n // check if we should forward a client hello\n\n frame.set_frame_id(self.forward_frame_id);\n\n frame.set_iv(iv.into());\n\n frame.set_data(decrypted_data.into());\n\n (frame, self.next_hop)\n\n }\n\n Direction::Backward => {\n\n // encrypt using iv and key\n\n log::debug!(\"Tunnel={:?}: Hop receives a backward message, encrypt the payload and pass it to the prev 
hop {:?}\", self.tunnel_id, self.prev_hop);\n\n // only use AES-GCM once, which is that the next hop sends its server_hello back\n\n let (iv, encrypted_data) =\n\n self.crypto_context\n\n .encrypt(Some(&iv), &data, !self.server_hello_forwarded)?;\n\n self.server_hello_forwarded = true; // never set to false again\n\n frame.set_frame_id(self.backward_frame_id);\n\n frame.set_iv(iv.into());\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 69, "score": 63438.14797525927 }, { "content": " }\n\n assert_eq!(raw_data.len(), FRAME_DATA_SIZE);\n\n chunks.push((iv, raw_data));\n\n }\n\n\n\n log::debug!(\n\n \"Tunnel={:?}: Send encrypted application data ({:?} fragment(s)) to next hop {:?}.\",\n\n self.tunnel_id,\n\n chunks.len(),\n\n self.next_hop\n\n );\n\n\n\n chunks\n\n }\n\n DataType::ClientHello(mut client_hello) => {\n\n // calculate frame_id\n\n if self.next_hop_backward_frame_id == 0 {\n\n self.next_hop_backward_frame_id = self\n\n .frame_id_manager\n\n .write()\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 70, "score": 63437.598396583024 }, { "content": " Some(data) => match data {\n\n FrameDataType_oneof_message::forward_frame_id(id) => {\n\n log::trace!(\n\n \"Tunnel={:?}: New secret forwards id: {:?}\",\n\n self.tunnel_id,\n\n id\n\n );\n\n FrameIdManager::verify_frame_id(id)?;\n\n self.forward_frame_id = id;\n\n Ok(ProcessedData::TransferredToNextHop)\n\n }\n\n _ => {\n\n log::warn!(\n\n \"Tunnel={:?}: Cannot parse to forward_frame_id\",\n\n self.tunnel_id,\n\n );\n\n Err(ProtocolError::ProtobufError)\n\n }\n\n },\n\n },\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 71, "score": 63436.79018000718 }, { "content": " };\n\n data_vec.push(data);\n\n\n\n // server hello, test maximum size\n\n let mut server_hello = ServerHello::new();\n\n let signature = cc2.hop_sign(&cc1.get_public_key());\n\n let mut encrypted_data = EncryptedServerHelloData::new();\n\n 
encrypted_data.set_signature(signature.into());\n\n encrypted_data.set_backward_frame_id(0xffffffff);\n\n encrypted_data.set_forward_frame_id(0xffffffff);\n\n encrypted_data.set_challenge(cc2.get_challenge().to_owned().into());\n\n let raw_enc_data = encrypted_data.write_to_bytes().unwrap();\n\n let (iv, enc_data) = cc_b.encrypt(None, &raw_enc_data, false).unwrap();\n\n server_hello.set_ecdh_public_key(cc2.get_public_key().into());\n\n server_hello.set_iv(iv.into());\n\n server_hello.set_encrypted_data(enc_data.into());\n\n handshake.set_server_hello(server_hello);\n\n let mut data = FrameDataType::new();\n\n data.set_handshake_data(handshake.clone());\n\n let data = serialize(data);\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 72, "score": 63436.569764245134 }, { "content": " frame.set_data(encrypted_data.into());\n\n (frame, self.prev_hop)\n\n }\n\n };\n\n\n\n let data = frame.write_to_bytes().unwrap();\n\n assert_eq!(data.len(), FRAME_SIZE);\n\n\n\n // write to stream\n\n if let Err(e) = self.socket.send_to(data.as_ref(), addr).await {\n\n return Err(ProtocolError::IOError(format!(\n\n \"Cannot write frame via intermediate codec: {:?}\",\n\n e\n\n )));\n\n }\n\n\n\n Ok(ProcessedData::TransferredToNextHop)\n\n }\n\n\n\n async fn close(&mut self) {\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 73, "score": 63436.20610850302 }, { "content": " \"Tunnel={:?}: Set the crypto context of this target codec\",\n\n self.tunnel_id\n\n );\n\n self.crypto_context = Some(cc);\n\n } else {\n\n log::warn!(\n\n \"Tunnel={:?}: Crypto context of this target codec already set\",\n\n self.tunnel_id\n\n );\n\n }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl P2pCodec for IntermediateHopCodec {\n\n async fn write(&mut self, _data: DataType) -> Result<(), ProtocolError> {\n\n log::warn!(\"Write action not supported for intermediate hop codec\");\n\n return Err(ProtocolError::CodecUnsupportedAction);\n\n }\n\n\n", 
"file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 74, "score": 63435.85289357645 }, { "content": " );\n\n self.next_hop_backward_frame_id_old = self.next_hop_backward_frame_id;\n\n self.next_hop_backward_frame_id = id;\n\n }\n\n\n\n fn add_crypto_context(&mut self, cc: CryptoContext) {\n\n log::trace!(\n\n \"Tunnel={:?}: Add crypto context to initiator codec\",\n\n self.tunnel_id\n\n );\n\n self.crypto_contexts.push(cc)\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl P2pCodec for TargetEndpoint {\n\n async fn write(&mut self, data: DataType) -> Result<(), ProtocolError> {\n\n let mut frame = TunnelFrame::new();\n\n\n\n if self.backward_frame_id == 0 {\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 75, "score": 63435.00808037965 }, { "content": "impl TargetEndpoint {\n\n pub fn new(\n\n socket: Arc<DtlsSocketLayer>,\n\n prev_hop: SocketAddr,\n\n frame_id_manager: Arc<RwLock<FrameIdManager>>,\n\n tunnel_id: TunnelId,\n\n ) -> Self {\n\n Self {\n\n socket,\n\n prev_hop,\n\n frame_id_manager,\n\n tunnel_id,\n\n backward_frame_id: 0,\n\n crypto_context: None,\n\n seq_nr_context: SequenceNumberContext::new(),\n\n }\n\n }\n\n\n\n pub async fn lock_as_target_endpoint(&mut self) {\n\n // this endpoint is the target and will not be transferred to an intermediate_hop\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 76, "score": 63434.757519376144 }, { "content": " frame_id_manager: Arc<RwLock<FrameIdManager>>,\n\n tunnel_id: TunnelId,\n\n forward_frame_id: FrameId, // used for send to next hop\n\n next_hop_backward_frame_id: FrameId,\n\n next_hop_backward_frame_id_old: FrameId, // cache for storing last, otherwise would be overwritten too early\n\n crypto_contexts: Vec<CryptoContext>,\n\n seq_nr_context: SequenceNumberContext,\n\n}\n\n\n\nimpl InitiatorEndpoint {\n\n pub fn new(\n\n socket: Arc<DtlsSocketLayer>,\n\n next_hop: SocketAddr,\n\n frame_id_manager: 
Arc<RwLock<FrameIdManager>>,\n\n tunnel_id: TunnelId,\n\n ) -> Self {\n\n Self {\n\n socket,\n\n next_hop,\n\n frame_id_manager,\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 77, "score": 63433.59068898879 }, { "content": "\n\n /*\n\n * Tunnel close, send close messages\n\n */\n\n async fn close(&mut self);\n\n\n\n /*\n\n * Get the implementation of the trait for updating codecs\n\n */\n\n fn as_any(&mut self) -> &mut dyn Any;\n\n\n\n /*\n\n * Set the frame_id for forwarding packets\n\n */\n\n async fn process_forward_frame_id(&mut self, _id: FrameId) -> Result<(), ProtocolError>;\n\n\n\n /*\n\n * Set the frame_id for backward packets\n\n */\n\n fn set_backward_frame_id(&mut self, _id: FrameId) {\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 78, "score": 63433.41903670635 }, { "content": " );\n\n assert_eq!(context.verify_incoming_seq_nr(30), Ok(()));\n\n assert_eq!(\n\n context.verify_incoming_seq_nr(4),\n\n Err(ProtocolError::ExpiredSequenceNumber)\n\n );\n\n assert_eq!(context.verify_incoming_seq_nr(17), Ok(()));\n\n }\n\n}\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 79, "score": 63432.95003709682 }, { "content": " assert_eq!(context.get_next_seq_nr(), 3);\n\n for _ in 0..10 {\n\n context.get_next_seq_nr();\n\n }\n\n assert_eq!(context.get_next_seq_nr(), 14);\n\n assert_eq!(context.get_next_seq_nr(), 15);\n\n assert_eq!(context.get_next_seq_nr(), 16);\n\n\n\n // incoming\n\n assert_eq!(context.verify_incoming_seq_nr(2), Ok(()));\n\n assert_eq!(\n\n context.verify_incoming_seq_nr(2),\n\n Err(ProtocolError::ReusedSequenceNumber)\n\n );\n\n assert_eq!(context.verify_incoming_seq_nr(1), Ok(()));\n\n assert_eq!(context.verify_incoming_seq_nr(7), Ok(()));\n\n assert_eq!(context.verify_incoming_seq_nr(20), Ok(()));\n\n assert_eq!(\n\n context.verify_incoming_seq_nr(20),\n\n Err(ProtocolError::ReusedSequenceNumber)\n", "file_path": 
"onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 80, "score": 63432.59659716724 }, { "content": " self.tunnel_id,\n\n id\n\n );\n\n self.forward_frame_id = id;\n\n Ok(())\n\n } else {\n\n log::trace!(\n\n \"Tunnel={:?}: Initiator send forward_frame_id={:?} to hop\",\n\n self.tunnel_id,\n\n id\n\n );\n\n self.write(DataType::ForwardFrameId(id)).await\n\n }\n\n }\n\n\n\n fn set_backward_frame_id(&mut self, id: FrameId) {\n\n log::trace!(\n\n \"Tunnel={:?}: Initiator set_next_hop_bw_frame_id={:?}\",\n\n self.tunnel_id,\n\n id\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 81, "score": 63429.35852793374 }, { "content": " tunnel_id,\n\n forward_frame_id: CLIENT_HELLO_FORWARD_ID,\n\n next_hop_backward_frame_id: 0,\n\n next_hop_backward_frame_id_old: 0,\n\n crypto_contexts: vec![],\n\n seq_nr_context: SequenceNumberContext::new(),\n\n }\n\n }\n\n}\n\n\n\npub(crate) struct TargetEndpoint {\n\n socket: Arc<DtlsSocketLayer>,\n\n prev_hop: SocketAddr,\n\n frame_id_manager: Arc<RwLock<FrameIdManager>>,\n\n tunnel_id: TunnelId,\n\n backward_frame_id: FrameId,\n\n crypto_context: Option<CryptoContext>,\n\n seq_nr_context: SequenceNumberContext,\n\n}\n\n\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 82, "score": 63428.336847977065 }, { "content": " self\n\n }\n\n\n\n async fn process_forward_frame_id(&mut self, _id: FrameId) -> Result<(), ProtocolError> {\n\n log::warn!(\"Setting forward frame_id not supported for this codec\");\n\n Err(ProtocolError::UnsupportedAction)\n\n }\n\n\n\n fn set_backward_frame_id(&mut self, secret_id: FrameId) {\n\n log::trace!(\n\n \"Tunnel={:?}: Set backward frame id={:?}\",\n\n self.tunnel_id,\n\n secret_id\n\n );\n\n self.backward_frame_id = secret_id;\n\n }\n\n\n\n fn add_crypto_context(&mut self, cc: CryptoContext) {\n\n if self.crypto_context.is_none() {\n\n log::trace!(\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", 
"rank": 83, "score": 63427.41266467942 }, { "content": " log::warn!(\"Setting backward frame_id not supported for this codec\");\n\n }\n\n\n\n /*\n\n * This method adds a crypto_context to the codec. It is not supported for the intermediate\n\n * hops impl, which inherits the context from the target_endpoint impl.\n\n *\n\n * In case of the target_endpoint, only one crypto_context can be set, all others calls will be\n\n * ignored. In case of the initiator_endpoint, a list of crypto_contexts is provided, one\n\n * context per each hop inclusive the target.\n\n */\n\n fn add_crypto_context(&mut self, _cc: CryptoContext) {\n\n log::warn!(\"Adding crypto context not supported for this codec\");\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub(crate) struct InitiatorEndpoint {\n\n socket: Arc<DtlsSocketLayer>,\n\n next_hop: SocketAddr,\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 84, "score": 63426.632220089996 }, { "content": " log::trace!(\"Tunnel={:?}: Lock target hop\", self.tunnel_id);\n\n // remove all backward frame ids, which are not used for target tunnels\n\n self.frame_id_manager\n\n .write()\n\n .await\n\n .remove_backward_frame_ids(self.tunnel_id);\n\n }\n\n}\n\n\n\npub(crate) struct IntermediateHopCodec {\n\n socket: Arc<DtlsSocketLayer>,\n\n next_hop: SocketAddr,\n\n prev_hop: SocketAddr,\n\n tunnel_id: TunnelId,\n\n forward_frame_id: FrameId,\n\n backward_frame_id: FrameId,\n\n crypto_context: CryptoContext,\n\n server_hello_forwarded: bool,\n\n}\n\n\n", "file_path": "onion_lib/src/p2p_protocol/onion_tunnel/message_codec.rs", "rank": 85, "score": 63426.17534093598 } ]
Rust
noodles-bam/src/bai/reader.rs
MaltheSR/noodles
8530af08ced193795480ea8ee2667fe8af82bd92
use std::{ convert::TryFrom, io::{self, Read}, }; use byteorder::{LittleEndian, ReadBytesExt}; use noodles_bgzf as bgzf; use noodles_csi::index::reference_sequence::{bin::Chunk, Metadata}; use super::{ index::{reference_sequence, ReferenceSequence}, Bin, Index, MAGIC_NUMBER, }; pub struct Reader<R> { inner: R, } impl<R> Reader<R> where R: Read, { pub fn new(inner: R) -> Self { Self { inner } } pub fn read_header(&mut self) -> io::Result<()> { read_magic(&mut self.inner) } pub fn read_index(&mut self) -> io::Result<Index> { let references = read_references(&mut self.inner)?; let n_no_coor = read_unplaced_unmapped_record_count(&mut self.inner)?; Ok(Index::new(references, n_no_coor)) } } fn read_magic<R>(reader: &mut R) -> io::Result<()> where R: Read, { let mut magic = [0; 4]; reader.read_exact(&mut magic)?; if magic == MAGIC_NUMBER { Ok(()) } else { Err(io::Error::new( io::ErrorKind::InvalidData, "invalid BAI header", )) } } fn read_references<R>(reader: &mut R) -> io::Result<Vec<ReferenceSequence>> where R: Read, { let n_ref = reader.read_u32::<LittleEndian>().and_then(|n| { usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)) })?; let mut references = Vec::with_capacity(n_ref); for _ in 0..n_ref { let (bins, metadata) = read_bins(reader)?; let intervals = read_intervals(reader)?; references.push(ReferenceSequence::new(bins, intervals, metadata)); } Ok(references) } fn read_bins<R>(reader: &mut R) -> io::Result<(Vec<Bin>, Option<Metadata>)> where R: Read, { use reference_sequence::bin::METADATA_ID; let n_bin = reader.read_u32::<LittleEndian>().and_then(|n| { usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)) })?; let mut bins = Vec::with_capacity(n_bin); let mut metadata = None; for _ in 0..n_bin { let id = reader.read_u32::<LittleEndian>()?; if id == METADATA_ID { metadata = read_metadata(reader).map(Some)?; } else { let chunks = read_chunks(reader)?; let bin = Bin::new(id, chunks); bins.push(bin); } } Ok((bins, 
metadata)) } fn read_chunks<R>(reader: &mut R) -> io::Result<Vec<Chunk>> where R: Read, { let n_chunk = reader.read_u32::<LittleEndian>().and_then(|n| { usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)) })?; let mut chunks = Vec::with_capacity(n_chunk); for _ in 0..n_chunk { let chunk_beg = reader .read_u64::<LittleEndian>() .map(bgzf::VirtualPosition::from)?; let chunk_end = reader .read_u64::<LittleEndian>() .map(bgzf::VirtualPosition::from)?; chunks.push(Chunk::new(chunk_beg, chunk_end)); } Ok(chunks) } fn read_intervals<R>(reader: &mut R) -> io::Result<Vec<bgzf::VirtualPosition>> where R: Read, { let n_intv = reader.read_u32::<LittleEndian>().and_then(|n| { usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)) })?; let mut intervals = Vec::with_capacity(n_intv); for _ in 0..n_intv { let ioffset = reader .read_u64::<LittleEndian>() .map(bgzf::VirtualPosition::from)?; intervals.push(ioffset); } Ok(intervals) } fn read_metadata<R>(reader: &mut R) -> io::Result<Metadata> where R: Read, { use reference_sequence::bin::METADATA_CHUNK_COUNT; let n_chunk = reader.read_u32::<LittleEndian>()?; if n_chunk != METADATA_CHUNK_COUNT { return Err(io::Error::new( io::ErrorKind::InvalidData, format!( "invalid metadata pseudo-bin chunk count: expected {}, got {}", METADATA_CHUNK_COUNT, n_chunk ), )); } let ref_beg = reader .read_u64::<LittleEndian>() .map(bgzf::VirtualPosition::from)?; let ref_end = reader .read_u64::<LittleEndian>() .map(bgzf::VirtualPosition::from)?; let n_mapped = reader.read_u64::<LittleEndian>()?; let n_unmapped = reader.read_u64::<LittleEndian>()?; Ok(Metadata::new(ref_beg, ref_end, n_mapped, n_unmapped)) } fn read_unplaced_unmapped_record_count<R>(reader: &mut R) -> io::Result<Option<u64>> where R: Read, { match reader.read_u64::<LittleEndian>() { Ok(n) => Ok(Some(n)), Err(ref e) if e.kind() == io::ErrorKind::UnexpectedEof => Ok(None), Err(e) => Err(e), } } #[cfg(test)] mod tests { use super::*; #[test] fn 
test_read_magic() { let data = b"BAI\x01"; let mut reader = &data[..]; assert!(read_magic(&mut reader).is_ok()); } #[test] fn test_read_magic_with_invalid_magic_number() { let data = []; let mut reader = &data[..]; assert!(matches!( read_magic(&mut reader), Err(ref e) if e.kind() == io::ErrorKind::UnexpectedEof )); let data = b"BAI"; let mut reader = &data[..]; assert!(matches!( read_magic(&mut reader), Err(ref e) if e.kind() == io::ErrorKind::UnexpectedEof )); let data = b"MThd"; let mut reader = &data[..]; assert!(matches!( read_magic(&mut reader), Err(ref e) if e.kind() == io::ErrorKind::InvalidData )); } #[test] fn test_read_metadata() -> io::Result<()> { let data = [ 0x02, 0x00, 0x00, 0x00, 0x62, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3d, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x37, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, ]; let mut reader = &data[..]; let actual = read_metadata(&mut reader)?; let expected = Metadata::new( bgzf::VirtualPosition::from(610), bgzf::VirtualPosition::from(1597), 55, 0, ); assert_eq!(actual, expected); Ok(()) } #[test] fn test_read_unplaced_unmapped_record_count() -> io::Result<()> { let data = []; let mut reader = &data[..]; assert_eq!(read_unplaced_unmapped_record_count(&mut reader)?, None); let data = [0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]; let mut reader = &data[..]; assert_eq!(read_unplaced_unmapped_record_count(&mut reader)?, Some(8)); Ok(()) } }
use std::{ convert::TryFrom, io::{self, Read}, }; use byteorder::{LittleEndian, ReadBytesExt}; use noodles_bgzf as bgzf; use noodles_csi::index::reference_sequence::{bin::Chunk, Metadata}; use super::{ index::{reference_sequence, ReferenceSequence}, Bin, Index, MAGIC_NUMBER, }; pub struct Reader<R> { inner: R, } impl<R> Reader<R> where R: Read, { pub fn new(inner: R) -> Self { Self { inner } } pub fn read_header(&mut self) -> io::Result<()> { read_magic(&mut self.inner) } pub fn read_index(&mut self) -> io::Result<Index> { let references = read_references(&mut self.inner)?; let n_no_coor = read_unplaced_unmapped_record_count(&mut self.inner)?; Ok(Index::new(references, n_no_coor)) } } fn read_magic<R>(reader: &mut R) -> io::Result<()> where R: Read, { let mut magic = [0; 4]; reader.read_exact(&mut magic)?; if magic == MAGIC_NUMBER { Ok(()) } else { Err(io::Error::new( io::ErrorKind::InvalidData, "invalid BAI header", )) } } fn read_references<R>(reader: &mut R) -> io::Result<Vec<ReferenceSequence>> where R: Read, { let n_ref = reader.read_u32::<LittleEndian>().and_then(|n| { usize::try_from(n).map_err(|e| io::Error::new(io
let intervals = read_intervals(reader)?; references.push(ReferenceSequence::new(bins, intervals, metadata)); } Ok(references) } fn read_bins<R>(reader: &mut R) -> io::Result<(Vec<Bin>, Option<Metadata>)> where R: Read, { use reference_sequence::bin::METADATA_ID; let n_bin = reader.read_u32::<LittleEndian>().and_then(|n| { usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)) })?; let mut bins = Vec::with_capacity(n_bin); let mut metadata = None; for _ in 0..n_bin { let id = reader.read_u32::<LittleEndian>()?; if id == METADATA_ID { metadata = read_metadata(reader).map(Some)?; } else { let chunks = read_chunks(reader)?; let bin = Bin::new(id, chunks); bins.push(bin); } } Ok((bins, metadata)) } fn read_chunks<R>(reader: &mut R) -> io::Result<Vec<Chunk>> where R: Read, { let n_chunk = reader.read_u32::<LittleEndian>().and_then(|n| { usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)) })?; let mut chunks = Vec::with_capacity(n_chunk); for _ in 0..n_chunk { let chunk_beg = reader .read_u64::<LittleEndian>() .map(bgzf::VirtualPosition::from)?; let chunk_end = reader .read_u64::<LittleEndian>() .map(bgzf::VirtualPosition::from)?; chunks.push(Chunk::new(chunk_beg, chunk_end)); } Ok(chunks) } fn read_intervals<R>(reader: &mut R) -> io::Result<Vec<bgzf::VirtualPosition>> where R: Read, { let n_intv = reader.read_u32::<LittleEndian>().and_then(|n| { usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)) })?; let mut intervals = Vec::with_capacity(n_intv); for _ in 0..n_intv { let ioffset = reader .read_u64::<LittleEndian>() .map(bgzf::VirtualPosition::from)?; intervals.push(ioffset); } Ok(intervals) } fn read_metadata<R>(reader: &mut R) -> io::Result<Metadata> where R: Read, { use reference_sequence::bin::METADATA_CHUNK_COUNT; let n_chunk = reader.read_u32::<LittleEndian>()?; if n_chunk != METADATA_CHUNK_COUNT { return Err(io::Error::new( io::ErrorKind::InvalidData, format!( "invalid metadata pseudo-bin 
chunk count: expected {}, got {}", METADATA_CHUNK_COUNT, n_chunk ), )); } let ref_beg = reader .read_u64::<LittleEndian>() .map(bgzf::VirtualPosition::from)?; let ref_end = reader .read_u64::<LittleEndian>() .map(bgzf::VirtualPosition::from)?; let n_mapped = reader.read_u64::<LittleEndian>()?; let n_unmapped = reader.read_u64::<LittleEndian>()?; Ok(Metadata::new(ref_beg, ref_end, n_mapped, n_unmapped)) } fn read_unplaced_unmapped_record_count<R>(reader: &mut R) -> io::Result<Option<u64>> where R: Read, { match reader.read_u64::<LittleEndian>() { Ok(n) => Ok(Some(n)), Err(ref e) if e.kind() == io::ErrorKind::UnexpectedEof => Ok(None), Err(e) => Err(e), } } #[cfg(test)] mod tests { use super::*; #[test] fn test_read_magic() { let data = b"BAI\x01"; let mut reader = &data[..]; assert!(read_magic(&mut reader).is_ok()); } #[test] fn test_read_magic_with_invalid_magic_number() { let data = []; let mut reader = &data[..]; assert!(matches!( read_magic(&mut reader), Err(ref e) if e.kind() == io::ErrorKind::UnexpectedEof )); let data = b"BAI"; let mut reader = &data[..]; assert!(matches!( read_magic(&mut reader), Err(ref e) if e.kind() == io::ErrorKind::UnexpectedEof )); let data = b"MThd"; let mut reader = &data[..]; assert!(matches!( read_magic(&mut reader), Err(ref e) if e.kind() == io::ErrorKind::InvalidData )); } #[test] fn test_read_metadata() -> io::Result<()> { let data = [ 0x02, 0x00, 0x00, 0x00, 0x62, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3d, 0x06, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x37, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, ]; let mut reader = &data[..]; let actual = read_metadata(&mut reader)?; let expected = Metadata::new( bgzf::VirtualPosition::from(610), bgzf::VirtualPosition::from(1597), 55, 0, ); assert_eq!(actual, expected); Ok(()) } #[test] fn test_read_unplaced_unmapped_record_count() -> io::Result<()> { let data = []; let mut reader = &data[..]; 
assert_eq!(read_unplaced_unmapped_record_count(&mut reader)?, None); let data = [0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]; let mut reader = &data[..]; assert_eq!(read_unplaced_unmapped_record_count(&mut reader)?, Some(8)); Ok(()) } }
::ErrorKind::InvalidData, e)) })?; let mut references = Vec::with_capacity(n_ref); for _ in 0..n_ref { let (bins, metadata) = read_bins(reader)?;
function_block-random_span
[ { "content": "fn read_header<R>(reader: &mut R) -> io::Result<index::Header>\n\nwhere\n\n R: Read,\n\n{\n\n let format = reader.read_i32::<LittleEndian>().and_then(|n| {\n\n Format::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let col_seq = reader.read_i32::<LittleEndian>().and_then(|i| {\n\n usize::try_from(i).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let col_beg = reader.read_i32::<LittleEndian>().and_then(|i| {\n\n usize::try_from(i).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let col_end = reader.read_i32::<LittleEndian>().and_then(|i| {\n\n if i == 0 {\n\n Ok(None)\n\n } else {\n", "file_path": "noodles-tabix/src/reader.rs", "rank": 1, "score": 447185.74320431193 }, { "content": "pub fn read_header<R>(reader: &mut R) -> io::Result<Header>\n\nwhere\n\n R: Read,\n\n{\n\n let length = reader.read_i32::<LittleEndian>()?;\n\n\n\n let reference_sequence_id = read_itf8(reader).and_then(|n| {\n\n ReferenceSequenceId::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let starting_position_on_the_reference = read_itf8(reader).and_then(|n| {\n\n if n == 0 {\n\n Ok(None)\n\n } else {\n\n sam::record::Position::try_from(n)\n\n .map(Some)\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n }\n\n })?;\n\n\n", "file_path": "noodles-cram/src/reader/container/header.rs", "rank": 2, "score": 444018.68371752335 }, { "content": "fn read_bins<R>(reader: &mut R) -> io::Result<(Vec<Bin>, Option<Metadata>)>\n\nwhere\n\n R: Read,\n\n{\n\n use reference_sequence::bin::METADATA_ID;\n\n\n\n let n_bin = reader.read_i32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut bins = Vec::with_capacity(n_bin);\n\n let mut metadata = None;\n\n\n\n for _ in 0..n_bin {\n\n let id = reader.read_u32::<LittleEndian>()?;\n\n\n\n if id == METADATA_ID {\n\n 
metadata = read_metadata(reader).map(Some)?;\n\n } else {\n\n let chunks = read_chunks(reader)?;\n\n let bin = Bin::new(id, chunks);\n\n bins.push(bin);\n\n }\n\n }\n\n\n\n Ok((bins, metadata))\n\n}\n\n\n", "file_path": "noodles-tabix/src/reader.rs", "rank": 3, "score": 430853.32356305094 }, { "content": "pub fn read_header<R>(reader: &mut R) -> io::Result<slice::Header>\n\nwhere\n\n R: Read,\n\n{\n\n let reference_sequence_id = read_itf8(reader).and_then(|n| {\n\n ReferenceSequenceId::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let alignment_start = read_itf8(reader).and_then(|n| {\n\n if n == 0 {\n\n Ok(None)\n\n } else {\n\n sam::record::Position::try_from(n)\n\n .map(Some)\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n }\n\n })?;\n\n\n\n let alignment_span = read_itf8(reader)?;\n\n\n", "file_path": "noodles-cram/src/reader/data_container/slice/header.rs", "rank": 4, "score": 426712.1215785191 }, { "content": "fn read_bins<R>(reader: &mut R, depth: i32) -> io::Result<(Vec<Bin>, Option<Metadata>)>\n\nwhere\n\n R: Read,\n\n{\n\n let n_bin = reader.read_i32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut bins = Vec::with_capacity(n_bin);\n\n\n\n let metadata_id = Bin::metadata_id(depth);\n\n let mut metadata = None;\n\n\n\n for _ in 0..n_bin {\n\n let id = reader.read_u32::<LittleEndian>()?;\n\n\n\n let loffset = reader\n\n .read_u64::<LittleEndian>()\n\n .map(bgzf::VirtualPosition::from)?;\n\n\n", "file_path": "noodles-csi/src/reader.rs", "rank": 6, "score": 413866.4652756735 }, { "content": "pub fn read_compression_header<R>(reader: &mut R) -> io::Result<CompressionHeader>\n\nwhere\n\n R: Read,\n\n{\n\n let preservation_map = read_preservation_map(reader)?;\n\n let data_series_encoding_map = read_data_series_encoding_map(reader)?;\n\n let tag_encoding_map = read_tag_encoding_map(reader)?;\n\n\n\n 
Ok(CompressionHeader::new(\n\n preservation_map,\n\n data_series_encoding_map,\n\n tag_encoding_map,\n\n ))\n\n}\n", "file_path": "noodles-cram/src/reader/data_container/compression_header.rs", "rank": 7, "score": 394998.08732457005 }, { "content": "pub fn read_string_map_index<R>(reader: &mut R) -> io::Result<usize>\n\nwhere\n\n R: Read,\n\n{\n\n let i = match read_value(reader)? {\n\n Some(Value::Int8(Some(Int8::Value(i)))) => i32::from(i),\n\n Some(Value::Int16(Some(Int16::Value(i)))) => i32::from(i),\n\n Some(Value::Int32(Some(Int32::Value(i)))) => i,\n\n v => {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"expected {{Int8, Int16, Int32}}, got {:?}\", v),\n\n ))\n\n }\n\n };\n\n\n\n usize::try_from(i).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n}\n\n\n", "file_path": "noodles-bcf/src/reader/string_map.rs", "rank": 9, "score": 383852.7869499041 }, { "content": "fn read_metadata<R>(reader: &mut R) -> io::Result<Metadata>\n\nwhere\n\n R: Read,\n\n{\n\n use crate::index::reference_sequence::bin::METADATA_CHUNK_COUNT;\n\n\n\n let n_chunk = reader.read_u32::<LittleEndian>()?;\n\n\n\n if n_chunk != METADATA_CHUNK_COUNT {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\n\n \"invalid metadata pseudo-bin chunk count: expected {}, got {}\",\n\n METADATA_CHUNK_COUNT, n_chunk\n\n ),\n\n ));\n\n }\n\n\n\n let ref_beg = reader\n\n .read_u64::<LittleEndian>()\n", "file_path": "noodles-csi/src/reader.rs", "rank": 11, "score": 380800.099960782 }, { "content": "fn read_metadata<R>(reader: &mut R) -> io::Result<Metadata>\n\nwhere\n\n R: Read,\n\n{\n\n use reference_sequence::bin::METADATA_CHUNK_COUNT;\n\n\n\n let n_chunk = reader.read_u32::<LittleEndian>()?;\n\n\n\n if n_chunk != METADATA_CHUNK_COUNT {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\n\n \"invalid metadata pseudo-bin chunk count: expected {}, got {}\",\n\n METADATA_CHUNK_COUNT, n_chunk\n\n ),\n\n ));\n\n 
}\n\n\n\n let ref_beg = reader\n\n .read_u64::<LittleEndian>()\n", "file_path": "noodles-tabix/src/reader.rs", "rank": 12, "score": 380800.099960782 }, { "content": "pub fn read_encoding<R>(reader: &mut R) -> io::Result<Encoding>\n\nwhere\n\n R: Read,\n\n{\n\n let raw_kind = read_itf8(reader)?;\n\n\n\n match raw_kind {\n\n 0 => Ok(Encoding::Null),\n\n 1 => read_external_encoding(reader),\n\n 2 => unimplemented!(\"GOLOMB\"),\n\n 3 => read_huffman_encoding(reader),\n\n 4 => read_byte_array_len_encoding(reader),\n\n 5 => read_byte_array_stop_encoding(reader),\n\n 6 => read_beta_encoding(reader),\n\n 7 => read_subexp_encoding(reader),\n\n 8 => unimplemented!(\"GOLOMB_RICE\"),\n\n 9 => read_gamma_encoding(reader),\n\n _ => Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"invalid encoding kind\",\n\n )),\n\n }\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/data_container/compression_header/encoding.rs", "rank": 13, "score": 378549.4393767037 }, { "content": "/// Reads a BGZF block header.\n\n///\n\n/// The position of the stream is expected to be at the start of a block.\n\n///\n\n/// If successful, the block size (`BSIZE` + 1) is returned. 
If a block size of 0 is returned, the\n\n/// stream reached EOF.\n\nfn read_header<R>(reader: &mut R) -> io::Result<u32>\n\nwhere\n\n R: Read,\n\n{\n\n let mut header = [0; BGZF_HEADER_SIZE];\n\n\n\n match reader.read_exact(&mut header) {\n\n Ok(_) => {}\n\n Err(ref e) if e.kind() == io::ErrorKind::UnexpectedEof => return Ok(0),\n\n Err(e) => return Err(e),\n\n }\n\n\n\n let bsize = LittleEndian::read_u16(&header[16..]);\n\n\n\n // Add 1 because BSIZE is \"total Block SIZE minus 1\".\n\n Ok(u32::from(bsize) + 1)\n\n}\n\n\n", "file_path": "noodles-bgzf/src/reader.rs", "rank": 14, "score": 377060.17397442134 }, { "content": "pub fn read_preservation_map<R>(reader: &mut R) -> io::Result<PreservationMap>\n\nwhere\n\n R: Read,\n\n{\n\n let data_len = read_itf8(reader).and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut buf = vec![0; data_len];\n\n reader.read_exact(&mut buf)?;\n\n\n\n let mut buf_reader = &buf[..];\n\n let map_len = read_itf8(&mut buf_reader)?;\n\n\n\n let mut read_names_included = true;\n\n let mut ap_data_series_delta = true;\n\n let mut reference_required = true;\n\n let mut substitution_matrix = None;\n\n let mut tag_ids_dictionary = None;\n\n\n", "file_path": "noodles-cram/src/reader/data_container/compression_header/preservation_map.rs", "rank": 16, "score": 366715.43830835936 }, { "content": "pub fn read_line<R>(reader: &mut R, buf: &mut String) -> io::Result<usize>\n\nwhere\n\n R: BufRead,\n\n{\n\n let result = reader.read_line(buf);\n\n buf.pop();\n\n result\n\n}\n", "file_path": "noodles-fasta/src/fai/reader.rs", "rank": 17, "score": 356650.26618032856 }, { "content": "fn read_header_from_block<R>(reader: &mut R) -> io::Result<slice::Header>\n\nwhere\n\n R: Read,\n\n{\n\n let block = read_block(reader)?;\n\n let data = block.decompressed_data()?;\n\n let mut data_reader = &data[..];\n\n read_header(&mut data_reader)\n\n}\n\n\n", "file_path": 
"noodles-cram/src/reader/data_container/slice.rs", "rank": 18, "score": 355987.97037997376 }, { "content": "pub fn read_tag_encoding_map<R>(reader: &mut R) -> io::Result<TagEncodingMap>\n\nwhere\n\n R: Read,\n\n{\n\n let data_len = read_itf8(reader).and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut buf = vec![0; data_len];\n\n reader.read_exact(&mut buf)?;\n\n\n\n let mut buf_reader = &buf[..];\n\n let map_len = read_itf8(&mut buf_reader)?;\n\n\n\n let mut map = HashMap::with_capacity(map_len as usize);\n\n\n\n for _ in 0..map_len {\n\n let key = read_itf8(&mut buf_reader)?;\n\n let encoding = read_encoding(&mut buf_reader)?;\n\n map.insert(key, encoding);\n\n }\n\n\n\n Ok(TagEncodingMap::from(map))\n\n}\n", "file_path": "noodles-cram/src/reader/data_container/compression_header/tag_encoding_map.rs", "rank": 19, "score": 355959.9941188304 }, { "content": "pub fn read_container<R>(reader: &mut R) -> io::Result<Container>\n\nwhere\n\n R: Read,\n\n{\n\n let header = read_header(reader)?;\n\n\n\n let blocks_len = header.block_count();\n\n let mut blocks = Vec::with_capacity(blocks_len);\n\n\n\n for _ in 0..blocks_len {\n\n let block = read_block(reader)?;\n\n blocks.push(block);\n\n }\n\n\n\n Ok(Container::new(header, blocks))\n\n}\n", "file_path": "noodles-cram/src/reader/container.rs", "rank": 20, "score": 354475.6442752581 }, { "content": "/// Reads the entire contents of a BAM index.\n\n///\n\n/// This is a convenience function and is equivalent to opening the file at the given path, reading\n\n/// the header, and reading the index.\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// # use std::io;\n\n/// use noodles_bam::bai;\n\n/// let index = bai::read(\"sample.bam.bai\")?;\n\n/// # Ok::<(), io::Error>(())\n\n/// ```\n\npub fn read<P>(src: P) -> io::Result<Index>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let mut reader = File::open(src).map(Reader::new)?;\n\n reader.read_header()?;\n\n 
reader.read_index()\n\n}\n\n\n", "file_path": "noodles-bam/src/bai.rs", "rank": 21, "score": 351614.54958483425 }, { "content": "fn read_reference_md5<R>(reader: &mut R) -> io::Result<[u8; 16]>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buf = [0; 16];\n\n reader.read_exact(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/data_container/slice/header.rs", "rank": 22, "score": 350678.7031549888 }, { "content": "pub fn read_ltf8<R>(reader: &mut R) -> io::Result<Ltf8>\n\nwhere\n\n R: Read,\n\n{\n\n let b0 = read_u8_as_i64(reader)?;\n\n\n\n let value = if b0 & 0x80 == 0 {\n\n b0\n\n } else if b0 & 0x40 == 0 {\n\n let b1 = read_u8_as_i64(reader)?;\n\n (b0 & 0x7f) << 8 | b1\n\n } else if b0 & 0x20 == 0 {\n\n let b1 = read_u8_as_i64(reader)?;\n\n let b2 = read_u8_as_i64(reader)?;\n\n (b0 & 0x3f) << 16 | b1 << 8 | b2\n\n } else if b0 & 0x10 == 0 {\n\n let b1 = read_u8_as_i64(reader)?;\n\n let b2 = read_u8_as_i64(reader)?;\n\n let b3 = read_u8_as_i64(reader)?;\n\n (b0 & 0x1f) << 24 | b1 << 16 | b2 << 8 | b3\n", "file_path": "noodles-cram/src/reader/num/ltf8.rs", "rank": 23, "score": 350578.9471052562 }, { "content": "pub fn read_itf8<R>(reader: &mut R) -> io::Result<Itf8>\n\nwhere\n\n R: Read,\n\n{\n\n let b0 = read_u8_as_i32(reader)?;\n\n\n\n let value = if b0 & 0x80 == 0 {\n\n b0\n\n } else if b0 & 0x40 == 0 {\n\n let b1 = read_u8_as_i32(reader)?;\n\n (b0 & 0x7f) << 8 | b1\n\n } else if b0 & 0x20 == 0 {\n\n let b1 = read_u8_as_i32(reader)?;\n\n let b2 = read_u8_as_i32(reader)?;\n\n (b0 & 0x3f) << 16 | b1 << 8 | b2\n\n } else if b0 & 0x10 == 0 {\n\n let b1 = read_u8_as_i32(reader)?;\n\n let b2 = read_u8_as_i32(reader)?;\n\n let b3 = read_u8_as_i32(reader)?;\n\n (b0 & 0x1f) << 24 | b1 << 16 | b2 << 8 | b3\n", "file_path": "noodles-cram/src/reader/num/itf8.rs", "rank": 24, "score": 350578.9471052562 }, { "content": "pub fn read_block<R>(reader: &mut R) -> io::Result<Block>\n\nwhere\n\n R: Read,\n\n{\n\n let method = reader.read_u8().and_then(|b| {\n\n 
CompressionMethod::try_from(b).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let block_content_type_id = reader.read_u8().and_then(|b| {\n\n ContentType::try_from(b).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let block_content_id = read_itf8(reader)?;\n\n\n\n let size_in_bytes = read_itf8(reader).and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let raw_size_in_bytes = read_itf8(reader).and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n", "file_path": "noodles-cram/src/reader/container/block.rs", "rank": 25, "score": 350578.94710525626 }, { "content": "fn read_intervals<R>(reader: &mut R) -> io::Result<Vec<bgzf::VirtualPosition>>\n\nwhere\n\n R: Read,\n\n{\n\n let n_intv = reader.read_i32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut intervals = Vec::with_capacity(n_intv);\n\n\n\n for _ in 0..n_intv {\n\n let ioff = reader\n\n .read_u64::<LittleEndian>()\n\n .map(bgzf::VirtualPosition::from)?;\n\n\n\n intervals.push(ioff);\n\n }\n\n\n\n Ok(intervals)\n\n}\n\n\n", "file_path": "noodles-tabix/src/reader.rs", "rank": 26, "score": 347556.49436534406 }, { "content": "pub fn read_value<R>(reader: &mut R) -> io::Result<Option<Value>>\n\nwhere\n\n R: Read,\n\n{\n\n let ty = read_type(reader)?;\n\n\n\n match ty {\n\n Some(Type::Int8(len)) => match len {\n\n 0 => Ok(Some(Value::Int8(None))),\n\n 1 => read_i8(reader)\n\n .map(Int8::from)\n\n .map(Some)\n\n .map(Value::Int8)\n\n .map(Some),\n\n _ => read_i8_array(reader, len).map(Value::Int8Array).map(Some),\n\n },\n\n Some(Type::Int16(len)) => match len {\n\n 0 => Ok(Some(Value::Int16(None))),\n\n 1 => read_i16(reader)\n\n .map(Int16::from)\n", "file_path": "noodles-bcf/src/reader/value.rs", "rank": 27, "score": 346890.63549313316 }, { "content": "pub fn 
read_slice<R>(reader: &mut R) -> io::Result<Slice>\n\nwhere\n\n R: Read,\n\n{\n\n let header = read_header_from_block(reader)?;\n\n let core_data_block = read_block(reader)?;\n\n\n\n let external_block_count = header.block_count() - 1;\n\n let external_blocks = read_external_blocks(reader, external_block_count)?;\n\n\n\n Ok(Slice::new(header, core_data_block, external_blocks))\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/data_container/slice.rs", "rank": 28, "score": 346816.22902599047 }, { "content": "pub fn read_data_series_encoding_map<R>(reader: &mut R) -> io::Result<DataSeriesEncodingMap>\n\nwhere\n\n R: Read,\n\n{\n\n let data_len = read_itf8(reader).and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut buf = vec![0; data_len];\n\n reader.read_exact(&mut buf)?;\n\n\n\n let mut buf_reader = &buf[..];\n\n let map_len = read_itf8(&mut buf_reader)?;\n\n\n\n let mut builder = DataSeriesEncodingMap::builder();\n\n let mut key_buf = [0; 2];\n\n\n\n for _ in 0..map_len {\n\n buf_reader.read_exact(&mut key_buf)?;\n\n\n", "file_path": "noodles-cram/src/reader/data_container/compression_header/data_series_encoding_map.rs", "rank": 29, "score": 346140.31893797795 }, { "content": "pub fn read_tag<R>(reader: &mut R) -> io::Result<Tag>\n\nwhere\n\n R: Read,\n\n{\n\n use std::str;\n\n\n\n let mut buf = [0; 2];\n\n reader.read_exact(&mut buf)?;\n\n\n\n str::from_utf8(&buf)\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n .and_then(|s| {\n\n s.parse()\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "noodles-bam/src/reader/record/data/field/tag.rs", "rank": 30, "score": 343180.57440666 }, { "content": "pub fn read_type<R>(reader: &mut R) -> io::Result<Option<Type>>\n\nwhere\n\n R: Read,\n\n{\n\n use super::{Int16, Int32, Int8};\n\n\n\n let encoding = reader.read_u8()?;\n\n\n\n let mut len = 
usize::from(encoding >> 4);\n\n\n\n if len == 0x0f {\n\n let value = read_value(reader)?;\n\n\n\n let next_len = match value {\n\n Some(Value::Int8(Some(Int8::Value(n)))) => i32::from(n),\n\n Some(Value::Int16(Some(Int16::Value(n)))) => i32::from(n),\n\n Some(Value::Int32(Some(Int32::Value(n)))) => n,\n\n _ => {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n", "file_path": "noodles-bcf/src/reader/value/ty.rs", "rank": 31, "score": 343127.9174138675 }, { "content": "fn read_magic<R>(reader: &mut R) -> io::Result<()>\n\nwhere\n\n R: Read,\n\n{\n\n let mut magic = [0; 4];\n\n reader.read_exact(&mut magic)?;\n\n\n\n if magic == MAGIC_NUMBER {\n\n Ok(())\n\n } else {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"invalid tabix header\",\n\n ))\n\n }\n\n}\n\n\n", "file_path": "noodles-tabix/src/reader.rs", "rank": 32, "score": 341036.5034039233 }, { "content": "fn read_magic<R>(reader: &mut R) -> io::Result<()>\n\nwhere\n\n R: Read,\n\n{\n\n let mut magic = [0; 4];\n\n reader.read_exact(&mut magic)?;\n\n\n\n if magic == MAGIC_NUMBER {\n\n Ok(())\n\n } else {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"invalid CSI file format\",\n\n ))\n\n }\n\n}\n\n\n", "file_path": "noodles-csi/src/reader.rs", "rank": 33, "score": 341036.5034039233 }, { "content": "fn read_magic<R>(reader: &mut R) -> io::Result<()>\n\nwhere\n\n R: Read,\n\n{\n\n use crate::MAGIC_NUMBER;\n\n\n\n let mut buf = [0; 3];\n\n reader.read_exact(&mut buf)?;\n\n\n\n if buf == MAGIC_NUMBER {\n\n Ok(())\n\n } else {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"invalid BCF header\",\n\n ))\n\n }\n\n}\n\n\n", "file_path": "noodles-bcf/src/reader.rs", "rank": 34, "score": 341036.5034039233 }, { "content": "fn read_magic<R>(reader: &mut R) -> io::Result<()>\n\nwhere\n\n R: Read,\n\n{\n\n let mut magic = [0; 4];\n\n reader.read_exact(&mut magic)?;\n\n\n\n if magic == MAGIC_NUMBER {\n\n Ok(())\n\n } else {\n\n Err(io::Error::new(\n\n 
io::ErrorKind::InvalidData,\n\n \"invalid BAM header\",\n\n ))\n\n }\n\n}\n\n\n", "file_path": "noodles-bam/src/reader.rs", "rank": 35, "score": 341036.5034039233 }, { "content": "pub fn read_subtype<R>(reader: &mut R) -> io::Result<Subtype>\n\nwhere\n\n R: Read,\n\n{\n\n reader.read_u8().and_then(|n| {\n\n Subtype::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_read_subtype() -> io::Result<()> {\n\n let data = [b'i'];\n\n let mut reader = &data[..];\n\n assert_eq!(read_subtype(&mut reader)?, Subtype::Int32);\n\n\n\n let data = [b'n'];\n\n let mut reader = &data[..];\n\n assert!(matches!(\n\n read_subtype(&mut reader),\n\n Err(ref e) if e.kind() == io::ErrorKind::InvalidData\n\n ));\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "noodles-bam/src/reader/record/data/field/value/subtype.rs", "rank": 36, "score": 339665.54149991967 }, { "content": "pub fn read_type<R>(reader: &mut R) -> io::Result<Type>\n\nwhere\n\n R: Read,\n\n{\n\n reader\n\n .read_u8()\n\n .and_then(|n| Type::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e)))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_read_type() -> io::Result<()> {\n\n let data = [b'i'];\n\n let mut reader = &data[..];\n\n assert_eq!(read_type(&mut reader)?, Type::Int32);\n\n\n\n let data = [b'n'];\n\n let mut reader = &data[..];\n\n assert!(matches!(\n\n read_type(&mut reader),\n\n Err(ref e) if e.kind() == io::ErrorKind::InvalidData\n\n ));\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "noodles-bam/src/reader/record/data/field/value/ty.rs", "rank": 37, "score": 339665.5414999197 }, { "content": "fn read_reference_sequence<R>(reader: &mut R) -> io::Result<ReferenceSequence>\n\nwhere\n\n R: Read,\n\n{\n\n let l_name = reader.read_u32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n 
})?;\n\n\n\n let mut c_name = vec![0; l_name];\n\n reader.read_exact(&mut c_name)?;\n\n\n\n let name = bytes_with_nul_to_string(&c_name)?;\n\n let l_ref = reader.read_u32::<LittleEndian>().and_then(|len| {\n\n i32::try_from(len).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n ReferenceSequence::new(name, l_ref).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n}\n\n\n", "file_path": "noodles-bam/src/reader.rs", "rank": 38, "score": 338725.71676899085 }, { "content": "fn read_reference_sequences<R>(reader: &mut R) -> io::Result<ReferenceSequences>\n\nwhere\n\n R: Read,\n\n{\n\n let n_ref = reader.read_u32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut reference_sequences = ReferenceSequences::with_capacity(n_ref);\n\n\n\n for _ in 0..n_ref {\n\n let reference_sequence = read_reference_sequence(reader)?;\n\n reference_sequences.insert(reference_sequence.name().into(), reference_sequence);\n\n }\n\n\n\n Ok(reference_sequences)\n\n}\n\n\n", "file_path": "noodles-bam/src/reader.rs", "rank": 39, "score": 338725.71676899085 }, { "content": "fn read_magic_number<R>(reader: &mut R) -> io::Result<()>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buf = [0; 4];\n\n reader.read_exact(&mut buf)?;\n\n\n\n if buf == MAGIC_NUMBER {\n\n Ok(())\n\n } else {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"invalid CRAM header\",\n\n ))\n\n }\n\n}\n\n\n", "file_path": "noodles-cram/src/reader.rs", "rank": 40, "score": 336688.83897647064 }, { "content": "pub fn read_string_map_indices<R>(reader: &mut R) -> io::Result<Vec<usize>>\n\nwhere\n\n R: Read,\n\n{\n\n let indices = match read_value(reader)? 
{\n\n Some(Value::Int8(Some(Int8::Value(i)))) => vec![i32::from(i)],\n\n Some(Value::Int8Array(indices)) => indices.into_iter().map(i32::from).collect(),\n\n Some(Value::Int16(Some(Int16::Value(i)))) => vec![i32::from(i)],\n\n Some(Value::Int16Array(indices)) => indices.into_iter().map(i32::from).collect(),\n\n Some(Value::Int32(Some(Int32::Value(i)))) => vec![i],\n\n Some(Value::Int32Array(indices)) => indices,\n\n None => Vec::new(),\n\n v => {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\n\n \"expected {{Int8, Int8Array, Int16, Int16Array, Int32, Int32Array}}, got {:?}\",\n\n v\n\n ),\n\n ))\n", "file_path": "noodles-bcf/src/reader/string_map.rs", "rank": 41, "score": 335977.2298877967 }, { "content": "pub fn read_data_container<R>(reader: &mut R) -> io::Result<Option<DataContainer>>\n\nwhere\n\n R: Read,\n\n{\n\n let header = container::read_header(reader)?;\n\n\n\n if header.is_eof() {\n\n return Ok(None);\n\n }\n\n\n\n let compression_header = read_compression_header_from_block(reader)?;\n\n\n\n let slice_count = header.landmarks().len();\n\n let mut slices = Vec::with_capacity(slice_count);\n\n\n\n for _ in 0..slice_count {\n\n let slice = read_slice(reader)?;\n\n slices.push(slice);\n\n }\n\n\n\n Ok(Some(DataContainer::new(compression_header, slices)))\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/data_container.rs", "rank": 42, "score": 335977.2298877966 }, { "content": "/// Reads a BGZF block trailer.\n\n///\n\n/// The position of the stream is expected to be at the start of the block trailer, i.e., 8 bytes\n\n/// from the end of the block.\n\n///\n\n/// This returns the length of the uncompressed data (`ISIZE`).\n\nfn read_trailer<R>(reader: &mut R) -> io::Result<u32>\n\nwhere\n\n R: Read,\n\n{\n\n let mut trailer = [0; gz::TRAILER_SIZE];\n\n reader.read_exact(&mut trailer)?;\n\n let r#isize = LittleEndian::read_u32(&trailer[4..]);\n\n Ok(r#isize)\n\n}\n\n\n", "file_path": "noodles-bgzf/src/reader.rs", "rank": 43, 
"score": 333568.23504866427 }, { "content": "fn read_header<R>(reader: &mut R) -> io::Result<String>\n\nwhere\n\n R: BufRead,\n\n{\n\n let mut header_buf = Vec::new();\n\n let mut is_eol = false;\n\n\n\n for i in 0.. {\n\n let buf = reader.fill_buf()?;\n\n\n\n if (i == 0 || is_eol) && buf.first().map(|&b| b != HEADER_PREFIX).unwrap_or(true) {\n\n break;\n\n }\n\n\n\n let (read_eol, len) = if let Some(i) = memchr(LINE_FEED as u8, buf) {\n\n header_buf.extend(&buf[..=i]);\n\n (true, i + 1)\n\n } else {\n\n header_buf.extend(buf);\n\n (false, buf.len())\n\n };\n\n\n\n is_eol = read_eol;\n\n\n\n reader.consume(len);\n\n }\n\n\n\n String::from_utf8(header_buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n}\n\n\n", "file_path": "noodles-vcf/src/reader.rs", "rank": 44, "score": 332201.49519835896 }, { "content": "fn read_header<R>(reader: &mut R) -> io::Result<String>\n\nwhere\n\n R: BufRead,\n\n{\n\n let mut header_buf = Vec::new();\n\n let mut is_eol = false;\n\n\n\n for i in 0.. 
{\n\n let buf = reader.fill_buf()?;\n\n\n\n if (i == 0 || is_eol) && buf.first().map(|&b| b != HEADER_PREFIX).unwrap_or(true) {\n\n break;\n\n }\n\n\n\n let (read_eol, len) = if let Some(i) = buf.iter().position(|&b| b == LINE_FEED as u8) {\n\n header_buf.extend(&buf[..=i]);\n\n (true, i + 1)\n\n } else {\n\n header_buf.extend(buf);\n\n (false, buf.len())\n\n };\n\n\n\n is_eol = read_eol;\n\n\n\n reader.consume(len);\n\n }\n\n\n\n String::from_utf8(header_buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n}\n\n\n", "file_path": "noodles-sam/src/reader.rs", "rank": 45, "score": 332201.49519835896 }, { "content": "fn read_header<R>(reader: &mut R) -> io::Result<String>\n\nwhere\n\n R: Read,\n\n{\n\n let l_text = reader.read_u32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut text = vec![0; l_text];\n\n reader.read_exact(&mut text)?;\n\n\n\n // § 4.2 The BAM format (2021-06-03): \"Plain header text in SAM; not necessarily\n\n // NUL-terminated\".\n\n bytes_with_nul_to_string(&text).or_else(|_| {\n\n String::from_utf8(text).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })\n\n}\n\n\n", "file_path": "noodles-bam/src/reader.rs", "rank": 46, "score": 332201.49519835896 }, { "content": "fn read_line<R>(reader: &mut R, buf: &mut Vec<u8>) -> io::Result<usize>\n\nwhere\n\n R: BufRead,\n\n{\n\n reader.read_until(LINE_FEED, buf)\n\n}\n\n\n", "file_path": "noodles-fastq/src/indexer.rs", "rank": 47, "score": 331059.6608162989 }, { "content": "pub fn read_data_container_with_container_header<R>(\n\n reader: &mut R,\n\n) -> io::Result<Option<(crate::container::Header, DataContainer)>>\n\nwhere\n\n R: Read,\n\n{\n\n let header = container::read_header(reader)?;\n\n\n\n if header.is_eof() {\n\n return Ok(None);\n\n }\n\n\n\n let compression_header = read_compression_header_from_block(reader)?;\n\n\n\n let slice_count = header.landmarks().len();\n\n let mut 
slices = Vec::with_capacity(slice_count);\n\n\n\n for _ in 0..slice_count {\n\n let slice = read_slice(reader)?;\n\n slices.push(slice);\n\n }\n\n\n\n let data_container = DataContainer::new(compression_header, slices);\n\n\n\n Ok(Some((header, data_container)))\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/data_container.rs", "rank": 48, "score": 329710.8424717698 }, { "content": "/// Reads a BAM record data field value.\n\n///\n\n/// The stream is expected to be at the start of the value, i.e., after the tag and data type.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # use std::io;\n\n/// use noodles_bam::{\n\n/// reader::record::data::field::read_value,\n\n/// record::data::field::{value::Type, Value}\n\n/// };\n\n///\n\n/// let data = [0x01, 0x00, 0x00, 0x00];\n\n/// let mut reader = &data[..];\n\n///\n\n/// assert_eq!(\n\n/// read_value(&mut reader, Type::Int32)?,\n\n/// Value::Int32(1)\n\n/// );\n\n/// # Ok::<(), io::Error>(())\n\n/// ```\n\npub fn read_value<R>(reader: &mut R, ty: Type) -> io::Result<Value>\n\nwhere\n\n R: BufRead,\n\n{\n\n match ty {\n\n Type::Char => reader.read_u8().map(char::from).map(Value::Char),\n\n Type::Int8 => reader.read_i8().map(Value::Int8),\n\n Type::UInt8 => reader.read_u8().map(Value::UInt8),\n\n Type::Int16 => reader.read_i16::<LittleEndian>().map(Value::Int16),\n\n Type::UInt16 => reader.read_u16::<LittleEndian>().map(Value::UInt16),\n\n Type::Int32 => reader.read_i32::<LittleEndian>().map(Value::Int32),\n\n Type::UInt32 => reader.read_u32::<LittleEndian>().map(Value::UInt32),\n\n Type::Float => reader.read_f32::<LittleEndian>().map(Value::Float),\n\n Type::String => read_string(reader).map(Value::String),\n\n Type::Hex => read_string(reader).map(Value::Hex),\n\n Type::Array => read_array(reader),\n\n }\n\n}\n\n\n", "file_path": "noodles-bam/src/reader/record/data/field/value.rs", "rank": 49, "score": 329133.2783089557 }, { "content": "fn read_block<R>(reader: &mut R, cdata: &mut Vec<u8>, block: &mut Block) -> 
io::Result<usize>\n\nwhere\n\n R: Read,\n\n{\n\n let clen = match read_header(reader) {\n\n Ok(0) => return Ok(0),\n\n Ok(bs) => bs as usize,\n\n Err(e) => return Err(e),\n\n };\n\n\n\n if clen < BGZF_HEADER_SIZE + gz::TRAILER_SIZE {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\n\n \"expected clen >= {}, got {}\",\n\n BGZF_HEADER_SIZE + gz::TRAILER_SIZE,\n\n clen\n\n ),\n\n ));\n\n }\n", "file_path": "noodles-bgzf/src/reader.rs", "rank": 50, "score": 327263.6936944936 }, { "content": "fn read_compression_header_from_block<R>(reader: &mut R) -> io::Result<CompressionHeader>\n\nwhere\n\n R: Read,\n\n{\n\n use super::container::read_block;\n\n\n\n let block = read_block(reader)?;\n\n let data = block.decompressed_data()?;\n\n let mut data_reader = &data[..];\n\n read_compression_header(&mut data_reader)\n\n}\n", "file_path": "noodles-cram/src/reader/data_container.rs", "rank": 51, "score": 326249.8908248297 }, { "content": "fn read_names<R>(reader: &mut R) -> io::Result<ReferenceSequenceNames>\n\nwhere\n\n R: Read,\n\n{\n\n let l_nm = reader.read_i32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut names = vec![0; l_nm];\n\n reader.read_exact(&mut names)?;\n\n\n\n parse_names(&names)\n\n}\n\n\n\npub(crate) fn parse_names(buf: &[u8]) -> io::Result<ReferenceSequenceNames> {\n\n let mut names = ReferenceSequenceNames::new();\n\n let mut start = 0;\n\n\n\n loop {\n\n let buf = &buf[start..];\n", "file_path": "noodles-tabix/src/reader.rs", "rank": 52, "score": 325186.4762068846 }, { "content": "pub fn rans_renorm<R>(reader: &mut R, mut r: u32) -> io::Result<u32>\n\nwhere\n\n R: Read,\n\n{\n\n while r < (1 << 23) {\n\n r = (r << 8) + reader.read_u8().map(u32::from)?;\n\n }\n\n\n\n Ok(r)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_read_header() -> io::Result<()> {\n\n let data = [\n\n 0x00, // order = 0\n\n 0x25, 
0x00, 0x00, 0x00, // compressed size = 37\n", "file_path": "noodles-cram/src/rans/decode.rs", "rank": 53, "score": 324254.5346208702 }, { "content": "fn read_references<R>(reader: &mut R, len: usize) -> io::Result<Vec<ReferenceSequence>>\n\nwhere\n\n R: Read,\n\n{\n\n let mut references = Vec::with_capacity(len);\n\n\n\n for _ in 0..len {\n\n let (bins, metadata) = read_bins(reader)?;\n\n let intervals = read_intervals(reader)?;\n\n references.push(ReferenceSequence::new(bins, intervals, metadata));\n\n }\n\n\n\n Ok(references)\n\n}\n\n\n", "file_path": "noodles-tabix/src/reader.rs", "rank": 54, "score": 323329.78672155086 }, { "content": "pub fn decode<R>(reader: &mut R, output: &mut [u8]) -> io::Result<()>\n\nwhere\n\n R: Read,\n\n{\n\n let mut freqs = vec![vec![0; 256]; 256];\n\n let mut cumulative_freqs = vec![vec![0; 256]; 256];\n\n\n\n read_frequencies_1(reader, &mut freqs, &mut cumulative_freqs)?;\n\n\n\n let cumulative_freqs_symbols_tables = build_cumulative_freqs_symbols_table_1(&cumulative_freqs);\n\n\n\n let mut state = [0; 4];\n\n reader.read_u32_into::<LittleEndian>(&mut state)?;\n\n\n\n let mut i = 0;\n\n let mut last_syms = [0; 4];\n\n\n\n while i < output.len() / 4 {\n\n for j in 0..4 {\n\n let f = rans_get_cumulative_freq(state[j]);\n", "file_path": "noodles-cram/src/rans/decode/order_1.rs", "rank": 56, "score": 322021.78438920534 }, { "content": "pub fn decode<R>(reader: &mut R, output: &mut [u8]) -> io::Result<()>\n\nwhere\n\n R: Read,\n\n{\n\n let mut freqs = [0; 256];\n\n let mut cumulative_freqs = [0; 256];\n\n\n\n read_frequencies_0(reader, &mut freqs, &mut cumulative_freqs)?;\n\n\n\n let cumulative_freqs_symbols_table = build_cumulative_freqs_symbols_table_0(&cumulative_freqs);\n\n\n\n let mut state = [0; 4];\n\n reader.read_u32_into::<LittleEndian>(&mut state)?;\n\n\n\n let mut i = 0;\n\n\n\n while i < output.len() {\n\n for j in 0..4 {\n\n if i + j >= output.len() {\n\n return Ok(());\n", "file_path": 
"noodles-cram/src/rans/decode/order_0.rs", "rank": 57, "score": 322021.78438920534 }, { "content": "fn read_reference_sequences<R>(reader: &mut R, depth: i32) -> io::Result<Vec<ReferenceSequence>>\n\nwhere\n\n R: Read,\n\n{\n\n let n_ref = reader.read_i32::<LittleEndian>().and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let mut reference_sequences = Vec::with_capacity(n_ref);\n\n\n\n for _ in 0..n_ref {\n\n let (bins, metadata) = read_bins(reader, depth)?;\n\n let reference_sequence = ReferenceSequence::new(bins, metadata);\n\n reference_sequences.push(reference_sequence);\n\n }\n\n\n\n Ok(reference_sequences)\n\n}\n\n\n", "file_path": "noodles-csi/src/reader.rs", "rank": 58, "score": 319777.78024379443 }, { "content": "fn read_landmarks<R>(reader: &mut R) -> io::Result<Vec<Itf8>>\n\nwhere\n\n R: Read,\n\n{\n\n let len = read_itf8(reader).and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n let mut buf = Vec::with_capacity(len);\n\n\n\n for _ in 0..len {\n\n let pos = read_itf8(reader)?;\n\n buf.push(pos);\n\n }\n\n\n\n Ok(buf)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "noodles-cram/src/reader/container/header.rs", "rank": 59, "score": 317352.28741512104 }, { "content": "pub fn write_bin<W>(writer: &mut W, bin: &Bin) -> io::Result<()>\n\nwhere\n\n W: Write,\n\n{\n\n writer.write_u32::<LittleEndian>(bin.id())?;\n\n\n\n let n_chunk = i32::try_from(bin.chunks().len())\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?;\n\n writer.write_i32::<LittleEndian>(n_chunk)?;\n\n\n\n for chunk in bin.chunks() {\n\n write_chunk(writer, chunk)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "noodles-tabix/src/writer.rs", "rank": 60, "score": 316386.520292566 }, { "content": "fn read_header<R>(reader: &mut R) -> io::Result<(Order, u32, u32)>\n\nwhere\n\n R: Read,\n\n{\n\n let order = 
reader.read_u8().and_then(|order| {\n\n Order::try_from(order).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n let compressed_len = reader.read_u32::<LittleEndian>()?;\n\n let data_len = reader.read_u32::<LittleEndian>()?;\n\n\n\n Ok((order, compressed_len, data_len))\n\n}\n\n\n", "file_path": "noodles-cram/src/rans/decode.rs", "rank": 61, "score": 314710.64008653164 }, { "content": "fn write_header<W>(writer: &mut W, header: &index::Header) -> io::Result<()>\n\nwhere\n\n W: Write,\n\n{\n\n let format = i32::from(header.format());\n\n writer.write_i32::<LittleEndian>(format)?;\n\n\n\n let col_seq = i32::try_from(header.reference_sequence_name_index())\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?;\n\n writer.write_i32::<LittleEndian>(col_seq)?;\n\n\n\n let col_beg = i32::try_from(header.start_position_index())\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?;\n\n writer.write_i32::<LittleEndian>(col_beg)?;\n\n\n\n let col_end = header.end_position_index().map_or(Ok(0), |i| {\n\n i32::try_from(i).map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))\n\n })?;\n\n writer.write_i32::<LittleEndian>(col_end)?;\n\n\n\n let meta = i32::from(header.line_comment_prefix());\n\n writer.write_i32::<LittleEndian>(meta)?;\n\n\n\n let skip = i32::try_from(header.line_skip_count())\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?;\n\n writer.write_i32::<LittleEndian>(skip)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "noodles-tabix/src/writer.rs", "rank": 62, "score": 314623.77010151226 }, { "content": "// 0-based, [start, end)\n\nfn region_to_bins(start: usize, mut end: usize) -> BitVec {\n\n end -= 1;\n\n\n\n let mut bins = BitVec::from_elem(bin::MAX_ID as usize, false);\n\n bins.set(0, true);\n\n\n\n for k in (1 + (start >> 26))..=(1 + (end >> 26)) {\n\n bins.set(k, true);\n\n }\n\n\n\n for k in (9 + (start >> 23))..=(9 + (end >> 23)) {\n\n bins.set(k, true);\n\n }\n\n\n\n for k in (73 + (start 
>> 20))..=(73 + (end >> 20)) {\n\n bins.set(k, true);\n\n }\n\n\n\n for k in (585 + (start >> 17))..=(585 + (end >> 17)) {\n\n bins.set(k, true);\n", "file_path": "noodles-bam/src/bai/index/reference_sequence.rs", "rank": 64, "score": 309629.8906169077 }, { "content": "fn read_external_encoding<R>(reader: &mut R) -> io::Result<Encoding>\n\nwhere\n\n R: Read,\n\n{\n\n let args = read_args(reader)?;\n\n let mut args_reader = &args[..];\n\n\n\n let block_content_id = read_itf8(&mut args_reader)?;\n\n\n\n Ok(Encoding::External(block_content_id))\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/data_container/compression_header/encoding.rs", "rank": 65, "score": 309313.8129938269 }, { "content": "fn read_huffman_encoding<R>(reader: &mut R) -> io::Result<Encoding>\n\nwhere\n\n R: Read,\n\n{\n\n let args = read_args(reader)?;\n\n let mut args_reader = &args[..];\n\n\n\n let alphabet_len = read_itf8(&mut args_reader).and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n let mut alphabet = Vec::with_capacity(alphabet_len);\n\n\n\n for _ in 0..alphabet_len {\n\n let symbol = read_itf8(&mut args_reader)?;\n\n alphabet.push(symbol);\n\n }\n\n\n\n let bit_lens_len = read_itf8(&mut args_reader).and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n", "file_path": "noodles-cram/src/reader/data_container/compression_header/encoding.rs", "rank": 66, "score": 309313.8129938269 }, { "content": "fn read_beta_encoding<R>(reader: &mut R) -> io::Result<Encoding>\n\nwhere\n\n R: Read,\n\n{\n\n let args = read_args(reader)?;\n\n let mut args_reader = &args[..];\n\n\n\n let offset = read_itf8(&mut args_reader)?;\n\n let len = read_itf8(&mut args_reader).and_then(|n| {\n\n u32::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n\n\n Ok(Encoding::Beta(offset, len))\n\n}\n\n\n", "file_path": 
"noodles-cram/src/reader/data_container/compression_header/encoding.rs", "rank": 67, "score": 309313.8129938269 }, { "content": "fn read_subexp_encoding<R>(reader: &mut R) -> io::Result<Encoding>\n\nwhere\n\n R: Read,\n\n{\n\n let args = read_args(reader)?;\n\n let mut args_reader = &args[..];\n\n\n\n let offset = read_itf8(&mut args_reader)?;\n\n let k = read_itf8(&mut args_reader)?;\n\n\n\n Ok(Encoding::Subexp(offset, k))\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/data_container/compression_header/encoding.rs", "rank": 68, "score": 309313.8129938269 }, { "content": "fn read_gamma_encoding<R>(reader: &mut R) -> io::Result<Encoding>\n\nwhere\n\n R: Read,\n\n{\n\n let args = read_args(reader)?;\n\n let mut args_reader = &args[..];\n\n\n\n let offset = read_itf8(&mut args_reader)?;\n\n\n\n Ok(Encoding::Gamma(offset))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_read_null_encoding() -> io::Result<()> {\n\n let data = [\n\n 0, // null encoding ID\n", "file_path": "noodles-cram/src/reader/data_container/compression_header/encoding.rs", "rank": 69, "score": 309313.8129938269 }, { "content": "fn read_bool<R>(reader: &mut R) -> io::Result<bool>\n\nwhere\n\n R: Read,\n\n{\n\n match reader.read_u8() {\n\n Ok(0) => Ok(false),\n\n Ok(1) => Ok(true),\n\n Ok(_) => Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"invalid bool value\",\n\n )),\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/data_container/compression_header/preservation_map.rs", "rank": 70, "score": 309313.8129938269 }, { "content": "fn read_args<R>(reader: &mut R) -> io::Result<Vec<u8>>\n\nwhere\n\n R: Read,\n\n{\n\n let len = read_itf8(reader).and_then(|n| {\n\n usize::try_from(n).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n })?;\n\n let mut buf = vec![0; len];\n\n reader.read_exact(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/data_container/compression_header/encoding.rs", 
"rank": 71, "score": 306508.77409764024 }, { "content": "fn read_optional_tags<R>(reader: &mut R) -> io::Result<Vec<u8>>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buf = Vec::new();\n\n reader.read_to_end(&mut buf)?;\n\n Ok(buf)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_read_header() -> Result<(), Box<dyn std::error::Error>> {\n\n let data = [\n\n 0x02, // reference sequence ID = 2\n\n 0x03, // alignment start = 3\n\n 0x05, // alignment span = 5\n\n 0x08, // number of records = 8\n", "file_path": "noodles-cram/src/reader/data_container/slice/header.rs", "rank": 72, "score": 306508.7740976403 }, { "content": "pub fn write_header<W>(writer: &mut W, header: &container::Header) -> io::Result<()>\n\nwhere\n\n W: Write,\n\n{\n\n let mut crc_writer = CrcWriter::new(writer);\n\n\n\n let length = header.len();\n\n crc_writer.write_i32::<LittleEndian>(length)?;\n\n\n\n let reference_sequence_id = i32::from(header.reference_sequence_id());\n\n write_itf8(&mut crc_writer, reference_sequence_id)?;\n\n\n\n let starting_position_on_the_reference =\n\n header.start_position().map(Itf8::from).unwrap_or_default();\n\n write_itf8(&mut crc_writer, starting_position_on_the_reference)?;\n\n\n\n let alignment_span = header.alignment_span();\n\n write_itf8(&mut crc_writer, alignment_span)?;\n\n\n\n let number_of_records = header.record_count();\n", "file_path": "noodles-cram/src/writer/container/header.rs", "rank": 73, "score": 306185.04325186537 }, { "content": "/// Reads the entire contents of a FASTA index.\n\n///\n\n/// This is a convenience function and is equivalent to opening the file at the given path and\n\n/// parsing each record.\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// # use std::io;\n\n/// use noodles_fasta::fai;\n\n/// let index = fai::read(\"reference.fa.fai\")?;\n\n/// # Ok::<(), io::Error>(())\n\n/// ```\n\npub fn read<P>(src: P) -> io::Result<Index>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let mut reader = 
File::open(src).map(BufReader::new).map(Reader::new)?;\n\n reader.read_index()\n\n}\n", "file_path": "noodles-fasta/src/fai.rs", "rank": 74, "score": 305872.2102238581 }, { "content": "/// Reads the entire contents of a CRAM index.\n\n///\n\n/// This is a convenience function and is equivalent to opening the file at the given path and\n\n/// reading the index.\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// # use std::io;\n\n/// use noodles_cram::crai;\n\n/// let index = crai::read(\"sample.cram.crai\")?;\n\n/// # Ok::<(), io::Error>(())\n\n/// ```\n\npub fn read<P>(src: P) -> io::Result<Index>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let mut reader = File::open(src).map(Reader::new)?;\n\n reader.read_index()\n\n}\n", "file_path": "noodles-cram/src/crai.rs", "rank": 75, "score": 305868.7645961193 }, { "content": "/// Reads the entire contents of a tabix index.\n\n///\n\n/// This is a convenience function and is equivalent to opening the file at the given path and\n\n/// reading the index.\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// # use std::io;\n\n/// use noodles_tabix as tabix;\n\n/// let index = tabix::read(\"sample.vcf.gz.tbi\")?;\n\n/// # Ok::<(), io::Error>(())\n\n/// ```\n\npub fn read<P>(src: P) -> io::Result<Index>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let mut reader = File::open(src).map(Reader::new)?;\n\n reader.read_index()\n\n}\n\n\n", "file_path": "noodles-tabix/src/lib.rs", "rank": 76, "score": 305868.5924670036 }, { "content": "/// Reads the entire contents of a coordinate-sorted index (CSI).\n\n///\n\n/// This is a convenience function and is equivalent to opening the file at the given path and\n\n/// reading the index.\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// # use std::io;\n\n/// use noodles_csi as csi;\n\n/// let index = csi::read(\"sample.bcf.csi\")?;\n\n/// # Ok::<(), io::Error>(())\n\n/// ```\n\npub fn read<P>(src: P) -> io::Result<Index>\n\nwhere\n\n P: AsRef<Path>,\n\n{\n\n let mut reader = 
File::open(src).map(Reader::new)?;\n\n reader.read_index()\n\n}\n\n\n", "file_path": "noodles-csi/src/lib.rs", "rank": 77, "score": 305868.42238277453 }, { "content": "pub fn rans_decode<R>(reader: &mut R) -> io::Result<Vec<u8>>\n\nwhere\n\n R: Read,\n\n{\n\n let (order, _, data_len) = read_header(reader)?;\n\n\n\n let mut buf = vec![0; data_len as usize];\n\n\n\n match order {\n\n Order::Zero => order_0::decode(reader, &mut buf)?,\n\n Order::One => order_1::decode(reader, &mut buf)?,\n\n }\n\n\n\n Ok(buf)\n\n}\n\n\n", "file_path": "noodles-cram/src/rans/decode.rs", "rank": 78, "score": 304723.2221404645 }, { "content": "fn read_block_content_ids<R>(reader: &mut R) -> io::Result<Vec<Itf8>>\n\nwhere\n\n R: Read,\n\n{\n\n let len = read_itf8(reader).map(|i| i as usize)?;\n\n let mut buf = Vec::with_capacity(len);\n\n\n\n for _ in 0..len {\n\n let value = read_itf8(reader)?;\n\n buf.push(value);\n\n }\n\n\n\n Ok(buf)\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/data_container/slice/header.rs", "rank": 79, "score": 303130.0366725197 }, { "content": "fn read_byte_array_stop_encoding<R>(reader: &mut R) -> io::Result<Encoding>\n\nwhere\n\n R: Read,\n\n{\n\n let args = read_args(reader)?;\n\n let mut args_reader = &args[..];\n\n\n\n let stop_byte = args_reader.read_u8()?;\n\n let block_content_id = read_itf8(&mut args_reader)?;\n\n\n\n Ok(Encoding::ByteArrayStop(stop_byte, block_content_id))\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/data_container/compression_header/encoding.rs", "rank": 80, "score": 302664.60208671115 }, { "content": "fn read_substitution_matrix<R>(reader: &mut R) -> io::Result<SubstitutionMatrix>\n\nwhere\n\n R: Read,\n\n{\n\n let mut buf = [0; 5];\n\n reader.read_exact(&mut buf[..])?;\n\n SubstitutionMatrix::try_from(buf).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/data_container/compression_header/preservation_map.rs", "rank": 81, "score": 302664.6020867112 }, { "content": 
"fn read_byte_array_len_encoding<R>(reader: &mut R) -> io::Result<Encoding>\n\nwhere\n\n R: Read,\n\n{\n\n let args = read_args(reader)?;\n\n let mut args_reader = &args[..];\n\n\n\n let len_encoding = read_encoding(&mut args_reader)?;\n\n let value_encoding = read_encoding(&mut args_reader)?;\n\n\n\n Ok(Encoding::ByteArrayLen(\n\n Box::new(len_encoding),\n\n Box::new(value_encoding),\n\n ))\n\n}\n\n\n", "file_path": "noodles-cram/src/reader/data_container/compression_header/encoding.rs", "rank": 82, "score": 302664.60208671115 }, { "content": "pub fn read_record<R>(\n\n reader: &mut R,\n\n header: &vcf::Header,\n\n string_map: &StringMap,\n\n) -> io::Result<(Site, Vec<Genotype>)>\n\nwhere\n\n R: Read,\n\n{\n\n let site = read_site(reader, header, string_map)?;\n\n\n\n let genotypes = if site.n_sample == 0 {\n\n Vec::new()\n\n } else {\n\n read_genotypes(\n\n reader,\n\n string_map,\n\n site.n_sample as usize,\n\n usize::from(site.n_fmt),\n\n )?\n\n };\n", "file_path": "noodles-bcf/src/reader/record.rs", "rank": 83, "score": 301526.3194521549 }, { "content": "pub fn write_header<W>(writer: &mut W, header: &slice::Header) -> io::Result<()>\n\nwhere\n\n W: Write,\n\n{\n\n let reference_sequence_id = i32::from(header.reference_sequence_id());\n\n write_itf8(writer, reference_sequence_id)?;\n\n\n\n let alignment_start = header.alignment_start().map(Itf8::from).unwrap_or_default();\n\n write_itf8(writer, alignment_start)?;\n\n\n\n write_itf8(writer, header.alignment_span())?;\n\n\n\n let record_count = Itf8::try_from(header.record_count())\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?;\n\n write_itf8(writer, record_count)?;\n\n\n\n write_ltf8(writer, header.record_counter())?;\n\n\n\n let block_count = Itf8::try_from(header.block_count())\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?;\n", "file_path": "noodles-cram/src/writer/data_container/slice/header.rs", "rank": 84, "score": 301050.35274883383 }, { "content": "/// A binning 
index.\n\npub trait BinningIndex<R>\n\nwhere\n\n R: BinningIndexReferenceSequence,\n\n{\n\n /// Returns a list of indexed reference sequences.\n\n fn reference_sequences(&self) -> &[R];\n\n\n\n /// Returns the number of unplaced, unmapped records in the associated file.\n\n fn unplaced_unmapped_record_count(&self) -> Option<u64>;\n\n\n\n /// Returns the chunks that overlap with the given region.\n\n fn query<B>(&self, reference_sequence_id: usize, interval: B) -> io::Result<Vec<Chunk>>\n\n where\n\n B: RangeBounds<i32> + Clone;\n\n\n\n /// Returns the start position of the first record in the last linear bin.\n\n ///\n\n /// This is the closest position to the unplaced, unmapped records, if any, that is available\n\n /// in an index.\n\n fn first_record_in_last_linear_bin_start_position(&self) -> Option<bgzf::VirtualPosition> {\n\n self.reference_sequences()\n\n .iter()\n\n .rev()\n\n .find_map(|rs| rs.first_record_in_last_linear_bin_start_position())\n\n }\n\n}\n\n\n", "file_path": "noodles-csi/src/binning_index.rs", "rank": 85, "score": 299308.96039219777 }, { "content": "pub fn read_site<R>(\n\n reader: &mut R,\n\n header: &vcf::Header,\n\n string_map: &StringMap,\n\n) -> io::Result<Site>\n\nwhere\n\n R: Read,\n\n{\n\n let chrom = reader.read_i32::<LittleEndian>()?;\n\n let pos = reader.read_i32::<LittleEndian>()?;\n\n\n\n let rlen = reader.read_i32::<LittleEndian>()?;\n\n\n\n let qual = reader.read_f32::<LittleEndian>().map(Float::from)?;\n\n\n\n let n_info = reader.read_u16::<LittleEndian>()?;\n\n let n_allele = reader.read_u16::<LittleEndian>()?;\n\n\n\n let n_fmt_sample = reader.read_u32::<LittleEndian>()?;\n\n let n_fmt = (n_fmt_sample >> 24) as u8;\n", "file_path": "noodles-bcf/src/reader/record/site.rs", "rank": 86, "score": 297460.0002910893 }, { "content": "pub fn read_frequencies_0<R>(\n\n reader: &mut R,\n\n freqs: &mut [u32],\n\n cumulative_freqs: &mut [u32],\n\n) -> io::Result<()>\n\nwhere\n\n R: Read,\n\n{\n\n let mut sym = reader.read_u8()?;\n\n 
let mut last_sym = sym;\n\n let mut rle = 0;\n\n\n\n loop {\n\n let f = read_itf8(reader)? as u32;\n\n\n\n freqs[sym as usize] = f;\n\n\n\n if rle > 0 {\n\n rle -= 1;\n\n sym += 1;\n", "file_path": "noodles-cram/src/rans/decode/order_0.rs", "rank": 87, "score": 297460.0002910893 }, { "content": "pub fn read_genotypes<R>(\n\n reader: &mut R,\n\n string_map: &StringMap,\n\n sample_count: usize,\n\n format_count: usize,\n\n) -> io::Result<Vec<Genotype>>\n\nwhere\n\n R: Read,\n\n{\n\n use vcf::record::genotype::Field;\n\n\n\n let mut genotypes = vec![Vec::new(); sample_count];\n\n\n\n for _ in 0..format_count {\n\n let key = read_genotype_field_key(reader, string_map)?;\n\n\n\n let values = if key == Key::Genotype {\n\n read_genotype_genotype_field_values(reader, sample_count)?\n\n } else {\n\n read_genotype_field_values(reader, sample_count)?\n", "file_path": "noodles-bcf/src/reader/record/genotypes.rs", "rank": 88, "score": 297460.0002910893 }, { "content": "fn read_tag_ids_dictionary<R>(reader: &mut R) -> io::Result<TagIdsDictionary>\n\nwhere\n\n R: Read,\n\n{\n\n let data_len = read_itf8(reader)?;\n\n let mut buf = vec![0; data_len as usize];\n\n reader.read_exact(&mut buf)?;\n\n\n\n let mut buf_reader = &buf[..];\n\n\n\n let mut dictionary = Vec::new();\n\n let mut keys_buf = Vec::new();\n\n\n\n loop {\n\n keys_buf.clear();\n\n\n\n match buf_reader.read_until(0x00, &mut keys_buf) {\n\n Ok(0) => break,\n\n Ok(_) => {}\n\n Err(e) => return Err(e),\n", "file_path": "noodles-cram/src/reader/data_container/compression_header/preservation_map.rs", "rank": 89, "score": 296427.94489740086 }, { "content": "// Reads all bytes until a line feed ('\\n') or EOF is reached.\n\n//\n\n// The buffer will not include the trailing newline ('\\n' or '\\r\\n').\n\nfn read_line<R>(reader: &mut R, buf: &mut String) -> io::Result<usize>\n\nwhere\n\n R: BufRead,\n\n{\n\n match reader.read_line(buf) {\n\n Ok(0) => Ok(0),\n\n Ok(n) => {\n\n if buf.ends_with(LINE_FEED) {\n\n 
buf.pop();\n\n\n\n if buf.ends_with(CARRIAGE_RETURN) {\n\n buf.pop();\n\n }\n\n }\n\n\n\n Ok(n)\n\n }\n\n Err(e) => Err(e),\n\n }\n\n}\n", "file_path": "noodles-vcf/src/reader.rs", "rank": 90, "score": 294837.82272447296 }, { "content": "fn read_record<R>(reader: &mut R, record: &mut Record) -> io::Result<usize>\n\nwhere\n\n R: Read,\n\n{\n\n let block_size = match reader.read_u32::<LittleEndian>() {\n\n Ok(bs) => usize::try_from(bs).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?,\n\n Err(ref e) if e.kind() == io::ErrorKind::UnexpectedEof => return Ok(0),\n\n Err(e) => return Err(e),\n\n };\n\n\n\n record.resize(block_size);\n\n reader.read_exact(record)?;\n\n\n\n Ok(block_size)\n\n}\n\n\n\npub(crate) fn bytes_with_nul_to_string(buf: &[u8]) -> io::Result<String> {\n\n CStr::from_bytes_with_nul(buf)\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n .and_then(|c_str| {\n", "file_path": "noodles-bam/src/reader.rs", "rank": 91, "score": 294832.41232944775 }, { "content": "fn read_record<R>(reader: &mut R, record: &mut Record) -> io::Result<usize>\n\nwhere\n\n R: BufRead,\n\n{\n\n record.clear();\n\n\n\n let mut len = match read_name(reader, record.name_mut()) {\n\n Ok(0) => return Ok(0),\n\n Ok(n) => n,\n\n Err(e) => return Err(e),\n\n };\n\n\n\n len += read_line(reader, record.sequence_mut())?;\n\n len += consume_line(reader)?;\n\n len += read_line(reader, record.quality_scores_mut())?;\n\n\n\n Ok(len)\n\n}\n\n\n", "file_path": "noodles-fastq/src/reader.rs", "rank": 92, "score": 294832.41232944775 }, { "content": "fn read_line<R>(reader: &mut R, buf: &mut String) -> io::Result<usize>\n\nwhere\n\n R: BufRead,\n\n{\n\n match reader.read_line(buf) {\n\n Ok(0) => Ok(0),\n\n Ok(n) => {\n\n if buf.ends_with(LINE_FEED) {\n\n buf.pop();\n\n\n\n if buf.ends_with(CARRIAGE_RETURN) {\n\n buf.pop();\n\n }\n\n }\n\n\n\n Ok(n)\n\n }\n\n Err(e) => Err(e),\n\n }\n\n}\n", "file_path": "noodles-sam/src/reader.rs", "rank": 93, "score": 294832.41232944775 }, 
{ "content": "fn read_line<R>(reader: &mut R, buf: &mut String) -> io::Result<usize>\n\nwhere\n\n R: BufRead,\n\n{\n\n match reader.read_line(buf) {\n\n Ok(0) => Ok(0),\n\n Ok(n) => {\n\n if buf.ends_with(LINE_FEED) {\n\n buf.pop();\n\n\n\n if buf.ends_with(CARRIAGE_RETURN) {\n\n buf.pop();\n\n }\n\n }\n\n\n\n Ok(n)\n\n }\n\n Err(e) => Err(e),\n\n }\n\n}\n", "file_path": "noodles-gff/src/reader.rs", "rank": 94, "score": 294832.41232944775 }, { "content": "/// Optimizes a list of chunks into a list of non-overlapping chunks.\n\n///\n\n/// Unlike [`merge_chunks`], `min_offset` (typically from the linear index) is given to remove\n\n/// chunks that cannot be in the query.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use noodles_bgzf as bgzf;\n\n/// use noodles_csi::{\n\n/// binning_index::optimize_chunks,\n\n/// index::reference_sequence::bin::Chunk,\n\n/// };\n\n///\n\n/// let chunks = [\n\n/// Chunk::new(bgzf::VirtualPosition::from(2), bgzf::VirtualPosition::from(3)),\n\n/// Chunk::new(bgzf::VirtualPosition::from(5), bgzf::VirtualPosition::from(8)),\n\n/// Chunk::new(bgzf::VirtualPosition::from(7), bgzf::VirtualPosition::from(13)),\n\n/// Chunk::new(bgzf::VirtualPosition::from(21), bgzf::VirtualPosition::from(34)),\n\n/// ];\n\n/// let min_offset = bgzf::VirtualPosition::from(5);\n\n///\n\n/// let actual = optimize_chunks(&chunks, min_offset);\n\n///\n\n/// let expected = [\n\n/// Chunk::new(bgzf::VirtualPosition::from(5), bgzf::VirtualPosition::from(13)),\n\n/// Chunk::new(bgzf::VirtualPosition::from(21), bgzf::VirtualPosition::from(34)),\n\n/// ];\n\n///\n\n/// assert_eq!(actual, expected);\n\n/// ```\n\npub fn optimize_chunks(chunks: &[Chunk], min_offset: bgzf::VirtualPosition) -> Vec<Chunk> {\n\n let mut chunks: Vec<_> = chunks\n\n .iter()\n\n .filter(|c| c.end() > min_offset)\n\n .copied()\n\n .collect();\n\n\n\n if chunks.is_empty() {\n\n return chunks;\n\n }\n\n\n\n chunks.sort_unstable_by_key(|c| c.start());\n\n\n\n // At worst, no chunks are merged, 
and the resulting list will be the same size as the input.\n\n let mut merged_chunks = Vec::with_capacity(chunks.len());\n\n\n\n // `chunks` is guaranteed to be non-empty.\n\n let mut current_chunk = chunks[0];\n\n\n\n for next_chunk in chunks.iter().skip(1) {\n", "file_path": "noodles-csi/src/binning_index.rs", "rank": 95, "score": 294343.0676105914 }, { "content": "pub fn read_info<R>(\n\n reader: &mut R,\n\n infos: &vcf::header::Infos,\n\n string_map: &StringMap,\n\n len: usize,\n\n) -> io::Result<vcf::record::Info>\n\nwhere\n\n R: Read,\n\n{\n\n let mut fields = Vec::with_capacity(len);\n\n\n\n for _ in 0..len {\n\n let key = read_info_field_key(reader, string_map)?;\n\n\n\n let info = infos.get(&key).ok_or_else(|| {\n\n io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"missing header INFO record for {}\", key),\n\n )\n\n })?;\n\n\n\n let value = read_info_field_value(reader, info)?;\n\n\n\n let field = vcf::record::info::Field::new(key, value);\n\n fields.push(field);\n\n }\n\n\n\n vcf::record::Info::try_from(fields).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))\n\n}\n\n\n", "file_path": "noodles-bcf/src/reader/record/site/info.rs", "rank": 96, "score": 293560.73245758493 }, { "content": "fn read_line<R>(reader: &mut R, buf: &mut String) -> io::Result<usize>\n\nwhere\n\n R: BufRead,\n\n{\n\n match reader.read_line(buf) {\n\n Ok(0) => Ok(0),\n\n Ok(n) => {\n\n if buf.ends_with(LINE_FEED) {\n\n buf.pop();\n\n\n\n if buf.ends_with(CARRIAGE_RETURN) {\n\n buf.pop();\n\n }\n\n }\n\n\n\n Ok(n)\n\n }\n\n Err(e) => Err(e),\n\n }\n\n}\n", "file_path": "noodles-fastq/src/fai/reader.rs", "rank": 97, "score": 291936.36614824354 }, { "content": "fn read_line<R>(reader: &mut R, buf: &mut String) -> io::Result<usize>\n\nwhere\n\n R: BufRead,\n\n{\n\n match reader.read_line(buf) {\n\n Ok(0) => Ok(0),\n\n Ok(n) => {\n\n buf.pop();\n\n Ok(n)\n\n }\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::{convert::TryFrom, 
io::Write};\n\n\n\n use flate2::write::GzEncoder;\n\n use noodles_bam as bam;\n", "file_path": "noodles-cram/src/crai/reader.rs", "rank": 98, "score": 291936.36614824354 }, { "content": "fn read_sequence<R>(reader: &mut R, buf: &mut Vec<u8>) -> io::Result<usize>\n\nwhere\n\n R: BufRead,\n\n{\n\n let mut bytes_read = 0;\n\n\n\n loop {\n\n let reader_buf = reader.fill_buf()?;\n\n\n\n if reader_buf.is_empty() || reader_buf[0] == DEFINITION_PREFIX {\n\n break;\n\n }\n\n\n\n let len = match memchr(NEWLINE, reader_buf) {\n\n Some(i) => {\n\n let line = &reader_buf[..i];\n\n\n\n if line.ends_with(&[CARRIAGE_RETURN as u8]) {\n\n let end = line.len() - 1;\n\n buf.extend(&line[..end]);\n", "file_path": "noodles-fasta/src/reader.rs", "rank": 99, "score": 289643.673007567 } ]
Rust
imxrt1062-pac/imxrt1062-semc/src/bmcr1.rs
Shock-1/teensy4-rs
effc3b290f1be3c7aef62a78e82dbfbc27aa6370
#[doc = "Reader of register BMCR1"] pub type R = crate::R<u32, super::BMCR1>; #[doc = "Writer for register BMCR1"] pub type W = crate::W<u32, super::BMCR1>; #[doc = "Register BMCR1 `reset()`'s with value 0"] impl crate::ResetValue for super::BMCR1 { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `WQOS`"] pub type WQOS_R = crate::R<u8, u8>; #[doc = "Write proxy for field `WQOS`"] pub struct WQOS_W<'a> { w: &'a mut W, } impl<'a> WQOS_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f); self.w } } #[doc = "Reader of field `WAGE`"] pub type WAGE_R = crate::R<u8, u8>; #[doc = "Write proxy for field `WAGE`"] pub struct WAGE_W<'a> { w: &'a mut W, } impl<'a> WAGE_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0f << 4)) | (((value as u32) & 0x0f) << 4); self.w } } #[doc = "Reader of field `WPH`"] pub type WPH_R = crate::R<u8, u8>; #[doc = "Write proxy for field `WPH`"] pub struct WPH_W<'a> { w: &'a mut W, } impl<'a> WPH_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0xff << 8)) | (((value as u32) & 0xff) << 8); self.w } } #[doc = "Reader of field `WRWS`"] pub type WRWS_R = crate::R<u8, u8>; #[doc = "Write proxy for field `WRWS`"] pub struct WRWS_W<'a> { w: &'a mut W, } impl<'a> WRWS_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0xff << 16)) | (((value as u32) & 0xff) << 16); self.w } } #[doc = "Reader of field `WBR`"] pub type WBR_R = crate::R<u8, u8>; #[doc = "Write proxy for field `WBR`"] pub struct WBR_W<'a> { w: &'a mut W, } impl<'a> WBR_W<'a> { #[doc = r"Writes raw bits to the field"] 
#[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0xff << 24)) | (((value as u32) & 0xff) << 24); self.w } } impl R { #[doc = "Bits 0:3 - Weight of QoS"] #[inline(always)] pub fn wqos(&self) -> WQOS_R { WQOS_R::new((self.bits & 0x0f) as u8) } #[doc = "Bits 4:7 - Weight of Aging"] #[inline(always)] pub fn wage(&self) -> WAGE_R { WAGE_R::new(((self.bits >> 4) & 0x0f) as u8) } #[doc = "Bits 8:15 - Weight of Page Hit"] #[inline(always)] pub fn wph(&self) -> WPH_R { WPH_R::new(((self.bits >> 8) & 0xff) as u8) } #[doc = "Bits 16:23 - Weight of Read/Write switch"] #[inline(always)] pub fn wrws(&self) -> WRWS_R { WRWS_R::new(((self.bits >> 16) & 0xff) as u8) } #[doc = "Bits 24:31 - Weight of Bank Rotation"] #[inline(always)] pub fn wbr(&self) -> WBR_R { WBR_R::new(((self.bits >> 24) & 0xff) as u8) } } impl W { #[doc = "Bits 0:3 - Weight of QoS"] #[inline(always)] pub fn wqos(&mut self) -> WQOS_W { WQOS_W { w: self } } #[doc = "Bits 4:7 - Weight of Aging"] #[inline(always)] pub fn wage(&mut self) -> WAGE_W { WAGE_W { w: self } } #[doc = "Bits 8:15 - Weight of Page Hit"] #[inline(always)] pub fn wph(&mut self) -> WPH_W { WPH_W { w: self } } #[doc = "Bits 16:23 - Weight of Read/Write switch"] #[inline(always)] pub fn wrws(&mut self) -> WRWS_W { WRWS_W { w: self } } #[doc = "Bits 24:31 - Weight of Bank Rotation"] #[inline(always)] pub fn wbr(&mut self) -> WBR_W { WBR_W { w: self } } }
#[doc = "Reader of register BMCR1"] pub type R = crate::R<u32, super::BMCR1>; #[doc = "Writer for register BMCR1"] pub type W = crate::W<u32, super::BMCR1>; #[doc = "Register BMCR1 `reset()`'s with value 0"] impl crate::ResetValue for super::BMCR1 { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `WQOS`"] pub type WQOS_R = crate::R<u8, u8>; #[doc = "Write proxy for field `WQOS`"] pub struct WQOS_W<'a> { w: &'a mut W, } impl<'a> WQOS_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x0f) | ((value as u32) & 0x0f); self.w } } #[doc = "Reader of field `WAGE`"] pub type WAGE_R = crate::R<u8, u8>; #[doc = "Write proxy for field `WAGE`"] pub struct WAGE_W<'a> { w: &'a mut W, } impl<'a> WAGE_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0x0f << 4)) | (((value as u32) & 0x0f) << 4); self.w } } #[doc = "Reader of field `WPH`"] pub type WPH_R = crate::R<u8, u8>; #[d
] #[inline(always)] pub fn wbr(&self) -> WBR_R { WBR_R::new(((self.bits >> 24) & 0xff) as u8) } } impl W { #[doc = "Bits 0:3 - Weight of QoS"] #[inline(always)] pub fn wqos(&mut self) -> WQOS_W { WQOS_W { w: self } } #[doc = "Bits 4:7 - Weight of Aging"] #[inline(always)] pub fn wage(&mut self) -> WAGE_W { WAGE_W { w: self } } #[doc = "Bits 8:15 - Weight of Page Hit"] #[inline(always)] pub fn wph(&mut self) -> WPH_W { WPH_W { w: self } } #[doc = "Bits 16:23 - Weight of Read/Write switch"] #[inline(always)] pub fn wrws(&mut self) -> WRWS_W { WRWS_W { w: self } } #[doc = "Bits 24:31 - Weight of Bank Rotation"] #[inline(always)] pub fn wbr(&mut self) -> WBR_W { WBR_W { w: self } } }
oc = "Write proxy for field `WPH`"] pub struct WPH_W<'a> { w: &'a mut W, } impl<'a> WPH_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0xff << 8)) | (((value as u32) & 0xff) << 8); self.w } } #[doc = "Reader of field `WRWS`"] pub type WRWS_R = crate::R<u8, u8>; #[doc = "Write proxy for field `WRWS`"] pub struct WRWS_W<'a> { w: &'a mut W, } impl<'a> WRWS_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0xff << 16)) | (((value as u32) & 0xff) << 16); self.w } } #[doc = "Reader of field `WBR`"] pub type WBR_R = crate::R<u8, u8>; #[doc = "Write proxy for field `WBR`"] pub struct WBR_W<'a> { w: &'a mut W, } impl<'a> WBR_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !(0xff << 24)) | (((value as u32) & 0xff) << 24); self.w } } impl R { #[doc = "Bits 0:3 - Weight of QoS"] #[inline(always)] pub fn wqos(&self) -> WQOS_R { WQOS_R::new((self.bits & 0x0f) as u8) } #[doc = "Bits 4:7 - Weight of Aging"] #[inline(always)] pub fn wage(&self) -> WAGE_R { WAGE_R::new(((self.bits >> 4) & 0x0f) as u8) } #[doc = "Bits 8:15 - Weight of Page Hit"] #[inline(always)] pub fn wph(&self) -> WPH_R { WPH_R::new(((self.bits >> 8) & 0xff) as u8) } #[doc = "Bits 16:23 - Weight of Read/Write switch"] #[inline(always)] pub fn wrws(&self) -> WRWS_R { WRWS_R::new(((self.bits >> 16) & 0xff) as u8) } #[doc = "Bits 24:31 - Weight of Bank Rotation"
random
[ { "content": "/// Migrate the `lib.rs` of the PAC subscrate, adding\n\n/// our necessary header to the top of the file.\n\nfn write_lib<R: Read>(crate_path: &Path, mut src: R) {\n\n static LIB_PRELUDE: &str = r#\"#![deny(warnings)]\n\n#![allow(non_camel_case_types)]\n\n#![allow(clippy::all)]\n\n#![no_std]\n\n\n\nmod generic;\n\npub use generic::*;\n\n\n\n\"#;\n\n let mut crate_lib =\n\n fs::File::create(crate_path.join(\"src\").join(\"lib.rs\")).expect(\"Unable to create lib.rs\");\n\n crate_lib\n\n .write_all(LIB_PRELUDE.as_bytes())\n\n .expect(\"Unable to write lib.rs prelude\");\n\n io::copy(&mut src, &mut crate_lib).unwrap();\n\n}\n\n\n", "file_path": "tools/import/src/main.rs", "rank": 0, "score": 294576.1012728157 }, { "content": "/// Writes the buffer of data to the USB host\n\n///\n\n/// TODO error handling, return the number of bytes written, etc.\n\npub fn serial_write<B: AsRef<[u8]>>(buffer: &B) {\n\n unsafe {\n\n let buffer = buffer.as_ref();\n\n usb_serial_write(buffer.as_ptr(), buffer.len() as u32);\n\n }\n\n}\n", "file_path": "teensy4-bsp/teensy4-usb-sys/src/lib.rs", "rank": 1, "score": 268487.44199800125 }, { "content": "/// Computes the number of clock ticks that span the provide duration, given\n\n/// the clock frequency and clock divider. If there is no divider, use `Divider::default()`\n\n/// to specify an unused divider. 
Returns `Ok(ticks)` when the computation of\n\n/// clock ticks succeeds, or an error.\n\npub fn ticks<R: TicksRepr>(\n\n dur: Duration,\n\n freq: Frequency,\n\n div: Divider,\n\n) -> Result<Ticks<R>, TicksError> {\n\n // Ticks computed as\n\n //\n\n // ticks = (duration / clock_period) - 1\n\n //\n\n // where `clock_period` is the effective clock period: `freq / div`\n\n let delay_ns = u64::try_from(dur.as_nanos()).map_err(|_| TicksError::DurationOverflow)?;\n\n let effective_freq = freq\n\n .0\n\n .checked_div(div.0)\n\n .ok_or(TicksError::DurationOverflow)?;\n\n let clock_period_ns = 1_000_000_000u32\n\n .checked_div(effective_freq)\n\n .map(u64::from)\n\n .ok_or(TicksError::DivideByZero)?;\n\n delay_ns\n", "file_path": "imxrt1062-hal/src/ccm.rs", "rank": 2, "score": 244444.4879687826 }, { "content": "#[inline(always)]\n\nfn reg3_trg(mv: u32) -> u8 {\n\n ((mv - 800) / 25) as u8\n\n}\n\n\n", "file_path": "imxrt1062-hal/src/ccm/arm_clock.rs", "rank": 3, "score": 237588.7131381132 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "tools/import/src/generic.rs", "rank": 4, "score": 226075.04879826328 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = 
\"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-can3/src/generic.rs", "rank": 5, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-pmu/src/generic.rs", "rank": 6, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub 
trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-wdog1/src/generic.rs", "rank": 7, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-csu/src/generic.rs", "rank": 8, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> 
Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-ccm/src/generic.rs", "rank": 9, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-gpio1/src/generic.rs", "rank": 10, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: 
vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-aipstz1/src/generic.rs", "rank": 11, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-cmp1/src/generic.rs", "rank": 12, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, 
REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-usdhc1/src/generic.rs", "rank": 13, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-sai1/src/generic.rs", "rank": 14, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc 
= \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-usb1/src/generic.rs", "rank": 15, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-enet/src/generic.rs", "rank": 16, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": 
"imxrt1062-pac/imxrt1062-adc1/src/generic.rs", "rank": 17, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-kpp/src/generic.rs", "rank": 18, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-lcdif/src/generic.rs", "rank": 19, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the 
register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-dmamux/src/generic.rs", "rank": 20, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-pgc/src/generic.rs", "rank": 21, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly 
writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-dma0/src/generic.rs", "rank": 22, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-lpi2c1/src/generic.rs", "rank": 23, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset 
value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-can1/src/generic.rs", "rank": 24, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-spdif/src/generic.rs", "rank": 25, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to 
register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-dcdc/src/generic.rs", "rank": 26, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/generic.rs", "rank": 27, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe 
impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-snvs/src/generic.rs", "rank": 28, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-gpc/src/generic.rs", "rank": 29, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = 
\"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-romc/src/generic.rs", "rank": 30, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-semc/src/generic.rs", "rank": 31, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such 
way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-enc1/src/generic.rs", "rank": 32, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-flexspi/src/generic.rs", "rank": 33, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-gpt1/src/generic.rs", "rank": 34, "score": 218303.81705612916 }, { "content": 
"#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-csi/src/generic.rs", "rank": 35, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-flexio1/src/generic.rs", "rank": 36, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = 
\"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-lpuart1/src/generic.rs", "rank": 37, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-pxp/src/generic.rs", "rank": 38, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type 
Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-flexram/src/generic.rs", "rank": 39, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-iomuxc/src/generic.rs", "rank": 40, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides 
volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-tmr1/src/generic.rs", "rank": 41, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-pit/src/generic.rs", "rank": 42, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: 
marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-dcp/src/generic.rs", "rank": 43, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-xbarb2/src/generic.rs", "rank": 44, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: 
Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-xbara1/src/generic.rs", "rank": 45, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-trng/src/generic.rs", "rank": 46, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read 
the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-pwm1/src/generic.rs", "rank": 47, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-xtalosc24m/src/generic.rs", "rank": 48, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-usbphy1/src/generic.rs", "rank": 49, 
"score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-src/src/generic.rs", "rank": 50, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-ewm/src/generic.rs", "rank": 51, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial 
value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-tempmon/src/generic.rs", "rank": 52, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-ocotp/src/generic.rs", "rank": 53, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue 
{\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-bee/src/generic.rs", "rank": 54, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-usbnc1/src/generic.rs", "rank": 55, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> 
Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-rtwdog/src/generic.rs", "rank": 56, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-lpspi1/src/generic.rs", "rank": 57, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: 
vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-aoi1/src/generic.rs", "rank": 58, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-usb-analog/src/generic.rs", "rank": 59, "score": 214636.3398688799 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> 
{}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-adc-etc/src/generic.rs", "rank": 60, "score": 214636.3398688799 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-iomuxc-snvs/src/generic.rs", "rank": 61, "score": 214636.3398688799 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` 
register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-system-control/src/generic.rs", "rank": 62, "score": 214636.3398688799 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-iomuxc-gpr/src/generic.rs", "rank": 63, "score": 214636.3398688799 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": 
"imxrt1062-pac/imxrt1062-ccm-analog/src/generic.rs", "rank": 64, "score": 214636.3398688799 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-iomuxc-snvs-gpr/src/generic.rs", "rank": 65, "score": 211103.60281969025 }, { "content": "/// Sets the main system clock to as close to `hz` as possible.\n\n/// Returns the `(ARM, IPG)` clock frequencies based on the input frequency\n\n/// and selected prescalars.\n\npub fn set_arm_clock(\n\n mut hz: u32,\n\n ccm: &pac::CCM,\n\n ccm_analog: &pac::CCM_ANALOG,\n\n dcdc: &pac::DCDC,\n\n) -> (u32, u32) {\n\n let millivolts: u32 = if hz > 528_000_000 {\n\n 1250 // 1.25V\n\n } else if hz <= 24_000_000 {\n\n 950 // 0.95V\n\n } else {\n\n 1150 // 1.15V, default\n\n };\n\n\n\n // Enable clocks to the DCDC module\n\n // Safety: CG3 field is two bits\n\n ccm.ccgr6.modify(|_, w| unsafe { w.cg3().bits(0x3) });\n\n\n\n // Set VDD_SOC, voltage for the chip\n\n if dcdc.reg3.read().trg().bits() < reg3_trg(millivolts) {\n", "file_path": "imxrt1062-hal/src/ccm/arm_clock.rs", "rank": 66, "score": 173158.03491427697 }, { "content": "type CatchAll = toml::value::Value;\n\n\n\n/// Support for Cargo.toml workspaces\n\nmod workspace {\n\n use 
std::path::Path;\n\n\n\n #[derive(serde::Deserialize, serde::Serialize)]\n\n struct Table {\n\n members: Vec<String>,\n\n exclude: Vec<String>,\n\n }\n\n\n\n /// A `serde` serializable and deserializable definition of a Cargo workspace\n\n #[derive(serde::Deserialize, serde::Serialize)]\n\n pub struct Workspace {\n\n workspace: Table,\n\n }\n\n\n\n impl Workspace {\n\n /// Add a new member to the workspace, then sort the collection of\n", "file_path": "tools/cargo-toml/src/lib.rs", "rank": 67, "score": 162878.75798978022 }, { "content": "#[doc(hidden)]\n\npub trait IntoRegister {\n\n fn into_reg() -> *const crate::pac::gpio1::RegisterBlock;\n\n}\n\n\n\nimpl IntoRegister for GPIO2 {\n\n fn into_reg() -> *const crate::pac::gpio1::RegisterBlock {\n\n crate::pac::GPIO2::ptr()\n\n }\n\n}\n\n\n\nimpl IntoRegister for GPIO7 {\n\n fn into_reg() -> *const crate::pac::gpio1::RegisterBlock {\n\n crate::pac::GPIO7::ptr()\n\n }\n\n}\n\n\n\nmacro_rules! _ios_impl {\n\n ($($io:ident)+) => {\n\n $(\n\n pub struct $io<GPIO, Dir> {\n", "file_path": "imxrt1062-hal/src/gpio.rs", "rank": 68, "score": 160874.62562360297 }, { "content": "/// Chain two timers together, returning a `ChainedPIT` timer that can\n\n/// count twice as many ticks.\n\n///\n\n/// The API enforces that channel 1 is chained to channel 0, or channel 2 is\n\n/// chained to channel 1, or channel 3 is chained to channel 2. 
Any other\n\n/// combination of chaining is prevented by the compiler.\n\n///\n\n/// We do not support chaining more than two timers.\n\npub fn chain<C1: channel::Channel>(\n\n lower: PIT<<C1 as channel::Channel>::ChainedTo>,\n\n upper: PIT<C1>,\n\n) -> ChainedPIT<<C1 as channel::Channel>::ChainedTo, C1> {\n\n ChainedPIT { lower, upper }\n\n}\n\n\n\nimpl<C0, C1> CountDown for ChainedPIT<C0, C1>\n\nwhere\n\n C0: channel::Channel,\n\n C1: channel::Channel,\n\n{\n\n type Time = core::time::Duration;\n\n fn start<T: Into<Self::Time>>(&mut self, time: T) {\n\n // clock_hz and divider are equal across all PITs\n\n let ticks: Ticks<u64> = match ticks(time.into(), self.lower.clock_hz, self.lower.divider) {\n\n Ok(ticks) => ticks,\n\n // Saturate the load value\n\n Err(TicksError::TicksOverflow) | Err(TicksError::DurationOverflow) => {\n\n Ticks(core::u64::MAX)\n", "file_path": "imxrt1062-hal/src/pit.rs", "rank": 69, "score": 158834.01211468678 }, { "content": "#[derive(Clone, Copy)]\n\nstruct Reg(&'static pac::pwm1::RegisterBlock);\n\nimpl core::ops::Deref for Reg {\n\n type Target = pac::pwm1::RegisterBlock;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n self.0\n\n }\n\n}\n\n\n\nimpl Reg {\n\n fn reset_ok<S, F, R>(&mut self, mut act: F) -> R\n\n where\n\n F: FnMut(&pac::pwm1::SM) -> R,\n\n S: submodule::Submodule,\n\n {\n\n let idx: usize = <S as submodule::Submodule>::IDX;\n\n self.0.mctrl.modify(|_, w| unsafe {\n\n // Safety, cldok is 4 bits, idx is bound [0, 4)\n\n w.cldok().bits(1 << idx)\n\n });\n", "file_path": "imxrt1062-hal/src/pwm.rs", "rank": 70, "score": 142477.2037029197 }, { "content": "#[no_mangle]\n\nfn r#yield() {\n\n // 'yield' is a Rust keyword. 
But, it needs to be called 'yield' for the C USB stack\n\n cortex_m::asm::delay(1024);\n\n}\n", "file_path": "teensy4-bsp/src/lib.rs", "rank": 71, "score": 116122.72182839812 }, { "content": "fn main() {\n\n // We're using serial NOR flash.\n\n let nor_cb = nor::ConfigurationBlock {\n\n page_size: nor::PageSize::new(256),\n\n sector_size: nor::SectorSize::new(4096),\n\n ip_cmd_serial_clk_freq: nor::SerialClockFrequency::MHz30,\n\n };\n\n // Load the lookup table with our magic numbers. Numbers\n\n // that are not specifed are set to `0`.\n\n let lookup_table = {\n\n let mut lookup = LookupTable::new();\n\n lookup.insert_u32(0, 0x0A18_04EB);\n\n lookup.insert_u32(1, 0x2604_3206);\n\n lookup.insert_u32(4, 0x2404_0405);\n\n lookup.insert_u32(12, 0x0000_0406);\n\n lookup.insert_u32(20, 0x0818_0420);\n\n lookup.insert_u32(32, 0x0818_04D8);\n\n lookup.insert_u32(36, 0x0818_0402);\n\n lookup.insert_u32(37, 0x0000_2004);\n\n lookup.insert_u32(44, 0x0000_0460);\n", "file_path": "teensy4-fcb/build.rs", "rank": 72, "score": 100503.20352860841 }, { "content": "fn main() {\n\n for &rerun_if_changed in RERUN_IF_CHANGED.iter().chain(C_SRCS.iter()) {\n\n println!(\"cargo:rerun-if-changed={}\", rerun_if_changed);\n\n }\n\n\n\n let out_dir = PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n\n\n let link_x = include_bytes!(\"link.x\");\n\n let mut script = File::create(out_dir.join(\"link.x\")).unwrap();\n\n script.write_all(link_x).unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out_dir.display());\n\n\n\n let mut builder = cc::Build::new();\n\n builder.compiler(CC);\n\n builder.archiver(AR);\n\n builder.no_default_flags(true);\n\n builder.files(C_SRCS);\n\n for &flag in CFLAGS.iter() {\n\n builder.flag(flag);\n\n }\n\n builder.compile(\"boot\");\n\n}\n", "file_path": "imxrt1062-rt/build.rs", "rank": 73, "score": 100503.20352860841 }, { "content": "fn main() {\n\n for rerun_if_changed in &[\"device.x\", \"build.rs\"] {\n\n println!(\"cargo:rerun-if-changed={}\", 
rerun_if_changed);\n\n }\n\n let out_dir = PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n let device_x = include_bytes!(\"device.x\");\n\n let mut script = File::create(out_dir.join(\"device.x\")).unwrap();\n\n script.write_all(device_x).unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out_dir.display());\n\n}\n", "file_path": "imxrt1062-pac/build.rs", "rank": 74, "score": 100503.20352860841 }, { "content": "PROGMEM const struct usb_string_descriptor_struct usb_string_product_name_default = {\n", "file_path": "teensy4-bsp/teensy4-usb-sys/src/usb_desc.c", "rank": 75, "score": 99455.64706977915 }, { "content": "struct Logger {\n\n /// Tracks if we are (not) enabled\n\n enabled: bool,\n\n /// A collection of targets that we are expected\n\n /// to filter. If this is empty, we allow everything\n\n filters: &'static [(&'static str, Option<::log::LevelFilter>)],\n\n}\n\n\n\nimpl Logger {\n\n /// Returns true if the target is in the filter, else false if the target is\n\n /// not in the list of kept targets. If the filter collection is empty, return\n\n /// true.\n\n fn filtered(&self, metadata: &::log::Metadata) -> bool {\n\n if self.filters.is_empty() {\n\n true\n\n } else if let Some(idx) = self\n\n .filters\n\n .iter()\n\n .position(|&(target, _)| target == metadata.target())\n\n {\n", "file_path": "teensy4-bsp/src/log.rs", "rank": 76, "score": 99070.89424329135 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let mut periphs = bsp::Peripherals::take().unwrap();\n\n let (_, ipg_hz) = periphs.ccm.pll1.set_arm_clock(\n\n bsp::hal::ccm::PLL1::ARM_HZ,\n\n &mut periphs.ccm.handle,\n\n &mut periphs.dcdc,\n\n );\n\n periphs.ccm.pll2.set(\n\n &mut periphs.ccm.handle,\n\n [\n\n Some(bsp::hal::ccm::pll2::MHZ_352),\n\n Some(bsp::hal::ccm::pll2::MHZ_594),\n\n Some(bsp::hal::ccm::pll2::MHZ_396),\n\n Some(bsp::hal::ccm::pll2::MHZ_297),\n\n ],\n\n );\n\n periphs.ccm.pll3.set(\n\n &mut periphs.ccm.handle,\n\n [\n\n Some(bsp::hal::ccm::pll3::MHZ_720),\n", "file_path": "teensy4-examples/src/pit.rs", "rank": 77, "score": 99012.10562235475 }, { "content": "#[rt::entry]\n\nfn main() -> ! {\n\n let mut p = bsp::Peripherals::take().unwrap();\n\n // Initialize logging with the default settings\n\n p.log.init(Default::default());\n\n bsp::delay(2000);\n\n p.ccm\n\n .pll1\n\n .set_arm_clock(bsp::hal::ccm::PLL1::ARM_HZ, &mut p.ccm.handle, &mut p.dcdc);\n\n let mut led = p.led;\n\n loop {\n\n log::error!(\"Something terrible happened!\");\n\n log::warn!(\"Something happened, but we fixed it\");\n\n log::info!(\"It's 31'C outside\");\n\n log::debug!(\"Sleeping for 1 second...\");\n\n log::trace!(\"{} + {} = {}\", 3, 2, 3 + 2);\n\n led.toggle().unwrap();\n\n bsp::delay(5000);\n\n }\n\n}\n", "file_path": "teensy4-examples/src/usb.rs", "rank": 78, "score": 99012.10562235475 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n let mut peripherals = bsp::Peripherals::take().unwrap();\n\n loop {\n\n peripherals.led.set_high().unwrap();\n\n wfi();\n\n }\n\n}\n", "file_path": "teensy4-examples/src/led.rs", "rank": 79, "score": 99012.10562235475 }, { "content": "fn main() {\n\n let output_pac: PathBuf = PathBuf::from(\"../\").join(OUTPUT_PAC_NAME);\n\n let mut args = env::args().skip(1);\n\n let svd_crate_path = match args.next() {\n\n Some(path) => PathBuf::from(path),\n\n None => {\n\n println!(\"usage: path/to/svd2rust/output module_name ...\");\n\n process::exit(1);\n\n }\n\n };\n\n\n\n let mut new_pac_crates: Vec<String> = Vec::new();\n\n for module_name in args {\n\n let module_name = &module_name;\n\n let peripheral_module_src = fs::File::open(\n\n svd_crate_path\n\n .join(\"src\")\n\n .join(format!(\"{}.rs\", module_name)),\n\n )\n\n .unwrap_or_else(|_| panic!(\"Unable to find main module for {}\", module_name));\n", "file_path": "tools/import/src/main.rs", "rank": 80, "score": 99012.10562235475 }, { "content": "#[rt::entry]\n\nfn main() -> ! {\n\n // Prepare all the BSP peripherals\n\n let mut p = bsp::Peripherals::take().unwrap();\n\n // Initialize the logging, so we can use it in the PWM loop below\n\n p.log.init(Default::default());\n\n // Delay is only to let a user set-up their USB serial connection...\n\n bsp::delay(5000);\n\n // Set the core and IPG clock. The IPG clock frequency drives the PWM (sub)modules\n\n let (_, ipg_hz) =\n\n p.ccm\n\n .pll1\n\n .set_arm_clock(bsp::hal::ccm::PLL1::ARM_HZ, &mut p.ccm.handle, &mut p.dcdc);\n\n bsp::delay(100);\n\n // Enable the clocks for the PWM2 module\n\n let mut pwm2 = p.pwm2.clock(&mut p.ccm.handle);\n\n // Get the outputs from the PWM2 module, submodule 2.\n\n // Set a 1KHz switching frequency, using a prescalar of 32.\n\n let (mut pin_a, mut pin_b) = pwm2\n\n .outputs(\n\n p.pins.p6.alt2(),\n", "file_path": "teensy4-examples/src/pwm.rs", "rank": 81, "score": 99012.10562235475 }, { "content": "#[rt::entry]\n\nfn main() -> ! 
{\n\n let p = bsp::Peripherals::take().unwrap();\n\n let mut led = p.led;\n\n\n\n loop {\n\n bsp::delay(LED_PERIOD_MS);\n\n led.toggle().unwrap();\n\n }\n\n}\n", "file_path": "teensy4-examples/src/systick.rs", "rank": 82, "score": 99012.10562235475 }, { "content": "#[crate::rt::interrupt]\n\nfn USB_OTG1() {\n\n unsafe {\n\n usbsys::isr();\n\n }\n\n}\n\n\n", "file_path": "teensy4-bsp/src/log.rs", "rank": 83, "score": 97581.32991587727 }, { "content": "fn main() {\n\n for &rerun_if_changed in C_SRCS.iter() {\n\n println!(\"cargo:rerun-if-changed={}\", rerun_if_changed);\n\n }\n\n\n\n let mut builder = cc::Build::new();\n\n builder.compiler(CC);\n\n builder.archiver(AR);\n\n builder.no_default_flags(true);\n\n builder.files(C_SRCS);\n\n builder.include(\"./src/\");\n\n // Add all flags\n\n for &flag in CFLAGS.iter().chain(CPPFLAGS.iter()) {\n\n builder.flag(flag);\n\n }\n\n builder.compile(\"usbsys\");\n\n}\n", "file_path": "teensy4-bsp/teensy4-usb-sys/build.rs", "rank": 84, "score": 96207.28849176235 }, { "content": "/// Custom cursor for writing into buffers\n\nstruct Cursor<'a> {\n\n buffer: &'a mut [u8],\n\n offset: usize,\n\n}\n\n\n\nimpl<'a> Cursor<'a> {\n\n fn new(buffer: &'a mut [u8]) -> Self {\n\n Cursor { buffer, offset: 0 }\n\n }\n\n\n\n fn clear(&mut self) {\n\n self.offset = 0;\n\n }\n\n}\n\n\n\nimpl<'a> fmt::Write for Cursor<'a> {\n\n fn write_str(&mut self, msg: &str) -> fmt::Result {\n\n let src = msg.as_bytes();\n\n let buf = &mut self.buffer[self.offset..];\n\n let len = core::cmp::min(buf.len(), src.len());\n", "file_path": "teensy4-bsp/src/log.rs", "rank": 85, "score": 93457.91441603214 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "tools/import/src/generic.rs", "rank": 86, "score": 91592.0281635963 }, { "content": "#[doc = \"This trait shows that 
register has `read` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Writable` can be also `modify`'ed\"]\n\npub trait Readable {}\n", "file_path": "tools/import/src/generic.rs", "rank": 87, "score": 91577.25159867067 }, { "content": " pub trait Channel {\n\n const IDX: usize;\n\n type ChainedTo: Channel;\n\n }\n\n\n\n impl Channel for _X {\n\n const IDX: usize = core::usize::MAX;\n\n type ChainedTo = _X;\n\n }\n\n\n\n impl Channel for _0 {\n\n const IDX: usize = 0;\n\n type ChainedTo = _X;\n\n }\n\n impl Channel for _1 {\n\n const IDX: usize = 1;\n\n type ChainedTo = _0;\n\n }\n\n impl Channel for _2 {\n\n const IDX: usize = 2;\n", "file_path": "imxrt1062-hal/src/pit.rs", "rank": 88, "score": 91559.44897784069 }, { "content": " /// Describes a PWM pin's associated submodule\n\n ///\n\n /// Each PWM module contains four submodules, 0 through 3 (inclusive).\n\n pub trait Submodule {\n\n /// The submodule's number represented as a value\n\n const IDX: usize;\n\n /// Returns the submodule register block associated with this\n\n /// PWM module.\n\n fn submodule(pwm: &pwm1::RegisterBlock) -> &pwm1::SM;\n\n }\n\n pub struct _0;\n\n impl Submodule for _0 {\n\n const IDX: usize = 0;\n\n fn submodule(pwm: &pwm1::RegisterBlock) -> &pwm1::SM {\n\n &pwm.sm0\n\n }\n\n }\n\n pub struct _1;\n\n impl Submodule for _1 {\n\n const IDX: usize = 1;\n\n fn submodule(pwm: &pwm1::RegisterBlock) -> &pwm1::SM {\n\n &pwm.sm1\n\n }\n", "file_path": "imxrt1062-hal/src/iomuxc/pwm.rs", "rank": 89, "score": 90185.87502893533 }, { "content": "/// Describes a pin that might be used for PWM functions\n\npub trait Pin {\n\n /// The pin's output designation; either 'A' or 'B'\n\n type Output: output::Output;\n\n /// The associated PWM module, base 0\n\n type Module: module::Module;\n\n /// The submodule of the PWM module\n\n type Submodule: submodule::Submodule;\n\n}\n\n\n\nimpl Pin for GPIO_B0_10<Alt2> {\n\n type Output = output::A;\n\n type Module = module::_2; // FlexPWM2\n\n type 
Submodule = submodule::_2; // FlexPWM2\n\n}\n\n\n\nimpl Pin for GPIO_B0_11<Alt2> {\n\n type Output = output::B;\n\n type Module = module::_2; // FlexPWM2\n\n type Submodule = submodule::_2; // FlexPWM2\n\n}\n", "file_path": "imxrt1062-hal/src/iomuxc/pwm.rs", "rank": 90, "score": 90185.87502893533 }, { "content": " /// Describes a PWM pin's associated module\n\n pub trait Module {\n\n /// Numeric value (1, 2, 3, 4) representing the module\n\n const IDX: usize;\n\n }\n\n pub struct _1;\n\n impl Module for _1 {\n\n const IDX: usize = 1;\n\n }\n\n pub struct _2;\n\n impl Module for _2 {\n\n const IDX: usize = 2;\n\n }\n\n pub struct _3;\n\n impl Module for _3 {\n\n const IDX: usize = 3;\n\n }\n\n pub struct _4;\n\n impl Module for _4 {\n\n const IDX: usize = 4;\n\n }\n\n}\n\n\n\n/// Type tags for PWM submodules\n\n///\n\n/// Each PWM modules has four submodules, each having\n\n/// two output pins.\n\npub mod submodule {\n\n use crate::pac::pwm1;\n\n\n", "file_path": "imxrt1062-hal/src/iomuxc/pwm.rs", "rank": 91, "score": 90185.87502893533 }, { "content": " /// Describes an output pin\n\n pub trait Output {}\n\n /// PWM output pin 'A'\n\n pub struct A;\n\n impl Output for A {}\n\n /// PWM output pin 'B'\n\n pub struct B;\n\n impl Output for B {}\n\n}\n\n\n\n/// Type tags that designate a PWM module\n\npub mod module {\n", "file_path": "imxrt1062-hal/src/iomuxc/pwm.rs", "rank": 92, "score": 90185.87502893533 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "imxrt1062-pac/imxrt1062-flexram/src/generic.rs", "rank": 93, "score": 88898.28746466822 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", 
"file_path": "imxrt1062-pac/imxrt1062-flexio1/src/generic.rs", "rank": 94, "score": 88898.28746466822 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "imxrt1062-pac/imxrt1062-dcp/src/generic.rs", "rank": 95, "score": 88898.28746466822 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "imxrt1062-pac/imxrt1062-flexspi/src/generic.rs", "rank": 96, "score": 88898.28746466822 }, { "content": "#[doc = \"This trait shows that register has `write`, `write_with_zero` and `reset` method\"]\n\n#[doc = \"\"]\n\n#[doc = \"Registers marked with `Readable` can be also `modify`'ed\"]\n\npub trait Writable {}\n", "file_path": "imxrt1062-pac/imxrt1062-ewm/src/generic.rs", "rank": 97, "score": 88898.28746466822 }, { "content": "#[doc = \"Reader of register STDR\"]\n\npub type R = crate::R<u32, super::STDR>;\n\n#[doc = \"Writer for register STDR\"]\n\npub type W = crate::W<u32, super::STDR>;\n\n#[doc = \"Register STDR `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::STDR {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Write proxy for field `DATA`\"]\n\npub struct DATA_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DATA_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n", "file_path": "imxrt1062-pac/imxrt1062-lpi2c1/src/stdr.rs", "rank": 99, "score": 116.03159452824661 } ]
Rust
src/git.rs
jhzn/purs
574971936795c484acf3d3785c6ed6bbbc15ee13
use ansi_term::Colour::{Cyan, Green, Purple, Red}; use ansi_term::{ANSIGenericString, ANSIStrings}; use git2::{self, Repository, StatusOptions}; use std::fmt; pub struct Info { detailed: bool, head_description: Option<String>, current_action: Option<String>, index_change: usize, conflicted: usize, working_tree_modified: usize, untracked: usize, origin_ahead: usize, origin_behind: usize, } impl fmt::Display for Info { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut out = vec![]; if let Some(hash) = &self.head_description { out.push(Red.paint(format!("{}", hash))); } if self.detailed { if self.origin_ahead > 0 { out.push(Cyan.paint(format!("↑ {}", self.origin_ahead))); } if self.origin_behind > 0 { out.push(Cyan.paint(format!("↓ {}", self.origin_behind))); } if self.index_change == 0 && self.working_tree_modified == 0 && self.conflicted == 0 && self.untracked == 0 { out.push(Green.paint("✔")); } else { if self.index_change > 0 { out.push(Green.paint(format!("♦ {}", self.index_change))); } if self.conflicted > 0 { out.push(Red.paint(format!("✖ {}", self.conflicted))); } if self.working_tree_modified > 0 { out.push(ANSIGenericString::from(format!( "✚ {}", self.working_tree_modified ))); } if self.untracked > 0 { out.push(ANSIGenericString::from("…")); } } } else { if self.index_change > 0 || self.working_tree_modified > 0 || self.conflicted > 0 || self.untracked > 0 { out.push(Red.bold().paint("*")); } } if let Some(action) = &self.current_action { out.push(Purple.paint(format!(" {}", action))); } write!(f, "({})", ANSIStrings(&out).to_string()) } } pub fn get_status(r: &Repository, detailed: bool) -> Option<Info> { let (index_change, wt_change, conflicted, untracked) = count_files_statuses(r).unwrap_or((0,0,0,0)); let (ahead, behind) = get_ahead_behind(r).unwrap_or((0, 0)); Some(Info { detailed, head_description: get_head_shortname(r), current_action: get_action(r), conflicted, index_change, working_tree_modified: wt_change, untracked, origin_ahead: 
ahead, origin_behind: behind, }) } fn get_ahead_behind(r: &Repository) -> Option<(usize, usize)> { let head = r.head().ok()?; if !head.is_branch() { return None; } let head_name = head.shorthand()?; let head_branch = r.find_branch(head_name, git2::BranchType::Local).ok()?; let upstream = head_branch.upstream().ok()?; let head_oid = head.target()?; let upstream_oid = upstream.get().target()?; r.graph_ahead_behind(head_oid, upstream_oid).ok() } fn get_head_shortname(r: &Repository) -> Option<String> { let head = r.head().ok()?; if let Some(shorthand) = head.shorthand() { if shorthand != "HEAD" { return Some(shorthand.to_string()); } } let object = head.peel(git2::ObjectType::Commit).ok()?; let short_id = object.short_id().ok()?; Some(format!( ":{}", short_id.iter().map(|ch| *ch as char).collect::<String>() )) } fn count_files_statuses(r: &Repository) -> Option<(usize, usize, usize, usize)> { let mut opts = StatusOptions::new(); opts.include_untracked(true); fn count_files(statuses: &git2::Statuses<'_>, status: git2::Status) -> usize { statuses .iter() .filter(|entry| entry.status().intersects(status)) .count() } let statuses = r.statuses(Some(&mut opts)).ok()?; Some(( count_files( &statuses, git2::Status::INDEX_NEW | git2::Status::INDEX_MODIFIED | git2::Status::INDEX_DELETED | git2::Status::INDEX_RENAMED | git2::Status::INDEX_TYPECHANGE, ), count_files( &statuses, git2::Status::WT_MODIFIED | git2::Status::WT_DELETED | git2::Status::WT_TYPECHANGE | git2::Status::WT_RENAMED, ), count_files(&statuses, git2::Status::CONFLICTED), count_files(&statuses, git2::Status::WT_NEW), )) } fn get_action(r: &Repository) -> Option<String> { let gitdir = r.path(); for tmp in &[ gitdir.join("rebase-apply"), gitdir.join("rebase"), gitdir.join("..").join(".dotest"), ] { if tmp.join("rebasing").exists() { return Some("rebase".to_string()); } if tmp.join("applying").exists() { return Some("am".to_string()); } if tmp.exists() { return Some("am/rebase".to_string()); } } for tmp in &[ 
gitdir.join("rebase-merge").join("interactive"), gitdir.join(".dotest-merge").join("interactive"), ] { if tmp.exists() { return Some("rebase-i".to_string()); } } for tmp in &[gitdir.join("rebase-merge"), gitdir.join(".dotest-merge")] { if tmp.exists() { return Some("rebase-m".to_string()); } } if gitdir.join("MERGE_HEAD").exists() { return Some("merge".to_string()); } if gitdir.join("BISECT_LOG").exists() { return Some("bisect".to_string()); } if gitdir.join("CHERRY_PICK_HEAD").exists() { if gitdir.join("sequencer").exists() { return Some("cherry-seq".to_string()); } else { return Some("cherry".to_string()); } } if gitdir.join("sequencer").exists() { return Some("cherry-or-revert".to_string()); } None }
use ansi_term::Colour::{Cyan, Green, Purple, Red}; use ansi_term::{ANSIGenericString, ANSIStrings}; use git2::{self, Repository, StatusOptions}; use std::fmt; pub struct Info { detailed: bool, head_description: Option<String>, current_action: Option<String>, index_change: usize, conflicted: usize, working_tree_modified: usize, untracked: usize, origin_ahead: usize, origin_behind: usize, } impl fmt::Display for Info { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let mut out = vec![]; if let Some(hash) = &self.head_description { out.push(Red.paint(format!("{}", hash))); } if self.detailed { if self.origin_ahead > 0 { out.push(Cyan.paint(format!("↑ {}", self.origin_ahead))); } if self.origin_behind > 0 { out.push(Cyan.paint(format!("↓ {}", self.origin_behind))); } if self.index_change == 0 && self.working_tree_modified == 0 && self.conflicted == 0 && self.untracked == 0 { out.push(Green.paint("✔")); } else { if self.index_change > 0 { out.push(Green.paint(format!("♦ {}", self.index_change))); } if self.conflicted > 0 { out.push(Red.paint(format!("✖ {}", self.conflicted))); } if self.working_tree_modified > 0 { out.push(ANSIGenericString::from(format!( "✚ {}", self.working_tree_modified ))); } if self.untracked > 0 { out.push(ANSIGenericString::from("…")); } } } else { if self.index_change > 0 || self.working_tree_modified > 0 || self.conflicted > 0 || self.untracked > 0 { out.push(Red.bold().paint("*")); } } if let Some(action) = &self.current_action { out.push(Purple.paint(format!(" {}", action))); } write!(f, "({})", ANSIStrings(&out).to_string()) } } pub fn get_status(r: &Repository, detailed: bool) -> Option<Info> { let (index_change, wt_change, conflicted, untracked) = count_files_statuses(r).unwrap_or((0,0,0,0)); let (ahead, behind) = get_ahead_behind(r).unwrap_or((0, 0)); Some(Info { detailed, head_description: get_head_shortname(r), current_action: get_action(r), conflicted, index_change, working_tree_modified: wt_change, untracked, origin_ahead: 
ahead, origin_behind: behind, }) } fn get_ahead_behind(r: &Repository) -> Option<(usize, usize)> { let head = r.head().ok()?; if !head.is_branch() { return None; } let head_name = head.shorthand()?; let head_branch = r.find_branch(head_name, git2::BranchType::Local).ok()?; let upstream = head_branch.upstream().ok()?; let head_oid = head.target()?; let upstream_oid = upstream.get().target()?; r.graph_ahead_behind(head_oid, upstream_oid).ok() } fn get_head_shortname(r: &Repository) -> Option<String> { let head = r.head().ok()?; if let Some(shorthand) = head.shorthand() { if shorthand != "HEAD" { return Some(shorthand.to_string()); } } let object = head.peel(git2::ObjectType::Commit).ok()?; let short_id = object.short_id().ok()?; Some(format!( ":{}", short_id.iter().map(|ch| *ch as char).collect::<String>() )) } fn count_files_statuses(r: &Repository) -> Option<(usize, usize, usize, usize)> { let mut opts = StatusOptions::new(); opts.include_untracked(true); fn count_files(statuses: &git2::Statuses<'_>, status: git2::Status) -> usize { statuses .iter() .filter(|entry| entry.status().intersects(status)) .count() } let statuses = r.statuses(Some(&mut opts)).ok()?; Some(( count_files( &statuses, git2::Status::INDEX_NEW | git2::Status::INDEX_MODIFIED | git2::Status::INDEX_DELETED | git2::Status::INDEX_RENAMED | git2::Status::INDEX_TYPECHANGE, ), count_files( &statuses, git2::Status::WT_MODIFIED | git2::Status::WT_DELETED | git2::Status::WT_TYPECHANGE | git2::Status::WT_RENAMED, ), count_files(&statuses, git2::Status::CONFLICTED), count_files(&statuses, git2::Status::WT_NEW), )) }
fn get_action(r: &Repository) -> Option<String> { let gitdir = r.path(); for tmp in &[ gitdir.join("rebase-apply"), gitdir.join("rebase"), gitdir.join("..").join(".dotest"), ] { if tmp.join("rebasing").exists() { return Some("rebase".to_string()); } if tmp.join("applying").exists() { return Some("am".to_string()); } if tmp.exists() { return Some("am/rebase".to_string()); } } for tmp in &[ gitdir.join("rebase-merge").join("interactive"), gitdir.join(".dotest-merge").join("interactive"), ] { if tmp.exists() { return Some("rebase-i".to_string()); } } for tmp in &[gitdir.join("rebase-merge"), gitdir.join(".dotest-merge")] { if tmp.exists() { return Some("rebase-m".to_string()); } } if gitdir.join("MERGE_HEAD").exists() { return Some("merge".to_string()); } if gitdir.join("BISECT_LOG").exists() { return Some("bisect".to_string()); } if gitdir.join("CHERRY_PICK_HEAD").exists() { if gitdir.join("sequencer").exists() { return Some("cherry-seq".to_string()); } else { return Some("cherry".to_string()); } } if gitdir.join("sequencer").exists() { return Some("cherry-or-revert".to_string()); } None }
function_block-full_function
[ { "content": "pub fn cli_arguments<'a>() -> App<'a, 'a> {\n\n let v = &[\n\n Arg::with_name(\"git-detailed\")\n\n .long(\"git-detailed\")\n\n .help(\"Prints detailed git status\"),\n\n Arg::with_name(\"shortened-path\")\n\n .long(\"shortened-path\")\n\n .help(\"~/c/rust instead of ~/.config/rust\"),\n\n ];\n\n SubCommand::with_name(\"precmd\").args(v)\n\n}\n", "file_path": "src/precmd.rs", "rank": 5, "score": 39572.85381106782 }, { "content": "pub fn cli_arguments<'a>() -> App<'a, 'a> {\n\n SubCommand::with_name(\"prompt\")\n\n .arg(\n\n Arg::with_name(\"last_return_code\")\n\n .short(\"r\")\n\n .takes_value(true),\n\n )\n\n .arg(Arg::with_name(\"keymap\").short(\"k\").takes_value(true))\n\n .arg(Arg::with_name(\"venv\").long(\"venv\").takes_value(true))\n\n}\n", "file_path": "src/prompt.rs", "rank": 6, "score": 39572.85381106782 }, { "content": "pub fn display(sub_matches: &ArgMatches<'_>) {\n\n let last_return_code = sub_matches.value_of(\"last_return_code\").unwrap_or(\"0\");\n\n let keymap = sub_matches.value_of(\"keymap\").unwrap_or(\"US\");\n\n let venv_name = sub_matches.value_of(\"venv\").unwrap_or(\"\");\n\n\n\n let symbol = match keymap {\n\n COMMAND_KEYMAP => COMMAND_SYMBOL,\n\n _ => INSERT_SYMBOL,\n\n };\n\n let shell_color = match (symbol, last_return_code) {\n\n (COMMAND_SYMBOL, _) => 3,\n\n (_, NO_ERROR) => 2, //green\n\n _ => 9, //red\n\n };\n\n\n\n let venv = match venv_name.len() {\n\n 0 => String::from(\"\"),\n\n _ => format!(\"%F{{11}}|{}|%f \", venv_name),\n\n };\n\n\n\n print!(\"{}%F{{{}}}{}%f \", venv, shell_color, symbol);\n\n}\n\n\n", "file_path": "src/prompt.rs", "rank": 7, "score": 38165.27474949879 }, { "content": "pub fn display(sub_matches: &ArgMatches<'_>) {\n\n let cwd = env::current_dir().unwrap();\n\n let git_repo = match Repository::discover(&cwd) {\n\n Ok(repo) => git::get_status(&repo, sub_matches.is_present(\"git-detailed\")),\n\n Err(_e) => None,\n\n };\n\n let cwd_style = match sub_matches.is_present(\"shortened-path\") 
{\n\n true => shorten_path(cwd.to_str().unwrap().to_string()),\n\n false => friendly_path(cwd.to_str().unwrap()),\n\n };\n\n\n\n let precmd = Precmd {\n\n time: Local::now(),\n\n git_status: git_repo,\n\n user: env::var(\"USER\").unwrap(),\n\n host: sys_info::hostname().unwrap(),\n\n cwd: cwd_style,\n\n };\n\n\n\n println!(\"{} \", precmd);\n\n}\n\n\n", "file_path": "src/precmd.rs", "rank": 8, "score": 38165.27474949879 }, { "content": "struct Precmd {\n\n time: chrono::DateTime<chrono::Local>,\n\n user: String,\n\n host: String,\n\n cwd: String,\n\n git_status: Option<git::Info>,\n\n}\n\n\n\nimpl fmt::Display for Precmd {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let git_status = match &self.git_status {\n\n Some(status) => format!(\"{}\", status),\n\n None => \"\".to_string(),\n\n };\n\n\n\n write!(\n\n f,\n\n \"{} {}@{} {} {}\",\n\n Yellow.paint(format!(\"{}\", self.time.format(\"%Y-%m-%d %H:%M:%S\"))),\n\n Cyan.paint(&self.user),\n\n Purple.paint(&self.host),\n\n White.bold().paint(&self.cwd),\n\n git_status,\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/precmd.rs", "rank": 9, "score": 33570.20969795341 }, { "content": "fn main() {\n\n let matches = App::new(\"Purs\")\n\n .setting(AppSettings::SubcommandRequired)\n\n .subcommand(precmd::cli_arguments())\n\n .subcommand(prompt::cli_arguments())\n\n .get_matches();\n\n\n\n match matches.subcommand() {\n\n (\"precmd\", Some(sub_matches)) => precmd::display(sub_matches),\n\n (\"prompt\", Some(sub_matches)) => prompt::display(sub_matches),\n\n _ => (),\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 10, "score": 20881.366596347296 }, { "content": "fn friendly_path(cwd: &str) -> String {\n\n match dirs::home_dir() {\n\n Some(path) => String::from(\n\n Regex::new(path.to_str().unwrap())\n\n .unwrap()\n\n .replace(cwd, \"~\"),\n\n ),\n\n _ => String::from(\"\"),\n\n }\n\n}\n\n\n", "file_path": "src/precmd.rs", "rank": 11, "score": 16521.12758528564 }, { "content": "fn shorten_path(cwd: String) 
-> String {\n\n tico(&friendly_path(&cwd))\n\n}\n\n\n", "file_path": "src/precmd.rs", "rank": 12, "score": 16521.12758528564 }, { "content": "use super::git;\n\nuse ansi_term::Colour::{Cyan, Purple, White, Yellow};\n\nuse chrono::Local;\n\nuse clap::{App, Arg, ArgMatches, SubCommand};\n\nuse git2::{self, Repository};\n\nuse regex::Regex;\n\nuse std::env;\n\nuse std::fmt;\n\nuse sys_info;\n\nuse tico::tico;\n\n\n", "file_path": "src/precmd.rs", "rank": 18, "score": 4.427497566449462 }, { "content": "use clap::{App, AppSettings};\n\n\n\nmod git;\n\nmod precmd;\n\nmod prompt;\n\n\n", "file_path": "src/main.rs", "rank": 20, "score": 1.3279443084646223 }, { "content": "use clap::{App, Arg, ArgMatches, SubCommand};\n\n\n\nconst INSERT_SYMBOL: &str = \" ▶\";\n\n//const INSERT_SYMBOL: &str = \"❯❯\";\n\n//const INSERT_SYMBOL: &str = \"»\";\n\nconst COMMAND_SYMBOL: &str = \"⬢\";\n\nconst COMMAND_KEYMAP: &str = \"vicmd\";\n\nconst NO_ERROR: &str = \"0\";\n\n\n", "file_path": "src/prompt.rs", "rank": 21, "score": 0.9864729148594338 }, { "content": "# Purs\n\n\n\nA [Pure](https://github.com/sindresorhus/pure)-inspired prompt in [Rust](https://www.rust-lang.org/).\n\n\n\nEven more minimal, definitively faster and at least as pretty as the original Pure by [Sindre Sohrus](https://github.com/sindresorhus).\n\n\n\nFork of https://github.com/xcambar/purs to suit my tastes :)\n\n\n\n![Screenshot of Purs in action](./static/imgs/prompt.png)\n\n\n\n## Installation — Usage\n\n\n\n1. `$ cargo build --release`\n\n1. 
Add the following to your ZSH configuration:\n\n\n\n```\n\nfunction zle-line-init zle-keymap-select {\n\n PROMPT=`/PATH/TO/PURS/target/release/purs prompt -k \"$KEYMAP\" -r \"$?\" --venv \"${${VIRTUAL_ENV:t}%-*}\"`\n\n zle reset-prompt\n\n}\n\nzle -N zle-line-init\n\nzle -N zle-keymap-select\n\n\n\nautoload -Uz add-zsh-hook\n\n\n\nfunction _prompt_purs_precmd() {\n\n /PATH/TO/PURS/target/release/purs precmd\n\n}\n\nadd-zsh-hook precmd _prompt_purs_precmd\n\n\n\n```\n\n\n\n# License\n\n\n\nMIT, see LICENSE file.\n", "file_path": "README.md", "rank": 23, "score": 0.41833179200451687 } ]
Rust
src/app/widgets/tag.rs
AngelOfSol/art-organize
ed4ad1d936a585a80e9f98b672d7875edb553f5d
use db::{ commands::{AttachCategory, EditTag}, Db, Tag, TagId, }; use imgui::{im_str, Selectable, StyleColor, Ui}; use super::{category, combo_box, confirm::confirm_delete_popup, date}; pub fn view(tag_id: TagId, db: &Db, ui: &Ui<'_>) { let tag = &db[tag_id]; ui.text_wrapped(&im_str!("Name: {}", tag.name)); ui.text_wrapped(&im_str!("Description: {}", tag.description)); date::view("Date Added", &tag.added, ui); if let Some(category_id) = db.category_for_tag(tag_id) { category::link(ui, &db[category_id]); } } pub enum EditTagResponse { None, Delete, Edit(EditTag), AttachCategory(AttachCategory), } pub fn edit(tag_id: TagId, db: &Db, ui: &Ui<'_>) -> EditTagResponse { let tag = &db[tag_id]; let mut buf = tag.name.clone().into(); ui.input_text(im_str!("Name"), &mut buf) .resize_buffer(true) .build(); if ui.is_item_deactivated_after_edit() { return EditTagResponse::Edit(EditTag { id: tag_id, data: Tag { name: buf.to_string(), ..tag.clone() }, }); } let mut buf = tag.description.clone().into(); ui.input_text_multiline(im_str!("Description"), &mut buf, [0.0, 100.0]) .resize_buffer(true) .build(); if ui.is_item_deactivated_after_edit() { return EditTagResponse::Edit(EditTag { id: tag_id, data: Tag { description: buf.to_string(), ..tag.clone() }, }); } if let Some(added) = date::edit(im_str!("Date Added"), &tag.added, ui) { return EditTagResponse::Edit(EditTag { id: tag_id, data: Tag { added, ..tag.clone() }, }); } let x = std::iter::once(None).chain(db.categories().map(|(id, _)| Some(id))); if let Some(new_id) = combo_box( ui, &im_str!("Category"), x, &db.category_for_tag(tag_id), |id| match id { Some(category_id) => im_str!("{}", &db[category_id].name), None => Default::default(), }, ) { return EditTagResponse::AttachCategory(AttachCategory { src: tag_id, dest: new_id, }); } if ui.button(im_str!("Delete")) { ui.open_popup(im_str!("Confirm Delete")); } if confirm_delete_popup(ui) { EditTagResponse::Delete } else { EditTagResponse::None } } pub enum ItemViewResponse { None, 
Add, AddNegated, Open, } pub fn item_view(ui: &Ui, db: &Db, tag_id: TagId) -> ItemViewResponse { item_view_with_count(ui, db, tag_id, db.pieces_for_tag(tag_id).count()) } pub fn item_view_with_count(ui: &Ui, db: &Db, tag_id: TagId, count: usize) -> ItemViewResponse { let tag = &db[tag_id]; let button_size = [ ui.calc_text_size(&im_str!("+"))[0], ui.text_line_height_with_spacing(), ]; let label = im_str!("{}", tag.name); let _id = ui.push_id(&label); if Selectable::new(im_str!("+")).size(button_size).build(ui) { return ItemViewResponse::Add; } if ui.is_item_hovered() { ui.tooltip_text(&im_str!("Unimplemented.")); } ui.same_line(); if Selectable::new(im_str!("!")).size(button_size).build(ui) { return ItemViewResponse::AddNegated; } if ui.is_item_hovered() { ui.tooltip_text(&im_str!("Unimplemented.")); } ui.same_line(); let result = { let color = if let Some(category_id) = db.category_for_tag(tag_id) { db[category_id].raw_color() } else { ui.style_color(StyleColor::Text) }; let _color = ui.push_style_color(StyleColor::Text, color); if Selectable::new(&label) .size([0.0, ui.text_line_height_with_spacing()]) .build(ui) { ItemViewResponse::Open } else { ItemViewResponse::None } }; if ui.is_item_hovered() && tag.description.chars().any(|c| !c.is_whitespace()) { ui.tooltip(|| ui.text(&im_str!("{}", tag.description))); } ui.same_line(); ui.text_colored([0.4, 0.4, 0.4, 1.0], &im_str!("{}", count)); result } pub enum InPieceViewResponse { None, Open, Remove, } pub fn in_piece_view(ui: &Ui, db: &Db, tag_id: TagId) -> InPieceViewResponse { let tag = &db[tag_id]; let button_size = [ui.text_line_height_with_spacing(); 2]; let label = im_str!("{}", tag.name); let _id = ui.push_id(&label); if Selectable::new(im_str!("-")).size(button_size).build(ui) { return InPieceViewResponse::Remove; } ui.same_line(); let color = if let Some(category_id) = db.category_for_tag(tag_id) { db[category_id].raw_color() } else { ui.style_color(StyleColor::Text) }; let result = { let _color = 
ui.push_style_color(StyleColor::Text, color); if Selectable::new(&label).build(ui) { InPieceViewResponse::Open } else { InPieceViewResponse::None } }; if ui.is_item_hovered() && tag.description.chars().any(|c| !c.is_whitespace()) { ui.tooltip(|| ui.text(&im_str!("{}", tag.description))); } result }
use db::{ commands::{AttachCategory, EditTag}, Db, Tag, TagId, }; use imgui::{im_str, Selectable, StyleColor, Ui}; use super::{category, combo_box, confirm::confirm_delete_popup, date}; pub fn view(tag_id: TagId, db: &Db, ui: &Ui<'_>) { let tag = &db[tag_id]; ui.text_wrapped(&im_str!("Name: {}", tag.name)); ui.text_wrapped(&im_str!("Description: {}", tag.description)); date::view("Date Added", &tag.added, ui); if let Some(category_id) = db.category_for_tag(tag_id) { category::link(ui, &db[category_id]); } } pub enum EditTagResponse { None, Delete, Edit(EditTag), AttachCategory(AttachCategory), } pub fn edit(tag_id: TagId, db: &Db, ui: &Ui<'_>) -> EditTagResponse { let tag = &db[tag_id]; let mut buf = tag.name.clone().into(); ui.input_text(im_str!("Name"), &mut buf) .resize_buffer(true) .build(); if ui.is_item_deactivated_after_edit() { return EditTagResponse::Edit(EditTag { id: tag_id, data: Tag { name: buf.to_string(), ..tag.clone() }, }); } let mut buf = tag.description.clone().into(); ui.input_text_multiline(im_str!("Description"), &mut buf, [0.0, 100.0]) .resize_buffer(true) .build(); if ui.is_item_deactivated_after_edit() { return EditTagResponse::Edit(EditTag { id: tag_id, data: Tag { description: buf.to_string(), ..tag.clone() }, }); } if let Some(added) = date::edit(im_str!("Date Added"), &tag.added, ui) { return EditTagResponse::Edit(EditTag { id: tag_id, data: Tag { added, ..tag.clone() }, }); } let x = std::iter::once(None).chain(db.categories().map(|(id, _)| Some(id))); if let Some(new_id) =
{ return EditTagResponse::AttachCategory(AttachCategory { src: tag_id, dest: new_id, }); } if ui.button(im_str!("Delete")) { ui.open_popup(im_str!("Confirm Delete")); } if confirm_delete_popup(ui) { EditTagResponse::Delete } else { EditTagResponse::None } } pub enum ItemViewResponse { None, Add, AddNegated, Open, } pub fn item_view(ui: &Ui, db: &Db, tag_id: TagId) -> ItemViewResponse { item_view_with_count(ui, db, tag_id, db.pieces_for_tag(tag_id).count()) } pub fn item_view_with_count(ui: &Ui, db: &Db, tag_id: TagId, count: usize) -> ItemViewResponse { let tag = &db[tag_id]; let button_size = [ ui.calc_text_size(&im_str!("+"))[0], ui.text_line_height_with_spacing(), ]; let label = im_str!("{}", tag.name); let _id = ui.push_id(&label); if Selectable::new(im_str!("+")).size(button_size).build(ui) { return ItemViewResponse::Add; } if ui.is_item_hovered() { ui.tooltip_text(&im_str!("Unimplemented.")); } ui.same_line(); if Selectable::new(im_str!("!")).size(button_size).build(ui) { return ItemViewResponse::AddNegated; } if ui.is_item_hovered() { ui.tooltip_text(&im_str!("Unimplemented.")); } ui.same_line(); let result = { let color = if let Some(category_id) = db.category_for_tag(tag_id) { db[category_id].raw_color() } else { ui.style_color(StyleColor::Text) }; let _color = ui.push_style_color(StyleColor::Text, color); if Selectable::new(&label) .size([0.0, ui.text_line_height_with_spacing()]) .build(ui) { ItemViewResponse::Open } else { ItemViewResponse::None } }; if ui.is_item_hovered() && tag.description.chars().any(|c| !c.is_whitespace()) { ui.tooltip(|| ui.text(&im_str!("{}", tag.description))); } ui.same_line(); ui.text_colored([0.4, 0.4, 0.4, 1.0], &im_str!("{}", count)); result } pub enum InPieceViewResponse { None, Open, Remove, } pub fn in_piece_view(ui: &Ui, db: &Db, tag_id: TagId) -> InPieceViewResponse { let tag = &db[tag_id]; let button_size = [ui.text_line_height_with_spacing(); 2]; let label = im_str!("{}", tag.name); let _id = ui.push_id(&label); if 
Selectable::new(im_str!("-")).size(button_size).build(ui) { return InPieceViewResponse::Remove; } ui.same_line(); let color = if let Some(category_id) = db.category_for_tag(tag_id) { db[category_id].raw_color() } else { ui.style_color(StyleColor::Text) }; let result = { let _color = ui.push_style_color(StyleColor::Text, color); if Selectable::new(&label).build(ui) { InPieceViewResponse::Open } else { InPieceViewResponse::None } }; if ui.is_item_hovered() && tag.description.chars().any(|c| !c.is_whitespace()) { ui.tooltip(|| ui.text(&im_str!("{}", tag.description))); } result }
combo_box( ui, &im_str!("Category"), x, &db.category_for_tag(tag_id), |id| match id { Some(category_id) => im_str!("{}", &db[category_id].name), None => Default::default(), }, )
call_expression
[ { "content": "pub fn view_tags(piece_id: PieceId, db: &Db, ui: &Ui<'_>) -> Option<(TagId, ItemViewResponse)> {\n\n for category_id in db\n\n .tags_for_piece(piece_id)\n\n .flat_map(|tag| db.category_for_tag(tag))\n\n .sorted_by_key(|category_id| &db[category_id].name)\n\n .dedup()\n\n {\n\n ui.text(&im_str!(\"{}\", db[category_id].name));\n\n\n\n for tag_id in db\n\n .tags_for_piece(piece_id)\n\n .filter(|tag_id| db.category_for_tag(*tag_id) == Some(category_id))\n\n .sorted_by_key(|tag_id| &db[tag_id].name)\n\n {\n\n match tag::item_view(ui, db, tag_id) {\n\n ItemViewResponse::None => {}\n\n response => return Some((tag_id, response)),\n\n }\n\n }\n\n ui.spacing();\n", "file_path": "src/app/widgets/piece.rs", "rank": 5, "score": 277706.54229325376 }, { "content": "#[allow(clippy::or_fun_call)]\n\npub fn edit_tags(piece_id: PieceId, db: &Db, ui: &Ui<'_>) -> Option<EditPieceResponse> {\n\n let mut ret = None;\n\n\n\n for category_id in db\n\n .categories()\n\n .map(|(id, _)| id)\n\n .sorted_by_key(|category_id| &db[category_id].name)\n\n .dedup()\n\n {\n\n let _id = ui.push_id(&im_str!(\"{}\", category_id));\n\n ui.text(&im_str!(\"{}\", db[category_id].name));\n\n\n\n for tag_id in db\n\n .tags_for_piece(piece_id)\n\n .filter(|tag_id| db.category_for_tag(*tag_id) == Some(category_id))\n\n .sorted_by_key(|tag_id| &db[tag_id].name)\n\n {\n\n match tag::in_piece_view(ui, db, tag_id) {\n\n InPieceViewResponse::None => (),\n\n InPieceViewResponse::Open => {\n", "file_path": "src/app/widgets/piece.rs", "rank": 6, "score": 256609.09022254968 }, { "content": "pub fn tooltip(blob_id: BlobId, db: &Db, ui: &Ui<'_>) {\n\n let blob = &db[blob_id];\n\n ui.text(&im_str!(\"File Name: {}\", blob.file_name));\n\n ui.text(&im_str!(\"Blob Type: {}\", blob.blob_type));\n\n ui.text(&im_str!(\"Hash: {:x}\", blob.hash));\n\n date::view(\"Added\", &blob.added, ui);\n\n}\n\n\n", "file_path": "src/app/widgets/blob.rs", "rank": 7, "score": 254888.6484130959 }, { "content": "pub fn 
view(blob_id: BlobId, db: &Db, ui: &Ui<'_>) {\n\n let blob = &db[blob_id];\n\n ui.text_wrapped(&im_str!(\"File Name: {}\", blob.file_name));\n\n ui.text_wrapped(&im_str!(\"Blob Type: {}\", blob.blob_type));\n\n ui.text_wrapped(&im_str!(\"Hash: {:x}\", blob.hash));\n\n date::view(\"Added\", &blob.added, ui);\n\n}\n\n\n\npub enum EditBlobResponse {\n\n None,\n\n Changed(EditBlob),\n\n Deleted(BlobId),\n\n}\n\n\n", "file_path": "src/app/widgets/blob.rs", "rank": 8, "score": 254888.6484130959 }, { "content": "pub fn tooltip(piece_id: PieceId, db: &Db, ui: &Ui<'_>) {\n\n let piece = &db[piece_id];\n\n ui.text(&im_str!(\"Name: {}\", piece.name));\n\n ui.text(&im_str!(\"Source Type: {}\", piece.source_type));\n\n ui.text(&im_str!(\"Media Type: {}\", piece.media_type));\n\n date::view(\"Date Added\", &piece.added, ui);\n\n\n\n if let Some(price) = piece.base_price {\n\n ui.text(im_str!(\"Price: ${}\", price));\n\n }\n\n if let Some(price) = piece.tip_price {\n\n ui.text(im_str!(\"Tipped: ${}\", price));\n\n }\n\n}\n", "file_path": "src/app/widgets/piece.rs", "rank": 9, "score": 254888.6484130959 }, { "content": "pub fn view(category_id: CategoryId, db: &Db, ui: &Ui<'_>) {\n\n let category = &db[category_id];\n\n ui.text_wrapped(&im_str!(\"Name: {}\", category.name));\n\n ui.text_wrapped(&im_str!(\"Description: {}\", category.description));\n\n ui.text(im_str!(\"Color: \"));\n\n ui.same_line();\n\n ui.text_colored(\n\n category.raw_color(),\n\n &im_str!(\n\n \"#{:02X}{:02X}{:02X}\",\n\n category.color[0],\n\n category.color[1],\n\n category.color[2]\n\n ),\n\n );\n\n date::view(\"Date Added\", &category.added, ui);\n\n}\n\n\n", "file_path": "src/app/widgets/category.rs", "rank": 10, "score": 254888.6484130959 }, { "content": "pub fn view(piece_id: PieceId, db: &Db, ui: &Ui<'_>) {\n\n let piece = &db[piece_id];\n\n ui.text_wrapped(&im_str!(\"Name: {}\", piece.name));\n\n ui.text_wrapped(&im_str!(\"Source Type: {}\", piece.source_type));\n\n ui.text_wrapped(&im_str!(\"Media 
Type: {}\", piece.media_type));\n\n date::view(\"Date Added\", &piece.added, ui);\n\n\n\n if let Some(price) = piece.base_price {\n\n ui.text(im_str!(\"Price: ${}\", price));\n\n }\n\n if let Some(price) = piece.tip_price {\n\n ui.text(im_str!(\"Tipped: ${}\", price));\n\n }\n\n}\n", "file_path": "src/app/widgets/piece.rs", "rank": 11, "score": 254888.6484130959 }, { "content": "pub fn edit(category_id: CategoryId, db: &Db, ui: &Ui<'_>) -> EditCategoryResponse {\n\n let category = &db[category_id];\n\n\n\n let mut buf = category.name.clone().into();\n\n ui.input_text(im_str!(\"Name\"), &mut buf)\n\n .resize_buffer(true)\n\n .build();\n\n\n\n if ui.is_item_deactivated_after_edit() {\n\n return EditCategoryResponse::Edit(EditCategory {\n\n id: category_id,\n\n data: Category {\n\n name: buf.to_string(),\n\n ..category.clone()\n\n },\n\n });\n\n }\n\n\n\n let mut buf = category.description.clone().into();\n\n ui.input_text_multiline(im_str!(\"Description\"), &mut buf, [0.0, 100.0])\n", "file_path": "src/app/widgets/category.rs", "rank": 12, "score": 242049.83804129087 }, { "content": "pub fn edit(blob_id: BlobId, db: &Db, ui: &Ui<'_>) -> EditBlobResponse {\n\n let blob = &db[blob_id];\n\n\n\n let mut buf = blob.file_name.clone().into();\n\n ui.input_text(im_str!(\"File Name\"), &mut buf)\n\n .resize_buffer(true)\n\n .build();\n\n\n\n if ui.is_item_deactivated_after_edit() {\n\n return EditBlobResponse::Changed(EditBlob {\n\n id: blob_id,\n\n data: Blob {\n\n file_name: buf.to_string(),\n\n ..blob.clone()\n\n },\n\n });\n\n }\n\n\n\n if let Some(blob_type) = enum_combo_box(ui, im_str!(\"Blob Type\"), &blob.blob_type) {\n\n return EditBlobResponse::Changed(EditBlob {\n", "file_path": "src/app/widgets/blob.rs", "rank": 13, "score": 242049.83804129087 }, { "content": "pub fn edit(piece_id: PieceId, db: &Db, ui: &Ui<'_>) -> Option<EditPieceResponse> {\n\n let mut ret = None;\n\n\n\n let piece = &db[piece_id];\n\n\n\n let mut buf = piece.name.clone().into();\n\n 
ui.input_text(im_str!(\"Name\"), &mut buf)\n\n .resize_buffer(true)\n\n .build();\n\n\n\n if ui.is_item_deactivated_after_edit() {\n\n ret.get_or_insert(EditPieceResponse::Edit(EditPiece {\n\n id: piece_id,\n\n data: Piece {\n\n name: buf.to_string(),\n\n ..piece.clone()\n\n },\n\n }));\n\n }\n\n\n", "file_path": "src/app/widgets/piece.rs", "rank": 14, "score": 235715.2464241285 }, { "content": "pub fn data_file(mut path: PathBuf) -> PathBuf {\n\n path.push(\"data.aodb\");\n\n path\n\n}\n\n\n\nimpl DbBackend {\n\n pub async fn save(&self) -> anyhow::Result<()> {\n\n fs::write(\n\n data_file(self.root.clone()),\n\n bincode::serialize::<Db>(self)?,\n\n )\n\n .await?;\n\n Ok(())\n\n }\n\n\n\n pub async fn from_directory(root: PathBuf) -> anyhow::Result<Self> {\n\n let db = bincode::deserialize::<Db>(&fs::read(data_file(root.clone())).await?)?;\n\n Ok(Self {\n\n root,\n\n inner: UndoStack::new(db),\n", "file_path": "src/backend.rs", "rank": 15, "score": 208258.82356672199 }, { "content": "pub fn confirm_delete_popup(ui: &Ui<'_>) -> bool {\n\n let mut ret = false;\n\n PopupModal::new(im_str!(\"Confirm Delete\"))\n\n .movable(false)\n\n .resizable(false)\n\n .collapsible(false)\n\n .always_auto_resize(true)\n\n .build(ui, || {\n\n ui.text(im_str!(\"Are you sure you want to delete this?\"));\n\n\n\n if ui.button(im_str!(\"Yes, delete.\")) {\n\n ret = true;\n\n ui.close_current_popup();\n\n }\n\n ui.same_line();\n\n\n\n if ui.button(im_str!(\"Cancel\")) {\n\n ui.close_current_popup();\n\n }\n\n });\n\n\n\n ret\n\n}\n", "file_path": "src/app/widgets/confirm.rs", "rank": 16, "score": 204717.26185370015 }, { "content": "pub fn view(label: &str, value: &NaiveDate, ui: &Ui<'_>) {\n\n ui.text(im_str!(\"{}: {}\", label, value.format(DATE_FORMAT)));\n\n}\n\n\n", "file_path": "src/app/widgets/date.rs", "rank": 17, "score": 197795.37352817736 }, { "content": "pub fn edit(label: &ImStr, value: &NaiveDate, ui: &Ui<'_>) -> Option<NaiveDate> {\n\n let mut buf = im_str!(\"{}\", 
value.format(DATE_FORMAT));\n\n\n\n if ui\n\n .input_text(label, &mut buf)\n\n .enter_returns_true(true)\n\n .build()\n\n {\n\n NaiveDate::parse_from_str(buf.to_str(), \"%m/%d/%Y\").ok()\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "src/app/widgets/date.rs", "rank": 18, "score": 187517.26251336723 }, { "content": "pub fn link(ui: &imgui::Ui, category: &db::Category) -> bool {\n\n let _color = ui.push_style_color(StyleColor::Text, category.raw_color());\n\n Selectable::new(&im_str!(\"{}\", category.name))\n\n .span_all_columns(false)\n\n .build(ui)\n\n}\n\n\n\npub enum EditCategoryResponse {\n\n None,\n\n Delete,\n\n Edit(EditCategory),\n\n}\n", "file_path": "src/app/widgets/category.rs", "rank": 19, "score": 187176.90591074817 }, { "content": "pub fn modify_style(style: &mut Style) {\n\n style.tab_rounding = 0.0;\n\n style.grab_rounding = 0.0;\n\n style.child_rounding = 0.0;\n\n style.frame_rounding = 0.0;\n\n style.popup_rounding = 0.0;\n\n style.window_rounding = 0.0;\n\n style.scrollbar_rounding = 0.0;\n\n\n\n let save = style[ModalWindowDimBg];\n\n\n\n for color in style.colors.iter_mut() {\n\n let alpha = color[3];\n\n color[3] = 1.0;\n\n\n\n for rgb in color.iter_mut().take(3) {\n\n *rgb *= alpha;\n\n }\n\n }\n\n // unactive windows shoudl still havethe active style\n", "file_path": "src/style.rs", "rank": 20, "score": 151406.3791217151 }, { "content": "pub fn thumbnail_button(label: &ImStr, thumbnail: &TextureImage, ui: &Ui<'_>) -> ThumbnailResponse {\n\n let mut response = ThumbnailResponse::None;\n\n imgui::ChildWindow::new(label)\n\n .size([THUMBNAIL_SIZE + IMAGE_BUFFER; 2])\n\n .draw_background(false)\n\n .build(ui, || {\n\n let (size, padding) = rescale(thumbnail, [THUMBNAIL_SIZE; 2]);\n\n ui.set_cursor_pos([\n\n ui.cursor_pos()[0] + padding[0] / 2.0 + IMAGE_BUFFER / 2.0,\n\n ui.cursor_pos()[1] + padding[1] / 2.0 + IMAGE_BUFFER / 2.0,\n\n ]);\n\n\n\n if imgui::ImageButton::new(thumbnail.data, size).build(ui) {\n\n response = 
ThumbnailResponse::Clicked\n\n } else if ui.is_item_hovered() {\n\n response = ThumbnailResponse::Hovered\n\n }\n\n });\n\n\n\n response\n\n}\n\n\n", "file_path": "src/app/widgets/blob.rs", "rank": 21, "score": 149949.16726887098 }, { "content": "/// Returns Some(bool) evaluating the condition, returning None\n\n/// if the condition doesn't make sense (non-existent category, or tag for example)\n\nfn evaluate_test(test: &Condition, (id, piece): &(PieceId, &Piece), db: &Db) -> Option<bool> {\n\n match test {\n\n Condition::Tag(tag_name) => {\n\n let (searched, _) = db.tags().find(|(_, tag)| &tag.name == tag_name)?;\n\n // either the piece contains the tag\n\n // OR\n\n // no tags of this name exist in the database\n\n Some(db.tags_for_piece(*id).any(|tag| tag == searched))\n\n }\n\n Condition::TagWithCategory(category_name, tag_name) => {\n\n let (searched, _) =\n\n db.tags()\n\n .filter(|(_, tag)| &tag.name == tag_name)\n\n .find(|(searched, _)| {\n\n db.category_for_tag(*searched)\n\n .map(|category_id| &db[category_id].name)\n\n == category_name.as_ref()\n\n })?;\n\n // either the piece contains the category:tag\n\n // OR\n", "file_path": "search/src/execute.rs", "rank": 22, "score": 145324.31069801567 }, { "content": "pub fn render<'a, I: Iterator<Item = BlobId>, T: Fn(BlobId), L: Fn(BlobId) -> &'a str>(\n\n ui: &Ui,\n\n blobs: I,\n\n gui_handle: &GuiHandle,\n\n thumbnails: &BTreeMap<BlobId, TextureImage>,\n\n loading: L,\n\n tooltip: T,\n\n) -> Option<BlobId> {\n\n let mut ret = None;\n\n\n\n for blob in blobs {\n\n let label = im_str!(\"##{:?}\", blob);\n\n if let Some(thumbnail) = thumbnails.get(&blob) {\n\n // TODO integrate loading button into it\n\n match blob::thumbnail_button(&label, thumbnail, ui) {\n\n blob::ThumbnailResponse::None => {}\n\n blob::ThumbnailResponse::Hovered => {\n\n ui.tooltip(|| {\n\n tooltip(blob);\n\n });\n", "file_path": "src/app/widgets/gallery.rs", "rank": 23, "score": 131984.26552713432 }, { "content": "fn items(ui: &Ui<'_>, 
labels: &[&str]) {\n\n for label in labels {\n\n wrapped_bullet(ui, label)\n\n }\n\n}\n\n\n", "file_path": "src/app/gui_state/help.rs", "rank": 24, "score": 130361.16684131057 }, { "content": "fn wrapped_bullet(ui: &Ui<'_>, s: &str) {\n\n ui.bullet();\n\n ui.same_line();\n\n ui.text_wrapped(&im_str!(\"{}\", s));\n\n}\n", "file_path": "src/app/gui_state/help.rs", "rank": 25, "score": 130361.16684131057 }, { "content": "pub trait DeleteFrom {\n\n fn delete_from(self, db: &mut Db) -> bool;\n\n}\n\n\n", "file_path": "db/src/traits.rs", "rank": 26, "score": 126269.2501266599 }, { "content": "fn sub_heading<F: FnOnce()>(ui: &Ui<'_>, label: &str, f: F) {\n\n TreeNode::new(&im_str!(\"{}\", label))\n\n .label(&im_str!(\"{}\", label))\n\n .build(ui, f);\n\n}\n\n\n", "file_path": "src/app/gui_state/help.rs", "rank": 27, "score": 123762.29381941754 }, { "content": "pub fn start_db_task(backend: Arc<RwLock<DbBackend>>) -> DbHandle {\n\n let (tx, rx) = mpsc::unbounded_channel();\n\n let (send_dirty, recv_dirty) = watch::channel(());\n\n\n\n tokio::spawn(db_actor(rx, Arc::new(send_dirty), backend.clone()));\n\n tokio::spawn(save_db_actor(recv_dirty, backend.clone()));\n\n\n\n DbHandle {\n\n backend,\n\n outgoing: tx,\n\n }\n\n}\n\n\n\nasync fn db_actor(\n\n mut incoming: mpsc::UnboundedReceiver<AppAction>,\n\n dirty: Arc<watch::Sender<()>>,\n\n data: Arc<RwLock<DbBackend>>,\n\n) {\n\n while let Some(action) = incoming.recv().await {\n\n match action {\n", "file_path": "src/backend/actor.rs", "rank": 28, "score": 123394.68271385256 }, { "content": "pub fn enum_combo_box<T: IntoEnumIterator + Display + Eq + Debug>(\n\n ui: &Ui<'_>,\n\n label: &'_ ImStr,\n\n value: &T,\n\n) -> Option<T> {\n\n combo_box(ui, label, T::iter(), value, |item| item.to_string().into())\n\n}\n\n\n", "file_path": "src/app/widgets.rs", "rank": 29, "score": 123349.20891226169 }, { "content": "pub trait IdExist {\n\n fn exists_in(self, db: &Db) -> bool;\n\n}\n", "file_path": "db/src/traits.rs", "rank": 30, 
"score": 122844.27429844928 }, { "content": "#[test]\n\npub fn test_layout() {\n\n let layout = Column::default()\n\n .push(LayoutIds::MenuBar, Dimension::Pixels(20.0))\n\n .push(LayoutIds::SearchBar, Dimension::Pixels(100.0))\n\n .push(\n\n Row::default()\n\n .push(LayoutIds::Tags, Dimension::Pixels(240.0))\n\n .push(LayoutIds::Main, Dimension::Flex(1.0)),\n\n Dimension::Flex(1.0),\n\n );\n\n\n\n let mut data = HashMap::new();\n\n\n\n layout.layout(\n\n LayoutRectangle {\n\n position: Vec2::ZERO,\n\n size: Vec2::new(1280.0, 720.0),\n\n },\n\n &mut data,\n\n );\n", "file_path": "src/layout.rs", "rank": 31, "score": 112310.70686752617 }, { "content": "pub fn run_event_loop(\n\n event_loop: EventLoop<()>,\n\n mut context: GuiContext,\n\n outgoing_files: mpsc::Sender<std::path::PathBuf>,\n\n mut app: App,\n\n) {\n\n event_loop.run(move |event, _, control_flow| {\n\n *control_flow = ControlFlow::Poll;\n\n\n\n app.update(&mut context);\n\n\n\n match &event {\n\n Event::WindowEvent {\n\n event: WindowEvent::Resized(_),\n\n ..\n\n } => {\n\n let size = context.window.inner_size();\n\n if size.width != 0 && size.height != 0 {\n\n let sc_desc = wgpu::SwapChainDescriptor {\n\n usage: wgpu::TextureUsage::RENDER_ATTACHMENT,\n", "file_path": "src/gui.rs", "rank": 32, "score": 110201.5728860361 }, { "content": "pub fn install() -> Result<()> {\n\n let hkcu = RegKey::predef(HKEY_CURRENT_USER);\n\n let classes =\n\n hkcu.open_subkey_with_flags(r\"SOFTWARE\\Classes\", winreg::enums::KEY_ALL_ACCESS)?;\n\n\n\n create_background_folder_submenu(&classes)?;\n\n create_background_folder_context_menu(&classes)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "contextual/src/windows.rs", "rank": 33, "score": 106968.1045914112 }, { "content": "pub fn remove() -> Result<()> {\n\n let hkcu = RegKey::predef(HKEY_CURRENT_USER);\n\n let classes =\n\n hkcu.open_subkey_with_flags(r\"SOFTWARE\\Classes\", winreg::enums::KEY_ALL_ACCESS)?;\n\n let _ = 
classes.delete_subkey_all(r\"Directory\\Background\\shell\\ArtOrganize\");\n\n let _ = classes.delete_subkey_all(r\"ArtOrganize.Background\");\n\n\n\n Ok(())\n\n}\n", "file_path": "contextual/src/windows.rs", "rank": 34, "score": 106968.1045914112 }, { "content": "pub fn start_gui_task(\n\n db: DbHandle,\n\n gui_state: Arc<RwLock<GuiState>>,\n\n outgoing_images: std_mpsc::Sender<(BlobId, RawImage, bool)>,\n\n incoming_files: std_mpsc::Receiver<PathBuf>,\n\n) -> GuiHandle {\n\n let (tx, rx) = mpsc::unbounded_channel();\n\n\n\n tokio::spawn(gui_actor(rx, db.clone(), gui_state, outgoing_images));\n\n\n\n GuiHandle {\n\n outgoing: GuiActionHandle { outgoing: tx, db },\n\n incoming_files,\n\n }\n\n}\n\n\n\nasync fn gui_actor(\n\n mut incoming: mpsc::UnboundedReceiver<GuiAction>,\n\n db: DbHandle,\n\n gui_state: Arc<RwLock<GuiState>>,\n", "file_path": "src/app/gui_state.rs", "rank": 35, "score": 106355.74846458787 }, { "content": "pub fn combo_box<I: Iterator<Item = T>, T: Eq + Debug, F: Fn(&T) -> ImString>(\n\n ui: &Ui<'_>,\n\n label: &'_ ImStr,\n\n items: I,\n\n value: &T,\n\n f: F,\n\n) -> Option<T> {\n\n let mut ret = None;\n\n ComboBox::new(label)\n\n .preview_mode(ComboBoxPreviewMode::Full)\n\n .preview_value(&im_str!(\"{}\", f(value)))\n\n .build(ui, || {\n\n for (count, item) in items.enumerate() {\n\n let _id = ui.push_id(count as i32);\n\n if Selectable::new(&im_str!(\"{}\", f(&item)))\n\n .selected(value == &item)\n\n .build(ui)\n\n {\n\n ret = Some(item);\n\n }\n\n }\n\n });\n\n ret\n\n}\n", "file_path": "src/app/widgets.rs", "rank": 36, "score": 102845.28180905909 }, { "content": "pub fn try_connect() -> Option<IpcSender> {\n\n let stream = LocalSocketStream::connect(ADDRESS).ok()?;\n\n Some(IpcSender { stream })\n\n}\n", "file_path": "ipc/src/lib.rs", "rank": 37, "score": 98565.56918148295 }, { "content": "pub fn update_app() -> anyhow::Result<Status> {\n\n let releases = self_update::backends::github::ReleaseList::configure()\n\n 
.repo_owner(\"AngelOfSol\")\n\n .repo_name(\"art-organize\")\n\n .with_target(get_target())\n\n .build()?\n\n .fetch()?;\n\n\n\n let status = github::Update::configure()\n\n .repo_owner(\"AngelOfSol\")\n\n .repo_name(\"art-organize\")\n\n .bin_name(\"art-organize\")\n\n .current_version(cargo_crate_version!())\n\n .show_output(false)\n\n .target_version_tag(&releases[0].version)\n\n .no_confirm(true)\n\n .build()?\n\n .update()?;\n\n Ok(status)\n\n}\n", "file_path": "src/updater.rs", "rank": 38, "score": 97877.42001373382 }, { "content": "pub fn check_for_new_releases() -> anyhow::Result<Option<String>> {\n\n let mut releases = self_update::backends::github::ReleaseList::configure()\n\n .repo_owner(\"AngelOfSol\")\n\n .repo_name(\"art-organize\")\n\n .with_target(get_target())\n\n .build()?\n\n .fetch()?;\n\n\n\n Ok(\n\n self_update::version::bump_is_greater(cargo_crate_version!(), &releases[0].version)?\n\n .then(|| releases.remove(0).version),\n\n )\n\n}\n", "file_path": "src/updater.rs", "rank": 39, "score": 92302.94523661025 }, { "content": "pub trait EditFrom {\n\n fn edit_from(self, db: &mut Db) -> bool;\n\n}\n", "file_path": "db/src/traits.rs", "rank": 40, "score": 91632.04209585104 }, { "content": "pub fn parse_search(input: &str) -> IResult<&str, Search> {\n\n alt((parse_and, parse_or, parse_paren, parse_negate, parse_test))(input)\n\n}\n\n\n", "file_path": "search/src/parse.rs", "rank": 41, "score": 88817.50710379823 }, { "content": "fn get_file() -> PathBuf {\n\n let mut config_file = PROJECT.config_dir().to_path_buf();\n\n config_file.push(\"config.toml\");\n\n\n\n config_file\n\n}\n\n\n\nimpl Config {\n\n pub fn load() -> anyhow::Result<Self> {\n\n let value = toml::from_str(&std::fs::read_to_string(get_file())?)?;\n\n Ok(value)\n\n }\n\n\n\n pub fn save(&self) -> anyhow::Result<()> {\n\n if !PROJECT.config_dir().exists() {\n\n std::fs::create_dir_all(PROJECT.config_dir())?;\n\n }\n\n\n\n std::fs::write(get_file(), 
&toml::to_string_pretty(self)?)?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 42, "score": 86831.92053118642 }, { "content": "fn add_tag_widget(\n\n db: &Db,\n\n piece_id: PieceId,\n\n category_id: Option<CategoryId>,\n\n ui: &Ui,\n\n) -> Option<EditPieceResponse> {\n\n let unused_tags = db\n\n .tags()\n\n .filter(|(tag_id, _)| {\n\n !db.tags_for_piece(piece_id)\n\n .any(|piece_tag_id| piece_tag_id == *tag_id)\n\n && db.category_for_tag(*tag_id) == category_id\n\n })\n\n .sorted_by_key(|(_, tag)| &tag.name)\n\n .collect_vec();\n\n if !unused_tags.is_empty() {\n\n let (first_tag, _) = unused_tags[0];\n\n\n\n if let Some(tag_id) = super::combo_box(\n\n ui,\n", "file_path": "src/app/widgets/piece.rs", "rank": 43, "score": 86114.7165205252 }, { "content": "pub fn start_server<T: Send + Sync + 'static + for<'de> Deserialize<'de> + Debug>(\n\n) -> anyhow::Result<IpcReceiver<T>> {\n\n let socket = LocalSocketListener::bind(ADDRESS)?;\n\n\n\n let (tx, rx) = mpsc::channel(4);\n\n\n\n task::spawn_blocking(move || {\n\n for incoming in socket.incoming().filter_map(Result::ok) {\n\n let tx = tx.clone();\n\n let _ = handle_connection(tx, incoming);\n\n }\n\n });\n\n\n\n Ok(rx)\n\n}\n\n\n\npub struct IpcSender {\n\n stream: LocalSocketStream,\n\n}\n\n\n\nimpl IpcSender {\n\n pub fn send<T: Serialize>(mut self, value: T) -> anyhow::Result<()> {\n\n self.stream.write_all(&bincode::serialize(&value)?)?;\n\n self.stream.flush()?;\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "ipc/src/lib.rs", "rank": 44, "score": 78668.42988321722 }, { "content": "fn parse_added(input: &str) -> IResult<&str, Condition> {\n\n map_opt(parse_whole, |(lhs, rhs)| {\n\n Some(Condition::DateAdded(\n\n match lhs {\n\n \"after\" => DateOp::After,\n\n \"before\" => DateOp::Before,\n\n _ => return None,\n\n },\n\n NaiveDate::parse_from_str(rhs, \"%m/%d/%Y\").ok()?,\n\n ))\n\n })(input)\n\n}\n", "file_path": "search/src/parse.rs", "rank": 45, "score": 74576.47500577585 }, { "content": "fn 
parse_tag(input: &str) -> IResult<&str, Condition> {\n\n map(verify(parse_item, |x: &str| !x.is_empty()), |value| {\n\n Condition::Tag(value.to_string())\n\n })(input)\n\n}\n", "file_path": "search/src/parse.rs", "rank": 46, "score": 74131.13556332221 }, { "content": "fn parse_tag_with_category(input: &str) -> IResult<&str, Condition> {\n\n map(parse_whole, |(category, tag)| {\n\n Condition::TagWithCategory(\n\n (!category.trim().is_empty()).then(|| category.to_string()),\n\n tag.to_string(),\n\n )\n\n })(input)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::model::*;\n\n use Condition::*;\n\n use Search::*;\n\n\n\n #[test]\n\n fn test_tag() {\n\n assert_eq!(parse_tag(\"input\"), Ok((\"\", Tag(\"input\".to_owned()))));\n\n assert!(parse_tag(\"\").is_err())\n", "file_path": "search/src/parse.rs", "rank": 47, "score": 72555.69501893602 }, { "content": "use crate::table::TableId;\n\nuse chrono::{Local, NaiveDate};\n\nuse serde::{Deserialize, Serialize};\n\n\n\npub type TagId = TableId<Tag>;\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)]\n\npub struct Tag {\n\n pub name: String,\n\n pub description: String,\n\n pub added: NaiveDate,\n\n pub links: Vec<String>,\n\n}\n\nimpl Default for Tag {\n\n fn default() -> Self {\n\n Self {\n\n name: \"new_tag\".to_string(),\n\n description: String::new(),\n\n added: Local::today().naive_local(),\n\n links: Vec::new(),\n\n }\n\n }\n\n}\n", "file_path": "db/src/tag.rs", "rank": 48, "score": 72494.3450008476 }, { "content": "use crate::{tag::TagId, tag_category::CategoryId, BlobId, Db, PieceId};\n\n\n\nuse super::DeleteFrom;\n\n\n\nimpl DeleteFrom for PieceId {\n\n fn delete_from(self, db: &mut Db) -> bool {\n\n if db.exists(self) {\n\n db.pieces.remove(self);\n\n db.media.retain(|(piece, _)| *piece != self);\n\n db.piece_tags.retain(|(piece, _)| *piece != self);\n\n\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n}\n\n\n\nimpl DeleteFrom for BlobId {\n\n fn delete_from(self, db: &mut 
Db) -> bool {\n", "file_path": "db/src/traits/delete.rs", "rank": 49, "score": 70529.6032248692 }, { "content": " }\n\n}\n\n\n\nimpl DeleteFrom for CategoryId {\n\n fn delete_from(self, db: &mut Db) -> bool {\n\n if db.exists(self) {\n\n db.categories.remove(self);\n\n db.tag_category.retain(|_, tag| *tag != self);\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n}\n", "file_path": "db/src/traits/delete.rs", "rank": 50, "score": 70524.81334404231 }, { "content": " if db.exists(self) {\n\n db.blobs.remove(self);\n\n db.media.retain(|(_, blob)| *blob != self);\n\n true\n\n } else {\n\n false\n\n }\n\n }\n\n}\n\n\n\nimpl DeleteFrom for TagId {\n\n fn delete_from(self, db: &mut Db) -> bool {\n\n if db.exists(self) {\n\n db.tags.remove(self);\n\n db.piece_tags.retain(|(_, tag)| *tag != self);\n\n db.tag_category.remove(&self);\n\n true\n\n } else {\n\n false\n\n }\n", "file_path": "db/src/traits/delete.rs", "rank": 51, "score": 70523.97938650692 }, { "content": "use chrono::{Local, NaiveDate};\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::table::TableId;\n\n\n\npub type CategoryId = TableId<Category>;\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)]\n\npub struct Category {\n\n pub name: String,\n\n pub description: String,\n\n pub color: [u8; 4],\n\n pub added: NaiveDate,\n\n}\n\n\n\nimpl Category {\n\n pub fn raw_color(&self) -> [f32; 4] {\n\n [\n\n self.color[0] as f32 / 255.0,\n\n self.color[1] as f32 / 255.0,\n", "file_path": "db/src/tag_category.rs", "rank": 52, "score": 70071.50792673456 }, { "content": " self.color[2] as f32 / 255.0,\n\n self.color[3] as f32 / 255.0,\n\n ]\n\n }\n\n}\n\n\n\nimpl Default for Category {\n\n fn default() -> Self {\n\n Self {\n\n name: \"New Tag Category\".to_string(),\n\n description: String::new(),\n\n color: [0, 0, 0, 255],\n\n added: Local::today().naive_local(),\n\n }\n\n }\n\n}\n", "file_path": "db/src/tag_category.rs", "rank": 53, "score": 70061.35169148762 }, { "content": "enum Popup {\n\n 
CleanBlobs,\n\n}\n\n\n\nimpl App {\n\n pub fn update(&mut self, gui: &mut GuiContext) {\n\n if let Ok((blob_id, raw, is_thumbnail)) = self.incoming_images.try_recv() {\n\n let image = TextureImage {\n\n data: gui.load(&raw),\n\n width: raw.width,\n\n height: raw.height,\n\n hash: raw.hash,\n\n };\n\n\n\n self.gui_handle.forward_image(blob_id, image, is_thumbnail);\n\n } else {\n\n let mut gui_state = self.gui_state.write().unwrap();\n\n let db = self.gui_handle.read().unwrap();\n\n let invalid = gui_state\n\n .images\n", "file_path": "src/app.rs", "rank": 54, "score": 66437.22789190442 }, { "content": "#[test]\n\nfn test_undo_stack() {\n\n let mut new = UndoStack::new(0);\n\n new.undo_checkpoint();\n\n *new = 4;\n\n assert_eq!(*new, 4);\n\n assert_eq!(new.history.len(), 2);\n\n new.undo();\n\n assert_eq!(*new, 0);\n\n assert_eq!(new.history.len(), 2);\n\n new.redo();\n\n assert_eq!(*new, 4);\n\n assert_eq!(new.history.len(), 2);\n\n\n\n new.undo_checkpoint();\n\n assert_eq!(new.history.len(), 3);\n\n new.undo();\n\n new.undo();\n\n new.undo_checkpoint();\n\n assert_eq!(new.history.len(), 2);\n\n}\n", "file_path": "src/undo.rs", "rank": 55, "score": 56023.362401029925 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let runtime = Builder::new_multi_thread()\n\n .worker_threads(4)\n\n .max_blocking_threads(12)\n\n .thread_keep_alive(Duration::from_secs(1))\n\n .enable_all()\n\n .build()?;\n\n\n\n runtime.block_on(async_main())\n\n}\n\n\n\nasync fn async_main() -> anyhow::Result<()> {\n\n let mut config: Config = if let Ok(config) = Config::load() {\n\n config\n\n } else {\n\n let config = Config::default();\n\n\n\n config.save().unwrap();\n\n config\n\n };\n", "file_path": "src/main.rs", "rank": 56, "score": 52771.70543731695 }, { "content": "pub trait GuiView: Sync + Send + Debug {\n\n fn update(&self, gui_handle: &GuiHandle);\n\n\n\n fn draw_main(&mut self, gui_handle: &GuiHandle, gui_state: &InnerGuiState, ui: &imgui::Ui<'_>);\n\n fn draw_explorer(\n\n &mut 
self,\n\n gui_handle: &GuiHandle,\n\n gui_state: &InnerGuiState,\n\n ui: &imgui::Ui<'_>,\n\n );\n\n\n\n fn label(&self) -> &'static str;\n\n}\n\n\n\n#[derive(Default, Clone, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct SearchState {\n\n pub text: String,\n\n pub auto_complete: Vec<String>,\n\n pub selected: Option<usize>,\n\n}\n", "file_path": "src/app/gui_state.rs", "rank": 57, "score": 48770.54676503651 }, { "content": "fn create_background_folder_submenu(classes: &RegKey) -> Result<()> {\n\n let (background, _) = classes.create_subkey(r\"Directory\\Background\\shell\\ArtOrganize\")?;\n\n background.set_value(r\"MUIVerb\", &String::from(r\"ArtOrganize\"))?;\n\n background.set_value(\n\n r\"ExtendedSubCommandsKey\",\n\n &String::from(r\"ArtOrganize.Background\"),\n\n )?;\n\n Ok(())\n\n}\n", "file_path": "contextual/src/windows.rs", "rank": 58, "score": 46043.82689794584 }, { "content": "fn parse_and(input: &str) -> IResult<&str, Search> {\n\n map(\n\n verify(\n\n separated_list1(\n\n space1,\n\n alt((parse_or, parse_paren, parse_negate, parse_test)),\n\n ),\n\n |inner: &Vec<_>| inner.len() >= 2,\n\n ),\n\n Search::And,\n\n )(input)\n\n}\n\n\n", "file_path": "search/src/parse.rs", "rank": 59, "score": 45512.42891044398 }, { "content": "fn parse_or(input: &str) -> IResult<&str, Search> {\n\n map(\n\n verify(\n\n separated_list1(tag(\"|\"), alt((parse_paren, parse_negate, parse_test))),\n\n |inner: &Vec<_>| inner.len() >= 2,\n\n ),\n\n Search::Or,\n\n )(input)\n\n}\n\n\n", "file_path": "search/src/parse.rs", "rank": 60, "score": 45512.42891044398 }, { "content": "fn create_background_folder_context_menu(classes: &RegKey) -> Result<()> {\n\n let (background, _) = classes.create_subkey(r\"ArtOrganize.Background\\shell\")?;\n\n\n\n create_shell_entry(\n\n &background,\n\n ShellEntry {\n\n name: \"init\",\n\n label: \"Initialize DB\",\n\n command: make_command(r#\"init \"%V\"\"#)?,\n\n },\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "contextual/src/windows.rs", 
"rank": 61, "score": 45305.64285903485 }, { "content": "fn parse_condition(input: &str) -> IResult<&str, Condition> {\n\n alt((\n\n parse_source,\n\n parse_media,\n\n parse_added,\n\n parse_price,\n\n parse_tag_with_category,\n\n parse_tag,\n\n ))(input)\n\n}\n\n\n", "file_path": "search/src/parse.rs", "rank": 62, "score": 44688.65419365638 }, { "content": "fn parse_media(input: &str) -> IResult<&str, Condition> {\n\n map_opt(\n\n verify(parse_whole, |(lhs, _)| *lhs == \"media\"),\n\n |(_, rhs)| {\n\n Some(match rhs {\n\n \"image\" => Condition::Media(MediaType::Image),\n\n \"text\" => Condition::Media(MediaType::Text),\n\n _ => return None,\n\n })\n\n },\n\n )(input)\n\n}\n\n\n", "file_path": "search/src/parse.rs", "rank": 63, "score": 44688.65419365638 }, { "content": "fn parse_negate(input: &str) -> IResult<&str, Search> {\n\n map(\n\n preceded(tag(\"!\"), alt((parse_paren, parse_negate, parse_test))),\n\n |value| Search::Negate(Box::new(value)),\n\n )(input)\n\n}\n\n\n", "file_path": "search/src/parse.rs", "rank": 64, "score": 44688.65419365638 }, { "content": "fn parse_price(input: &str) -> IResult<&str, Condition> {\n\n map_opt(\n\n tuple((parse_item, alt((tag(\"<=\"), tag(\">=\"))), parse_item)),\n\n |(price_type, operation, value)| {\n\n let price_type = match price_type {\n\n \"total\" => PriceType::Total,\n\n \"base\" => PriceType::Base,\n\n \"tip\" => PriceType::Tip,\n\n _ => return None,\n\n };\n\n let operation = match operation {\n\n \">=\" => PriceOp::GreaterEqual,\n\n \"<=\" => PriceOp::LesserEqual,\n\n _ => unreachable!(),\n\n };\n\n let value = value.parse().ok()?;\n\n Some(Condition::Price(price_type, operation, value))\n\n },\n\n )(input)\n\n}\n\n\n", "file_path": "search/src/parse.rs", "rank": 65, "score": 44688.65419365638 }, { "content": "fn parse_paren(input: &str) -> IResult<&str, Search> {\n\n terminated(preceded(tag(\"(\"), parse_search), tag(\")\"))(input)\n\n}\n\n\n", "file_path": "search/src/parse.rs", "rank": 66, "score": 
44688.65419365638 }, { "content": "fn make_command(subcommand: &'static str) -> Result<String> {\n\n let local_exe = std::env::current_exe()?;\n\n Ok(format!(\n\n r#\"\"{}\" {}\"#,\n\n local_exe.to_str().unwrap(),\n\n subcommand\n\n ))\n\n}\n\n\n", "file_path": "contextual/src/windows.rs", "rank": 67, "score": 44688.65419365638 }, { "content": "fn parse_test(input: &str) -> IResult<&str, Search> {\n\n map(parse_condition, Search::Test)(input)\n\n}\n\n\n", "file_path": "search/src/parse.rs", "rank": 68, "score": 44688.65419365638 }, { "content": "fn parse_item(input: &str) -> IResult<&str, &str> {\n\n complete::take_while(|c: char| {\n\n !c.is_whitespace() && !matches!(c, ':' | '(' | ')' | '!' | '|' | '>' | '=' | '<')\n\n })(input)\n\n}\n\n\n", "file_path": "search/src/parse.rs", "rank": 69, "score": 44688.65419365638 }, { "content": "fn parse_source(input: &str) -> IResult<&str, Condition> {\n\n map_opt(\n\n verify(parse_whole, |(lhs, _)| *lhs == \"source\"),\n\n |(_, rhs)| {\n\n Some(match rhs {\n\n \"fan\" => Condition::Source(SourceType::FanCreation),\n\n \"commission\" => Condition::Source(SourceType::Commission),\n\n \"official\" => Condition::Source(SourceType::Official),\n\n _ => return None,\n\n })\n\n },\n\n )(input)\n\n}\n", "file_path": "search/src/parse.rs", "rank": 70, "score": 44688.65419365638 }, { "content": "fn parse_whole(input: &str) -> IResult<&str, (&str, &str)> {\n\n separated_pair(parse_item, complete::tag(\":\"), parse_item)(input)\n\n}\n\n\n", "file_path": "search/src/parse.rs", "rank": 71, "score": 43021.34545376024 }, { "content": "fn create_shell_entry(background: &RegKey, entry: ShellEntry) -> Result<()> {\n\n let (backup, _) = background.create_subkey(entry.name)?;\n\n backup.set_value(\"\", &String::from(entry.label))?;\n\n let (backup_command, _) = backup.create_subkey(\"command\")?;\n\n backup_command.set_value(\"\", &entry.command)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "contextual/src/windows.rs", "rank": 72, "score": 
42470.94369612442 }, { "content": "fn handle_connection<T: Send + Sync + 'static + for<'de> Deserialize<'de> + Debug>(\n\n tx: mpsc::Sender<T>,\n\n mut incoming: LocalSocketStream,\n\n) -> anyhow::Result<()> {\n\n let mut buffer = vec![0; 1000];\n\n let size = incoming.read(&mut buffer)?;\n\n buffer.truncate(size);\n\n\n\n let message = bincode::deserialize(&buffer)?;\n\n\n\n tx.blocking_send(message)?;\n\n\n\n Ok(())\n\n}\n\n\n\npub type IpcReceiver<T> = mpsc::Receiver<T>;\n\n\n", "file_path": "ipc/src/lib.rs", "rank": 73, "score": 39429.09849168803 }, { "content": "fn rescale(image: &TextureImage, max_size: [f32; 2]) -> ([f32; 2], [f32; 2]) {\n\n rescale_with_zoom(image, max_size, 1.0)\n\n}\n", "file_path": "src/app/widgets/blob.rs", "rank": 74, "score": 39429.09849168803 }, { "content": "fn rescale_with_zoom(image: &TextureImage, max_size: [f32; 2], zoom: f32) -> ([f32; 2], [f32; 2]) {\n\n let size = [image.width as f32 * zoom, image.height as f32 * zoom];\n\n let aspect_ratio = size[0] / size[1];\n\n let new_aspect_ratio = max_size[0] / max_size[1];\n\n\n\n let size = if size[0] <= max_size[0] && size[1] <= max_size[1] {\n\n size\n\n } else {\n\n let use_width = aspect_ratio >= new_aspect_ratio;\n\n\n\n if use_width {\n\n [max_size[0], size[1] * max_size[0] / size[0]]\n\n } else {\n\n [size[0] * max_size[1] / size[1], max_size[1]]\n\n }\n\n };\n\n\n\n (size, [max_size[0] - size[0], max_size[1] - size[1]])\n\n}\n", "file_path": "src/app/widgets/blob.rs", "rank": 75, "score": 36406.37446892063 }, { "content": "use std::{fmt::Display, path::PathBuf};\n\n\n\nuse chrono::NaiveDate;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::table::TableId;\n\nuse strum::EnumIter;\n\n\n\npub type BlobId = TableId<Blob>;\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]\n\npub struct Blob {\n\n pub file_name: String,\n\n pub hash: u64,\n\n pub blob_type: BlobType,\n\n pub added: NaiveDate,\n\n}\n\n\n\nimpl Blob {\n\n pub fn storage_name(&self, id: 
BlobId) -> PathBuf {\n", "file_path": "db/src/blob.rs", "rank": 76, "score": 35937.93949822788 }, { "content": " tag_category: BTreeMap<TagId, CategoryId>,\n\n}\n\n\n\nimpl Db {\n\n pub fn create_blob(&mut self, data: Blob) -> BlobId {\n\n self.blobs.insert(data)\n\n }\n\n\n\n pub fn attach_blob(&mut self, AttachBlob { src, dest }: AttachBlob) -> bool {\n\n self.media.insert((src, dest))\n\n }\n\n\n\n pub fn attach_category(&mut self, AttachCategory { src, dest }: AttachCategory) -> bool {\n\n match dest {\n\n Some(new_category) => {\n\n self.tag_category.insert(src, new_category);\n\n true\n\n }\n\n None => self.tag_category.remove(&src).is_some(),\n\n }\n", "file_path": "db/src/lib.rs", "rank": 77, "score": 35933.34894114973 }, { "content": " }\n\n\n\n pub fn exists<Id: IdExist>(&self, id: Id) -> bool {\n\n id.exists_in(self)\n\n }\n\n\n\n pub fn delete<Id: DeleteFrom>(&mut self, id: Id) -> bool {\n\n id.delete_from(self)\n\n }\n\n\n\n pub fn edit<Data: EditFrom>(&mut self, data: Data) -> bool {\n\n data.edit_from(self)\n\n }\n\n}\n", "file_path": "db/src/lib.rs", "rank": 78, "score": 35932.36366792462 }, { "content": " }\n\n pub fn attach_tag(&mut self, AttachTag { src, dest }: AttachTag) -> bool {\n\n self.piece_tags.insert((src, dest))\n\n }\n\n\n\n pub fn remove_tag(&mut self, AttachTag { src, dest }: AttachTag) -> bool {\n\n self.piece_tags.remove(&(src, dest))\n\n }\n\n\n\n pub fn create_piece(&mut self, data: Piece) -> PieceId {\n\n self.pieces.insert(data)\n\n }\n\n pub fn create_tag(&mut self, data: Tag) -> TagId {\n\n self.tags.insert(data)\n\n }\n\n pub fn create_category(&mut self, data: Category) -> CategoryId {\n\n self.categories.insert(data)\n\n }\n\n\n\n pub fn blobs_for_piece(&self, piece: PieceId) -> impl Iterator<Item = BlobId> + Clone + '_ {\n", "file_path": "db/src/lib.rs", "rank": 79, "score": 35931.865857521174 }, { "content": "use crate::table::TableId;\n\n\n\nuse super::{media_type::MediaType, source_type::SourceType};\n\nuse 
chrono::{Local, NaiveDate};\n\nuse serde::{Deserialize, Serialize};\n\n\n\npub type PieceId = TableId<Piece>;\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone)]\n\npub struct Piece {\n\n pub name: String,\n\n pub source_type: SourceType,\n\n pub media_type: MediaType,\n\n pub added: NaiveDate,\n\n pub links: Vec<String>,\n\n pub base_price: Option<i64>,\n\n pub tip_price: Option<i64>,\n\n}\n\n\n\nimpl Default for Piece {\n", "file_path": "db/src/piece.rs", "rank": 80, "score": 35930.24357209777 }, { "content": "#![feature(btree_retain)]\n\n\n\npub use self::{\n\n blob::{Blob, BlobId, BlobType},\n\n contained_piece::ContainedPiece,\n\n media_type::MediaType,\n\n piece::{Piece, PieceId},\n\n source_type::SourceType,\n\n tag::{Tag, TagId},\n\n tag_category::{Category, CategoryId},\n\n};\n\nuse commands::{AttachBlob, AttachCategory, AttachTag};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::{\n\n collections::{BTreeMap, BTreeSet},\n\n path::PathBuf,\n\n};\n\nuse table::Table;\n\nuse traits::{DeleteFrom, EditFrom, IdExist};\n\n\n", "file_path": "db/src/lib.rs", "rank": 81, "score": 35929.98294810752 }, { "content": "use crate::{Blob, BlobId, Category, CategoryId, Piece, PieceId, Tag, TagId};\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub struct Attach<Left, Right> {\n\n pub src: Left,\n\n pub dest: Right,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub struct Edit<DataId, Data> {\n\n pub id: DataId,\n\n pub data: Data,\n\n}\n\n\n\npub type AttachBlob = Attach<PieceId, BlobId>;\n\npub type AttachCategory = Attach<TagId, Option<CategoryId>>;\n\npub type AttachTag = Attach<PieceId, TagId>;\n\n\n\npub type EditPiece = Edit<PieceId, Piece>;\n\npub type EditBlob = Edit<BlobId, Blob>;\n\npub type EditTag = Edit<TagId, Tag>;\n\npub type EditCategory = Edit<CategoryId, Category>;\n", "file_path": "db/src/commands.rs", "rank": 82, "score": 35929.84881291093 }, { "content": "\n\n pub fn category_for_tag(&self, tag: TagId) -> 
Option<CategoryId> {\n\n self.tag_category.get(&tag).copied()\n\n }\n\n\n\n pub fn pieces(&self) -> impl Iterator<Item = (PieceId, &'_ Piece)> {\n\n self.pieces.iter()\n\n }\n\n pub fn blobs(&self) -> impl Iterator<Item = (BlobId, &'_ Blob)> {\n\n self.blobs.iter()\n\n }\n\n pub fn tags(&self) -> impl Iterator<Item = (TagId, &'_ Tag)> {\n\n self.tags.iter()\n\n }\n\n pub fn categories(&self) -> impl Iterator<Item = (CategoryId, &'_ Category)> {\n\n self.categories.iter()\n\n }\n\n\n\n pub fn storage_for(&self, id: BlobId) -> PathBuf {\n\n self[id].storage_name(id)\n", "file_path": "db/src/lib.rs", "rank": 83, "score": 35928.84265246455 }, { "content": " pub fn get_mut(&mut self, index: TableId<T>) -> Option<&mut T> {\n\n self.data.get_mut(index.0)\n\n }\n\n\n\n pub fn iter(&self) -> impl Iterator<Item = (TableId<T>, &T)> {\n\n self.data.iter().map(|(id, value)| (id.into(), value))\n\n }\n\n pub fn iter_mut(&mut self) -> impl Iterator<Item = (TableId<T>, &mut T)> {\n\n self.data.iter_mut().map(|(id, value)| (id.into(), value))\n\n }\n\n\n\n pub fn values(&self) -> impl Iterator<Item = &T> {\n\n self.data.iter().map(|(_, value)| value)\n\n }\n\n pub fn values_mut(&mut self) -> impl Iterator<Item = &mut T> {\n\n self.data.iter_mut().map(|(_, value)| value)\n\n }\n\n\n\n pub fn keys(&self) -> impl Iterator<Item = TableId<T>> + '_ {\n\n self.data.iter().map(|(id, _)| id.into())\n", "file_path": "db/src/table.rs", "rank": 84, "score": 35928.04954233973 }, { "content": " MaybeEntry::Id(id.into())\n\n } else {\n\n MaybeEntry::Value(data)\n\n }\n\n }\n\n\n\n pub fn has(&self, index: TableId<T>) -> bool {\n\n self.data.contains(index.0)\n\n }\n\n\n\n pub fn insert(&mut self, data: T) -> TableId<T> {\n\n self.data.insert(data).into()\n\n }\n\n pub fn remove(&mut self, data: TableId<T>) -> T {\n\n self.data.remove(data.0)\n\n }\n\n\n\n pub fn get(&self, index: TableId<T>) -> Option<&T> {\n\n self.data.get(index.0)\n\n }\n", "file_path": "db/src/table.rs", "rank": 85, "score": 
35927.492382493096 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse slab::Slab;\n\nuse std::{collections::BTreeMap, fmt::Display, marker::PhantomData, ops::Index};\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct TableId<T>(usize, PhantomData<T>);\n\n\n\npub enum MaybeEntry<T> {\n\n Id(TableId<T>),\n\n Value(T),\n\n}\n\n\n\nimpl<T> Clone for TableId<T> {\n\n fn clone(&self) -> Self {\n\n Self(self.0, self.1)\n\n }\n\n}\n\nimpl<T> Copy for TableId<T> {}\n\n\n\nimpl<T> From<usize> for TableId<T> {\n", "file_path": "db/src/table.rs", "rank": 86, "score": 35925.219490787575 }, { "content": "mod blob;\n\npub mod commands;\n\nmod contained_piece;\n\nmod media_type;\n\nmod piece;\n\nmod source_type;\n\nmod table;\n\nmod tag;\n\nmod tag_category;\n\npub mod traits;\n\n\n\n#[derive(Debug, Serialize, Deserialize, Default, PartialEq, Eq, Clone)]\n\npub struct Db {\n\n pieces: Table<Piece>,\n\n blobs: Table<Blob>,\n\n tags: Table<Tag>,\n\n categories: Table<Category>,\n\n\n\n media: BTreeSet<(PieceId, BlobId)>,\n\n piece_tags: BTreeSet<(PieceId, TagId)>,\n", "file_path": "db/src/lib.rs", "rank": 87, "score": 35924.703862469156 }, { "content": " format!(\"[{}] {}\", id, self.file_name).parse().unwrap()\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, Clone, Copy, EnumIter, PartialOrd, Ord)]\n\npub enum BlobType {\n\n Canon,\n\n Variant,\n\n Raw,\n\n Draft,\n\n}\n\n\n\nimpl Display for BlobType {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(\n\n f,\n\n \"{}\",\n\n match self {\n\n BlobType::Canon => {\n\n \"Canon\"\n", "file_path": "db/src/blob.rs", "rank": 88, "score": 35924.11484314929 }, { "content": " .filter(move |(id, _)| id == &piece)\n\n .map(|(_, id)| *id)\n\n }\n\n\n\n pub fn pieces_for_tag(&self, tag: TagId) -> impl Iterator<Item = PieceId> + Clone + '_ {\n\n self.piece_tags\n\n .iter()\n\n .filter(move |(_, id)| id == &tag)\n\n .map(|(id, _)| *id)\n\n }\n\n\n\n pub fn 
tags_for_category(\n\n &self,\n\n category: CategoryId,\n\n ) -> impl Iterator<Item = TagId> + Clone + '_ {\n\n self.tag_category\n\n .iter()\n\n .filter(move |(_, id)| **id == category)\n\n .map(|(id, _)| *id)\n\n }\n", "file_path": "db/src/lib.rs", "rank": 89, "score": 35923.47867340728 }, { "content": "impl<T: Eq> Eq for Table<T> {}\n\n\n\nimpl<T> Default for Table<T> {\n\n fn default() -> Self {\n\n Self { data: Slab::new() }\n\n }\n\n}\n\n\n\nimpl<T> Index<TableId<T>> for Table<T> {\n\n type Output = T;\n\n\n\n fn index(&self, index: TableId<T>) -> &Self::Output {\n\n self.get(index).unwrap()\n\n }\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl<T> Table<T> {\n\n pub fn check<F: Fn(&T, &T) -> bool>(&self, data: T, f: F) -> MaybeEntry<T> {\n\n if let Some((id, _)) = self.data.iter().find(|(_, item)| f(*item, &data)) {\n", "file_path": "db/src/table.rs", "rank": 90, "score": 35923.15325385143 }, { "content": " self.media\n\n .iter()\n\n .filter(move |(id, _)| id == &piece)\n\n .map(|(_, id)| *id)\n\n }\n\n pub fn pieces_for_blob(&self, blob: BlobId) -> impl Iterator<Item = PieceId> + Clone + '_ {\n\n self.media\n\n .iter()\n\n .filter(move |(_, id)| id == &blob)\n\n .map(|(id, _)| *id)\n\n }\n\n\n\n pub fn primary_blob_for_piece(&self, piece: PieceId) -> Option<BlobId> {\n\n self.blobs_for_piece(piece)\n\n .find(|blob_id| self[blob_id].blob_type == BlobType::Canon)\n\n }\n\n\n\n pub fn tags_for_piece(&self, piece: PieceId) -> impl Iterator<Item = TagId> + Clone + '_ {\n\n self.piece_tags\n\n .iter()\n", "file_path": "db/src/lib.rs", "rank": 91, "score": 35922.64217341877 }, { "content": "use crate::Db;\n\n\n\nmod delete;\n\nmod edit;\n\nmod exists;\n\nmod index;\n\n\n", "file_path": "db/src/traits.rs", "rank": 92, "score": 35921.87314678008 }, { "content": " fn cmp(&self, other: &Self) -> std::cmp::Ordering {\n\n self.0.cmp(&other.0)\n\n }\n\n}\n\n\n\nimpl<T> Display for TableId<T> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, 
\"{}\", self.0)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Table<T> {\n\n data: Slab<T>,\n\n}\n\nimpl<T: PartialEq> PartialEq for Table<T> {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.iter().zip(other.iter()).all(|(lhs, rhs)| lhs == rhs)\n\n }\n\n}\n", "file_path": "db/src/table.rs", "rank": 93, "score": 35921.08484567632 }, { "content": " fn default() -> Self {\n\n Self {\n\n name: \"New Piece\".to_string(),\n\n source_type: SourceType::Commission,\n\n media_type: MediaType::Image,\n\n added: Local::today().naive_local(),\n\n base_price: None,\n\n tip_price: None,\n\n links: Vec::new(),\n\n }\n\n }\n\n}\n", "file_path": "db/src/piece.rs", "rank": 94, "score": 35921.0683245752 }, { "content": " fn from(value: usize) -> Self {\n\n TableId(value, PhantomData)\n\n }\n\n}\n\n\n\nimpl<T> PartialEq for TableId<T> {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.0.eq(&other.0)\n\n }\n\n}\n\n\n\nimpl<T> Eq for TableId<T> {}\n\n\n\nimpl<T> PartialOrd for TableId<T> {\n\n fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {\n\n self.0.partial_cmp(&other.0)\n\n }\n\n}\n\n\n\nimpl<T> Ord for TableId<T> {\n", "file_path": "db/src/table.rs", "rank": 95, "score": 35917.75098389997 }, { "content": " }\n\n}\n\n\n\nimpl<'de, T: Deserialize<'de>> Deserialize<'de> for Table<T> {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: serde::Deserializer<'de>,\n\n {\n\n let data = BTreeMap::<usize, T>::deserialize(deserializer)?;\n\n let table = Table {\n\n data: data.into_iter().collect(),\n\n };\n\n\n\n Ok(table)\n\n }\n\n}\n\n\n\nimpl<T: Serialize> Serialize for Table<T> {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: serde::Serializer,\n\n {\n\n let data: BTreeMap<_, _> = self.data.iter().collect();\n\n\n\n data.serialize(serializer)\n\n }\n\n}\n", "file_path": "db/src/table.rs", "rank": 96, "score": 35915.48566936662 }, { "content": " }\n\n BlobType::Variant => {\n\n 
\"Variant\"\n\n }\n\n BlobType::Raw => {\n\n \"Raw\"\n\n }\n\n BlobType::Draft => {\n\n \"Draft\"\n\n }\n\n }\n\n )\n\n }\n\n}\n", "file_path": "db/src/blob.rs", "rank": 97, "score": 35911.1012408566 }, { "content": "use chrono::NaiveDate;\n\nuse imgui::{im_str, ImStr, Ui};\n\n\n\npub const DATE_FORMAT: &str = \"%-m/%-d/%-Y\";\n\n\n", "file_path": "src/app/widgets/date.rs", "rank": 98, "score": 35845.21565078884 } ]
Rust
src/writer/string_buffer.rs
efharkin/swc2dot
f08a38e7ae18344c4b9da8730ada01050ac2c496
#[derive(Clone, Debug)] pub struct StringBuffer { buf: String, empty_buf: String, has_been_written_to: bool, indent_level: u8, line_width: u32, cursor_position: u32, } impl StringBuffer { pub fn new(leading_newline: bool, indent: Indent, capacity: usize) -> StringBuffer { let mut buf = String::with_capacity((32 + INDENT_SIZE * indent.first) as usize + capacity); if leading_newline { buf.push_str("\n"); } buf.push_str(&get_indent(indent.first)); let string_buffer = StringBuffer { buf: buf, empty_buf: "".to_string(), has_been_written_to: false, indent_level: indent.main, line_width: 80, cursor_position: (INDENT_SIZE * indent.first) as u32, }; string_buffer.assert_cursor_is_within_line(); return string_buffer; } pub fn push_str(&mut self, string: &str) { self.has_been_written_to = true; self.weak_push_str(string); } pub fn newline(&mut self) { self.buf.push_str("\n"); self.buf.push_str(&get_indent(self.indent_level)); self.cursor_position = self.newline_cursor_position(); self.assert_cursor_is_within_line(); } pub fn weak_push_str(&mut self, string: &str) { if (string.len() > self.remaining_space_on_line() as usize) & (self.cursor_position > self.newline_cursor_position()) | (self.remaining_space_on_line() <= 0) { if !string.starts_with("\n") { self.newline(); } } self.buf.push_str(string); self.cursor_position += string.len() as u32; } pub fn to_string(&self) -> String { if self.has_been_written_to { let mut result = self.buf.clone(); result.shrink_to_fit(); result } else { self.empty_buf.clone() } } pub fn len(&self) -> usize { if self.has_been_written_to { self.buf.len() } else { self.empty_buf.len() } } #[inline] fn newline_cursor_position(&self) -> u32 { (self.indent_level * INDENT_SIZE) as u32 } #[inline] fn remaining_space_on_line(&self) -> i32 { (self.line_width as i64 - self.cursor_position as i64) as i32 } #[inline] fn assert_cursor_is_within_line(&self) { assert!( self.cursor_position <= self.line_width, "Cursor position {} greater than line width {}.", 
self.cursor_position, self.line_width ); } } impl AsRef<String> for StringBuffer { fn as_ref(&self) -> &String { if self.has_been_written_to { &self.buf } else { &self.empty_buf } } } impl AsRef<str> for StringBuffer { fn as_ref(&self) -> &str { if self.has_been_written_to { &self.buf } else { &self.empty_buf } } } #[cfg(test)] mod string_buffer_tests { use super::*; #[test] fn returns_empty_if_push_str_is_never_called() { let string = StringBuffer::new(true, Indent::flat(5), 32); assert_eq!("".to_string(), string.to_string()) } #[test] fn returns_indented_if_push_str_is_called() { let mut string = StringBuffer::new(false, Indent::flat(1), 32); string.push_str("something"); assert_eq!(" something".to_string(), string.to_string()); } #[test] fn returns_newline_if_push_str_is_called() { let mut string = StringBuffer::new(true, Indent::flat(0), 32); string.push_str("something"); assert_eq!("\nsomething".to_string(), string.to_string()); } fn compare_str_ref(a: &str, b: &str) -> bool { a == b } #[test] fn asref_str_empty_if_push_str_is_never_called() { let string = StringBuffer::new(true, Indent::flat(5), 32); if !compare_str_ref(string.as_ref(), "") { panic!("Failed"); } } #[test] fn asref_str_indented_if_push_str_is_called() { let mut string = StringBuffer::new(false, Indent::flat(1), 32); string.push_str("something"); if !compare_str_ref(string.as_ref(), " something") { panic!("Failed"); } } #[test] fn asref_str_newline_if_push_str_is_called() { let mut string = StringBuffer::new(true, Indent::flat(0), 32); string.push_str("something"); if !compare_str_ref(string.as_ref(), "\nsomething") { panic!("Failed"); } } #[test] fn hard_wrap_short_line_without_indent() { let mut string = StringBuffer::new(false, Indent::flat(0), 32); string.line_width = 4; string.push_str("123"); string.push_str("456"); assert_eq!("123\n456".to_string(), string.to_string()); } #[test] fn hard_wrap_full_line_without_indent() { let mut string = StringBuffer::new(false, Indent::flat(0), 32); 
string.line_width = 4; string.push_str("1234"); string.push_str("5678"); assert_eq!("1234\n5678".to_string(), string.to_string()); } #[test] fn hard_wrap_short_line_with_indent() { let mut string = StringBuffer::new(false, Indent::flat(1), 32); string.line_width = 8; string.push_str("123"); string.push_str("456"); assert_eq!(" 123\n 456".to_string(), string.to_string()); } #[test] fn hard_wrap_full_line_with_indent() { let mut string = StringBuffer::new(false, Indent::flat(1), 32); string.line_width = 8; string.push_str("1234"); string.push_str("5678"); assert_eq!(" 1234\n 5678".to_string(), string.to_string()); } #[test] fn hard_wrap_long_line_without_indent() { let mut string = StringBuffer::new(false, Indent::flat(0), 32); string.line_width = 4; string.push_str("12345"); string.push_str("678"); assert_eq!("12345\n678".to_string(), string.to_string()); } #[test] fn hard_wrap_long_second_line_with_indent() { let mut string = StringBuffer::new(false, Indent::flat(1), 32); string.line_width = 8; string.push_str("123"); string.push_str("456789"); string.push_str("0"); assert_eq!(" 123\n 456789\n 0".to_string(), string.to_string()); } } /* pub enum Indent { /// Set the indent of the first line to a different level than subsequent lines. /// /// `AbsoluteFirstLine(first_line_level, subsequent_lines)` AbsoluteFirstLine(u8, u8), /// Set the indent level of the first line relative to subsequent lines. /// /// `RelativeFirstLine(relative_first_line_level, subsequent_lines)` /// /// # Note /// /// `RelativeFirstLine(-1, 1)` is equivalent to `AbsoluteFirstLine(0, 1)`. RelativeFirstLine(i8, u8), /// All lines are indented to the same level. 
Flat(u8) } */ #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Indent { pub first: u8, pub main: u8 } impl Indent { pub fn absolute_first_line(first_line_level: u8, main_indent_level: u8) -> Indent { Indent{ first: first_line_level, main: main_indent_level } } pub fn relative_first_line(first_line_level: i8, main_indent_level: u8) -> Indent { if (first_line_level as i32) + (main_indent_level as i32) < 0 { panic!( "Minimum allowed relative first line indent level for main indent level {} is -{}, got {}", main_indent_level, main_indent_level, first_line_level ); } Indent { first: (main_indent_level as i16 + first_line_level as i16) as u8, main: main_indent_level } } pub fn flat(indent_level: u8) -> Indent { Indent { first: indent_level, main: indent_level } } pub fn zero() -> Indent { Indent { first: 0, main: 0 } } } #[cfg(test)] mod indent_tests { use super::*; #[test] fn zero() { let indent = Indent::zero(); assert_eq!(indent.first, 0); assert_eq!(indent.main, 0); } #[test] fn flat() { for indent_level in [0, 4, 89, 223].iter() { let indent = Indent::flat(*indent_level); assert_eq!(indent.first, *indent_level); assert_eq!(indent.main, *indent_level); } } #[test] fn absolute_first_line() { for (first_indent_level, main_indent_level) in [(0, 0), (0, 8), (43, 0), (76, 20)].iter() { let indent = Indent::absolute_first_line(*first_indent_level, *main_indent_level); assert_eq!(indent.first, *first_indent_level); assert_eq!(indent.main, *main_indent_level); } } #[test] fn relative_first_line() { for (first_indent_level, main_indent_level) in [(0, 0), (-2, 5), (7, 9)].iter() { let indent = Indent::relative_first_line(*first_indent_level, *main_indent_level); assert_eq!( indent.first, (*first_indent_level + (*main_indent_level as i8)) as u8 ); assert_eq!(indent.main, *main_indent_level as u8); } } #[test] #[should_panic] fn invalid_relative_first_line_panics() { let indent = Indent::relative_first_line(-2, 1); println!("Expected invalid relative indent with negative 
first line to panic, got {:?} indent instead", indent); } } static INDENT_SIZE: u8 = 4; pub fn get_indent(level: u8) -> String { let mut buf = String::with_capacity((level * INDENT_SIZE) as usize); for _ in 0..level { buf.push_str(" "); } return buf; }
#[derive(Clone, Debug)] pub struct StringBuffer { buf: String, empty_buf: String, has_been_written_to: bool, indent_level: u8, line_width: u32, cursor_position: u32, } impl StringBuffer { pub fn new(leading_newline: bool, indent: Indent, capacity: usize) -> StringBuffer { let mut buf = String::with_capacity((32 + INDENT_SIZE * indent.first) as usize + capacity); if leading_newline { buf.push_str("\n"); } buf.push_str(&get_indent(indent.first)); let string_buffer = StringBuffer { buf: buf, empty_buf: "".to_string(), has_been_written_to: false, indent_level: indent.main, line_width: 80, cursor_position: (INDENT_SIZE * indent.first) as u32, }; string_buffer.assert_cursor_is_within_line(); return string_buffer; } pub fn push_str(&mut self, string: &str) { self.has_been_written_to = true; self.weak_push_str(string); } pub fn newline(&mut self) { self.buf.push_str("\n"); self.buf.push_str(&get_indent(self.indent_level)); self.cursor_position = self.newline_cursor_position(); self.assert_cursor_is_within_line(); } pub fn weak_push_str(&mut self, string: &str) { if (string.len() > self.remaining_space_on_line() as usize) & (self.cursor_position > self.newline_cursor_position()) | (self.remaining_space_on_line() <= 0) { if !string.starts_with("\n") { self.newline(); } } self.buf.push_str(string); self.cursor_position += string.len() as u32; } pub fn to_string(&self) -> String { if self.has_been_written_to { let mut result = self.buf.clone(); result.shrink_to_fit(); result } else { self.empty_buf.clone() } } pub fn len(&self) -> usize { if self.has_been_written_to { self.buf.len() } else { self.empty_buf.len() } } #[inline] fn newline_cursor_position(&self) -> u32 { (self.indent_level * INDENT_SIZE) as u32 } #[inline] fn remaining_space_on_line(&self) -> i32 { (self.line_width as i64 - self.cursor_position as i64) as i32 } #[inline] fn assert_cursor_is_within_line(&self) { assert!( self.cursor_position <= self.line_width, "Cursor position {} greater than line width {}.", 
self.cursor_position, self.line_width ); } } impl AsRef<String> for StringBuffer { fn as_ref(&self) -> &String { if self.has_been_written_to { &self.buf } else { &self.empty_buf } } } impl AsRef<str> for StringBuffer { fn as_ref(&self) -> &str { if self.has_been_written_to { &self.buf } else { &self.empty_buf } } } #[cfg(test)] mod string_buffer_tests { use super::*; #[test] fn returns_empty_if_push_str_is_never_called() { let string = StringBuffer::new(true, Indent::flat(5), 32); assert_eq!("".to_string(), string.to_string()) } #[test] fn returns_indented_if_push_str_is_called() { let mut string = StringBuffer::new(false, Indent::flat(1), 32); string.push_str("something"); assert_eq!(" something".to_string(), string.to_string()); } #[test] fn returns_newline_if_push_str_is_called() { let mut string = StringBuffer::new(true, Indent::flat(0), 32); string.push_str("something"); assert_eq!("\nsomething".to_string(), string.to_string()); } fn compare_str_ref(a: &str, b: &str) -> bool { a == b } #[test] fn asref_str_empty_if_push_str_is_never_called() { let string = StringBuffer::new(true, Indent::flat(5), 32); if !compare_str_ref(string.as_ref(), "") { panic!("Failed"); } } #[test] fn asref_str_indented_if_push_str_is_called() { let mut string = StringBuffer::new(false, Indent::flat(1), 32); string.push_str("something"); if !compare_str_ref(string.as_ref(), " something") { panic!("Failed"); } } #[test] fn asref_str_newline_if_push_str_is_called() { let mut string = StringBuffer::new(true, Indent::flat(0), 32); string.push_str("something"); if !compare_str_ref(string.as_ref(), "\nsomething") { panic!("Failed"); } } #[test] fn hard_wrap_short_line_without_indent() { let mut string = StringBuffer::new(false, Indent::flat(0), 32); string.line_width = 4; string.push_str("123"); string.push_str("456"); assert_eq!("123\n456".to_string(), string.to_string()); } #[test] fn hard_wrap_full_line_without_indent() { let mut string = StringBuffer::new(false, Indent::flat(0), 32); 
string.line_width = 4; string.push_str("1234"); string.push_str("5678"); assert_eq!("1234\n5678".to_string(), string.to_string()); } #[test] fn hard_wrap_short_line_with_indent() { let mut string = StringBuffer::new(false, Indent::flat(1), 32); string.line_width = 8; string.push_str("123"); string.push_str("456"); assert_eq!(" 123\n 456".to_string(), string.to_string()); } #[test] fn hard_wrap_full_line_with_indent() { let mut string = StringBuffer::new(false, Indent::flat(1), 32); string.line_width = 8; string.push_str("1234"); string.push_str("5678"); assert_eq!(" 1234\n 5678".to_string(), string.to_string()); } #[test] fn hard_wrap_long_line_without_indent() { let mut string = StringBuffer::new(false, Inden
} #[test] fn hard_wrap_long_second_line_with_indent() { let mut string = StringBuffer::new(false, Indent::flat(1), 32); string.line_width = 8; string.push_str("123"); string.push_str("456789"); string.push_str("0"); assert_eq!(" 123\n 456789\n 0".to_string(), string.to_string()); } } /* pub enum Indent { /// Set the indent of the first line to a different level than subsequent lines. /// /// `AbsoluteFirstLine(first_line_level, subsequent_lines)` AbsoluteFirstLine(u8, u8), /// Set the indent level of the first line relative to subsequent lines. /// /// `RelativeFirstLine(relative_first_line_level, subsequent_lines)` /// /// # Note /// /// `RelativeFirstLine(-1, 1)` is equivalent to `AbsoluteFirstLine(0, 1)`. RelativeFirstLine(i8, u8), /// All lines are indented to the same level. Flat(u8) } */ #[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Indent { pub first: u8, pub main: u8 } impl Indent { pub fn absolute_first_line(first_line_level: u8, main_indent_level: u8) -> Indent { Indent{ first: first_line_level, main: main_indent_level } } pub fn relative_first_line(first_line_level: i8, main_indent_level: u8) -> Indent { if (first_line_level as i32) + (main_indent_level as i32) < 0 { panic!( "Minimum allowed relative first line indent level for main indent level {} is -{}, got {}", main_indent_level, main_indent_level, first_line_level ); } Indent { first: (main_indent_level as i16 + first_line_level as i16) as u8, main: main_indent_level } } pub fn flat(indent_level: u8) -> Indent { Indent { first: indent_level, main: indent_level } } pub fn zero() -> Indent { Indent { first: 0, main: 0 } } } #[cfg(test)] mod indent_tests { use super::*; #[test] fn zero() { let indent = Indent::zero(); assert_eq!(indent.first, 0); assert_eq!(indent.main, 0); } #[test] fn flat() { for indent_level in [0, 4, 89, 223].iter() { let indent = Indent::flat(*indent_level); assert_eq!(indent.first, *indent_level); assert_eq!(indent.main, *indent_level); } } #[test] fn 
absolute_first_line() { for (first_indent_level, main_indent_level) in [(0, 0), (0, 8), (43, 0), (76, 20)].iter() { let indent = Indent::absolute_first_line(*first_indent_level, *main_indent_level); assert_eq!(indent.first, *first_indent_level); assert_eq!(indent.main, *main_indent_level); } } #[test] fn relative_first_line() { for (first_indent_level, main_indent_level) in [(0, 0), (-2, 5), (7, 9)].iter() { let indent = Indent::relative_first_line(*first_indent_level, *main_indent_level); assert_eq!( indent.first, (*first_indent_level + (*main_indent_level as i8)) as u8 ); assert_eq!(indent.main, *main_indent_level as u8); } } #[test] #[should_panic] fn invalid_relative_first_line_panics() { let indent = Indent::relative_first_line(-2, 1); println!("Expected invalid relative indent with negative first line to panic, got {:?} indent instead", indent); } } static INDENT_SIZE: u8 = 4; pub fn get_indent(level: u8) -> String { let mut buf = String::with_capacity((level * INDENT_SIZE) as usize); for _ in 0..level { buf.push_str(" "); } return buf; }
t::flat(0), 32); string.line_width = 4; string.push_str("12345"); string.push_str("678"); assert_eq!("12345\n678".to_string(), string.to_string());
function_block-random_span
[ { "content": "fn parse_line(line: String) -> Result<SWCLine, String> {\n\n let trimmed_line = line.trim(); // Remove leading and trailing whitespace.\n\n\n\n let parse_result: SWCLine;\n\n if trimmed_line.is_empty() {\n\n // Line is empty.\n\n parse_result = SWCLine::Blank;\n\n } else {\n\n // Line is not empty.\n\n\n\n if trimmed_line.chars().next().unwrap() == '#' {\n\n // Parse line as a comment, causing parse_result to be\n\n // SWCLine::Comment\n\n parse_result = SWCLine::Comment(trimmed_line.to_string());\n\n } else {\n\n // Parse line as a compartment, causing parse_result to be\n\n // SWCLine::SWCCompartment\n\n parse_result = SWCLine::SWCCompartment(parse_line_as_compartment(trimmed_line.to_string())?);\n\n }\n\n }\n\n\n\n return Ok(parse_result);\n\n}\n\n\n", "file_path": "src/swc_parser.rs", "rank": 1, "score": 122703.52917715648 }, { "content": "fn parse_line_as_compartment(line: String) -> Result<SWCCompartment, String> {\n\n let specs: Vec<&str> = line.split_whitespace().collect();\n\n\n\n // Check number of space-delimited items.\n\n if specs.len() != 7 {\n\n return Err(format!(\n\n \"Expected 7 space-delimited items in compartment line,\n\n got {} items instead.\",\n\n specs.len()\n\n ));\n\n }\n\n\n\n let id: usize;\n\n match specs[0].parse::<usize>() {\n\n Ok(parsed_id) => id = parsed_id,\n\n Err(_) => return Err(format!(\"Could not parse {} as a compartment id.\", specs[0])),\n\n }\n\n let compartment_kind = SWCCompartmentKind::from(\n\n specs[1]\n\n .parse::<usize>()\n", "file_path": "src/swc_parser.rs", "rank": 2, "score": 115470.88970080334 }, { "content": "fn parse_lines(reader: BufReader<File>) -> Result<SWCNeuron, String> {\n\n let mut neuron = SWCNeuron::new();\n\n\n\n for line in reader.lines() {\n\n match parse_line(line.expect(\"Could not read line.\"))? 
{\n\n SWCLine::SWCCompartment(compartment) => neuron.try_insert(compartment)?,\n\n SWCLine::Comment(_) => {},\n\n SWCLine::Blank => {}\n\n }\n\n }\n\n\n\n return Ok(neuron);\n\n}\n\n\n", "file_path": "src/swc_parser.rs", "rank": 3, "score": 97869.66806981766 }, { "content": "/// Get a filename with the extension removed.\n\n///\n\n/// If the file does not have an extension, the whole filename is returned.\n\npub fn get_filename_without_extension(filename: String) -> String {\n\n let extension_start_position: usize;\n\n match filename.rfind('.') {\n\n Some(position) => extension_start_position = position,\n\n None => extension_start_position = filename.len(),\n\n }\n\n return filename[0..extension_start_position].to_string();\n\n}\n", "file_path": "src/cli_parser.rs", "rank": 4, "score": 85212.73333831452 }, { "content": "pub fn parse_file(file_name: String) -> SWCNeuron {\n\n let reader = get_file_reader(file_name);\n\n match parse_lines(reader) {\n\n Ok(neuron) => neuron,\n\n Err(msg) => panic!(msg),\n\n }\n\n}\n\n\n", "file_path": "src/swc_parser.rs", "rank": 5, "score": 73913.29506007665 }, { "content": "fn get_file_reader(file_name: String) -> BufReader<File> {\n\n let f = File::open(file_name).expect(\"Could not open file.\");\n\n let reader = BufReader::new(f);\n\n return reader;\n\n}\n\n\n", "file_path": "src/swc_parser.rs", "rank": 6, "score": 62795.27713053426 }, { "content": "/// Get a `String` representation of an object in DOT format.\n\npub trait ToDot {\n\n fn to_dot(&self, leading_newline: bool, indent: Indent) -> String;\n\n}\n\n\n\nimpl ToDot for Vertex {\n\n /// Get a DOT representation of a single vertex.\n\n fn to_dot(&self, leading_newline: bool, indent: Indent) -> String {\n\n let mut vertex_str = StringBuffer::new(leading_newline, indent, 32);\n\n vertex_str.push_str(&self.get_id().to_string());\n\n vertex_str.push_str(\"; \");\n\n return vertex_str.to_string();\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod vertex_todot_tests {\n\n use 
super::*;\n\n\n\n #[test]\n\n fn formatted_id_appears_in_output() {\n", "file_path": "src/writer/mod.rs", "rank": 7, "score": 55499.12130968059 }, { "content": "/// Get a configured `String` representation of an object in DOT format.\n\n///\n\n/// # See also\n\n///\n\n/// - `ToDot` trait\n\npub trait ConfiguredToDot {\n\n fn to_dot(&self, leading_newline: bool, indent: Indent, config: &Config) -> String;\n\n}\n\n\n\nimpl ConfiguredToDot for Graph {\n\n fn to_dot(&self, _leading_newline: bool, indent: Indent, config: &Config) -> String {\n\n let mut graph_string =\n\n String::with_capacity(max(64 * self.len(), GRAPH_STRING_MAX_BUFSIZE));\n\n\n\n graph_string.push_str(\"graph{\");\n\n\n\n // Node configuration\n\n use vertex_config_formatter::VertexConfigFormatter;\n\n let mut buffers = VertexConfigFormatter::new(true, Indent::flat(indent.main + 2), 256);\n\n\n\n for kind in SWCCompartmentKind::iter() {\n\n buffers.weak_push_config_str(kind, &config.get_config(kind).to_dot(false, Indent::zero()));\n\n buffers.weak_push_config_str(kind, \" \");\n\n }\n\n for (_, vertex) in self.iter_vertices() {\n", "file_path": "src/writer/mod.rs", "rank": 8, "score": 53256.256613906255 }, { "content": "pub fn get_cli_arguments<'a>() -> ArgMatches<'a> {\n\n App::new(\"swc2dot\")\n\n .version(\"0.1.2\")\n\n .author(\"Emerson Harkin <[email protected]>\")\n\n .about(\"Convert SWC neuron morphologies to DOT graph language.\")\n\n .arg(\n\n Arg::with_name(\"output\")\n\n .short(\"o\")\n\n .long(\"output\")\n\n .help(\"Output file for morphology in DOT format\")\n\n .value_name(\"FILE\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"INPUT\")\n\n .help(\"SWC neuron morphology file to use as input\")\n\n .index(1)\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"config\")\n\n .short(\"c\")\n\n .long(\"config\")\n\n .help(\"Configuration file for node attributes.\")\n\n .value_name(\"FILE\")\n\n .takes_value(true),\n\n )\n\n .get_matches()\n\n}\n\n\n", "file_path": 
"src/cli_parser.rs", "rank": 9, "score": 52389.36455106862 }, { "content": "fn main() {\n\n let cli_matches = get_cli_arguments();\n\n let mut config: Config;\n\n match Config::new() {\n\n Ok(c) => config = c,\n\n _ => panic!(\"Could not load default config\"),\n\n }\n\n match cli_matches.value_of(\"config\") {\n\n Some(config_file) => {\n\n config.try_overload_from_file(&config_file.to_string());\n\n }\n\n None => {}\n\n }\n\n\n\n let input_file_name = cli_matches\n\n .value_of(\"INPUT\")\n\n .expect(\"Required argument INPUT is missing.\")\n\n .to_string();\n\n let swcneuron = parse_file(input_file_name.clone());\n\n let graphneuron = Graph::from(swcneuron);\n", "file_path": "src/main.rs", "rank": 10, "score": 35497.46812731214 }, { "content": "/// Parse the entries in a `Yaml::Hash` into a `ConfigOptionGroup`.\n\n///\n\n/// `Yaml::Null` variants result in a value of `Option::None`, and all other\n\n/// valid `Yaml` variants are coerced to `Option::Some(String)`.\n\nfn parse_config_entries(\n\n entries: &mut Entries<Yaml, Yaml>,\n\n) -> Result<ConfigOptionGroup, YamlParseError> {\n\n let mut group = ConfigOptionGroup::new();\n\n for entry in entries {\n\n let key: String = entry\n\n .key()\n\n .as_str()\n\n .expect(\"Could not get Yaml key as String.\")\n\n .to_string();\n\n let val: Option<String>;\n\n match entry.get() {\n\n Yaml::Null => val = None,\n\n Yaml::String(string) => val = Some(string.clone()),\n\n Yaml::Real(num) => val = Some(num.as_str().to_string()),\n\n Yaml::Integer(num) => val = Some(num.to_string()),\n\n Yaml::Boolean(bool_value) => val = Some(bool_value.to_string()),\n\n _ => {\n\n return Err(YamlParseError::WrongType(format!(\n\n \"Expected value of YAML {} to be Null or String-like.\",\n", "file_path": "src/config.rs", "rank": 11, "score": 32846.91018420307 }, { "content": " }\n\n\n\n #[cfg(test)]\n\n mod vertexconfigformatter_todot_tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn weak_push_yields_empty_string() {\n\n let mut formatter = 
VertexConfigFormatter::new(true, Indent::flat(1), 1024);\n\n\n\n // Push content that does not need to be printed.\n\n for kind in SWCCompartmentKind::iter() {\n\n formatter.weak_push_config_str(kind, \"unnecessary content\");\n\n }\n\n\n\n assert_eq!(formatter.to_dot(true, Indent::flat(1)), \"\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/writer/mod.rs", "rank": 12, "score": 24014.256375014844 }, { "content": " pub struct VertexConfigFormatter {\n\n vertex_config_strings: HashMap<SWCCompartmentKind, StringBuffer>,\n\n }\n\n\n\n impl VertexConfigFormatter {\n\n pub fn new(\n\n leading_newline: bool,\n\n indent: Indent,\n\n capacity: usize,\n\n ) -> VertexConfigFormatter {\n\n let mut vertex_config_strings = HashMap::with_capacity(6);\n\n\n\n for compartment_kind in SWCCompartmentKind::iter() {\n\n // Allocate buffer for vertex configuration settings for this compartment type.\n\n let mut compartment_config_string =\n\n StringBuffer::new(leading_newline, indent, capacity);\n\n\n\n // Add a descriptive header.\n\n compartment_config_string.weak_push_str(&format!(\n\n \"/* Configuration for {} vertices. 
*/\",\n", "file_path": "src/writer/mod.rs", "rank": 13, "score": 24012.42185656181 }, { "content": "impl ToDot for ShortTree {\n\n /// Get DOT representation of a rooted tree of depth 1.\n\n ///\n\n /// Rooted trees of depth 1 can be written in one line in DOT.\n\n fn to_dot(&self, leading_newline: bool, indent: Indent) -> String {\n\n let mut tree_buf = StringBuffer::new(leading_newline, indent, 128);\n\n\n\n tree_buf.push_str(&self.get_root_id().to_string());\n\n match self.get_child_ids().len() {\n\n 0 => {}\n\n 1 => tree_buf.push_str(&format!(\" -- {}\", self.get_child_ids()[0])),\n\n _ => tree_buf.push_str(&format!(\n\n \" -- {{{}}}\",\n\n self.get_child_ids().iter().format(\", \")\n\n )),\n\n }\n\n tree_buf.push_str(\";\");\n\n return tree_buf.to_string();\n\n }\n\n}\n", "file_path": "src/writer/mod.rs", "rank": 14, "score": 24011.366129137485 }, { "content": " .get_mut(&vertex_kind)\n\n .expect(&format!(\n\n \"{:?} not found in VertexConfigBuffers instance\",\n\n vertex_kind\n\n ));\n\n config_buffer.weak_push_str(string);\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n let mut total_length: usize = 0;\n\n for val in self.vertex_config_strings.values() {\n\n total_length += val.len();\n\n }\n\n return total_length;\n\n }\n\n }\n\n\n\n impl ToDot for VertexConfigFormatter {\n\n /// Get node configuration in DOT language\n\n fn to_dot(&self, leading_newline: bool, indent: Indent) -> String {\n", "file_path": "src/writer/mod.rs", "rank": 15, "score": 24010.681183888926 }, { "content": " buffers.push_config_str(vertex.get_kind(), &vertex.to_dot(false, Indent::zero()));\n\n }\n\n\n\n graph_string.push_str(&buffers.to_dot(false, Indent::flat(indent.main + 1)));\n\n\n\n // Write edges\n\n for short_tree in self.iter_short_trees() {\n\n graph_string.push_str(&short_tree.to_dot(true, Indent::flat(indent.main + 1)));\n\n }\n\n graph_string.push_str(\"\\n}\");\n\n\n\n graph_string.shrink_to_fit();\n\n return graph_string;\n\n }\n\n}\n\n\n\nmod vertex_config_formatter 
{\n\n use super::*;\n\n\n\n /// Pretty formatting of `Vertex` attributes in DOT language.\n", "file_path": "src/writer/mod.rs", "rank": 16, "score": 24009.515201638005 }, { "content": "use std::cmp::max;\n\nuse std::collections::HashMap;\n\n\n\nuse itertools::Itertools;\n\n\n\nuse crate::components::{Graph, ShortTree, Vertex};\n\nuse crate::config::Config;\n\nuse crate::swc_parser::SWCCompartmentKind;\n\n\n\nmod string_buffer;\n\n\n\npub use string_buffer::{StringBuffer, Indent, get_indent};\n\n\n\n/// Get a `String` representation of an object in DOT format.\n", "file_path": "src/writer/mod.rs", "rank": 17, "score": 24007.07890482373 }, { "content": " vertex_kind\n\n ));\n\n config_buffer.push_str(string);\n\n }\n\n\n\n /// Add an optional component to the config string.\n\n ///\n\n /// Config strings that only contain optional components are ignored by\n\n /// `to_dot()`.\n\n ///\n\n /// # Recommended use case\n\n ///\n\n /// Use this method to add configuration details for compartment types that may or may\n\n /// not exist in the current graph, and use `push_config_str()` to add the names of all\n\n /// compartments of the given type (if any exist). 
If there are no compartments of the\n\n /// given type, `push_config_str()` will never be called, and the configuration details\n\n /// added using `weak_push_config_str()` will be left out of the output of `to_dot()`.\n\n pub fn weak_push_config_str(&mut self, vertex_kind: SWCCompartmentKind, string: &str) {\n\n let config_buffer: &mut StringBuffer = self\n\n .vertex_config_strings\n", "file_path": "src/writer/mod.rs", "rank": 18, "score": 24006.280438090525 }, { "content": " let vertex = get_test_vertex();\n\n assert!(vertex\n\n .to_dot(false, Indent::zero())\n\n .contains(&format!(\"{}; \", vertex.get_id())));\n\n }\n\n\n\n #[test]\n\n fn leading_newline_zero_indent() {\n\n let vertex = get_test_vertex();\n\n assert_eq!(\n\n vertex.to_dot(true, Indent::zero()).chars().next().unwrap(),\n\n '\\n',\n\n \"Expected first char to be newline when argument `newline=true`\"\n\n )\n\n }\n\n\n\n #[test]\n\n fn no_leading_newline_zero_indent() {\n\n let vertex = get_test_vertex();\n\n assert!(\n", "file_path": "src/writer/mod.rs", "rank": 19, "score": 24006.174427071608 }, { "content": " let mut full_config_string =\n\n StringBuffer::new(leading_newline, indent, self.len() + 64);\n\n\n\n for config_string in self.vertex_config_strings.values() {\n\n if config_string.len() > 0 {\n\n // Opening brace on a new line\n\n full_config_string.newline();\n\n full_config_string.push_str(\"{\");\n\n\n\n // Configuration for the current compartment type\n\n full_config_string.push_str(config_string.as_ref());\n\n\n\n // Closing brace on a new line\n\n full_config_string.newline();\n\n full_config_string.push_str(\"}\");\n\n }\n\n }\n\n\n\n return full_config_string.to_string();\n\n }\n", "file_path": "src/writer/mod.rs", "rank": 20, "score": 24005.866168599434 }, { "content": " compartment_kind\n\n ));\n\n compartment_config_string.newline();\n\n\n\n // Insert it into HashMap that will be stored in the VertexConfigFormatter.\n\n vertex_config_strings.insert(compartment_kind, 
compartment_config_string);\n\n }\n\n\n\n // Construct the new VertexConfigFormatter\n\n VertexConfigFormatter {\n\n vertex_config_strings: vertex_config_strings,\n\n }\n\n }\n\n\n\n pub fn push_config_str(&mut self, vertex_kind: SWCCompartmentKind, string: &str) {\n\n let config_buffer: &mut StringBuffer = self\n\n .vertex_config_strings\n\n .get_mut(&vertex_kind)\n\n .expect(&format!(\n\n \"{:?} not found in VertexConfigBuffers instance\",\n", "file_path": "src/writer/mod.rs", "rank": 21, "score": 24003.746127305916 }, { "content": " vertex.to_dot(false, Indent::zero()).chars().next().unwrap() != '\\n',\n\n \"Expected first char to not be newline when arguemnt `newline=false`\"\n\n )\n\n }\n\n\n\n fn get_test_vertex() -> Vertex {\n\n use crate::swc_parser::{Point, SWCCompartment};\n\n let vertex = Vertex::from(SWCCompartment::new(\n\n 64,\n\n SWCCompartmentKind::Dendrite,\n\n Point {\n\n x: 1.0,\n\n y: 1.0,\n\n z: 1.0,\n\n },\n\n 1.0,\n\n None,\n\n ));\n\n return vertex;\n\n }\n\n}\n\n\n\nstatic GRAPH_STRING_MAX_BUFSIZE: usize = 5242880;\n\n\n", "file_path": "src/writer/mod.rs", "rank": 22, "score": 24003.5148395939 }, { "content": "#[derive(Debug, PartialEq)]\n\nenum SWCLine {\n\n SWCCompartment(SWCCompartment),\n\n Comment(String),\n\n Blank,\n\n}\n\n\n\n#[cfg(test)]\n\nmod parse_line_tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn empty_line() {\n\n assert_eq!(parse_line(\"\".to_string()).unwrap(), SWCLine::Blank);\n\n }\n\n\n\n #[test]\n\n fn line_with_only_spaces() {\n\n assert_eq!(parse_line(\" \".to_string()).unwrap(), SWCLine::Blank);\n\n }\n\n\n", "file_path": "src/swc_parser.rs", "rank": 23, "score": 21811.80950456741 }, { "content": " }\n\n\n\n #[test]\n\n fn extra_infix_spaces_do_not_trigger_error() {\n\n let line = \"2 3 4 5 6 7 1\".to_string();\n\n match parse_line_as_compartment(line) {\n\n Ok(_) => assert!(true),\n\n Err(_) => assert!(false)\n\n }\n\n }\n\n }\n\n\n\n #[cfg(test)]\n\n mod id {\n\n use super::*;\n\n\n\n #[test]\n\n fn 
first_item_is_id () {\n\n let trailing_values = \" 2 3 4 5 6 7\";\n\n for id in [10, 645, 938274].iter() {\n", "file_path": "src/swc_parser.rs", "rank": 44, "score": 18.860015144178146 }, { "content": " fn parse_bool_true_as_string() {\n\n let mut yaml = load_hash_from_str(\"key: true\");\n\n let parsed;\n\n match parse_config_entries(&mut yaml.entries()) {\n\n Ok(result) => parsed = result,\n\n Err(_) => panic!(\"Could not parse hash entry as String key and Option<String> value\"),\n\n }\n\n\n\n assert!(\n\n parsed.options.contains_key(\"key\"),\n\n \"Resulting HashMap does not contain expected key 'key'\"\n\n );\n\n assert_eq!(\n\n parsed.options[\"key\"],\n\n Some(\"true\".to_string()),\n\n \"Expected value associated with key 'key' to be 'true'\"\n\n );\n\n }\n\n\n\n #[test]\n", "file_path": "src/config.rs", "rank": 45, "score": 17.14410404189389 }, { "content": " #[test]\n\n #[allow(non_snake_case)]\n\n fn parse_bool_TRUE_as_string() {\n\n let mut yaml = load_hash_from_str(\"key: TRUE\");\n\n let parsed;\n\n match parse_config_entries(&mut yaml.entries()) {\n\n Ok(result) => parsed = result,\n\n Err(_) => panic!(\"Could not parse hash entry as String key and Option<String> value\"),\n\n }\n\n\n\n assert!(\n\n parsed.options.contains_key(\"key\"),\n\n \"Resulting HashMap does not contain expected key 'key'\"\n\n );\n\n assert_eq!(\n\n parsed.options[\"key\"],\n\n Some(\"TRUE\".to_string()),\n\n \"Expected value associated with key 'key' to be 'TRUE'\"\n\n );\n\n }\n", "file_path": "src/config.rs", "rank": 46, "score": 16.73199247203017 }, { "content": " let swc_line = format!(\"1 1 3 3 3 3 {}\", parent_id);\n\n let swc_compartment = parse_line_as_compartment(swc_line.clone()).unwrap();\n\n match swc_compartment.parent_id {\n\n Some(_) => assert!(false, \"A negative parent is no parent at all! 
Parent is not None for swc string `{}`\", swc_line),\n\n None => assert!(true)\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\npub struct SWCNeuron {\n\n compartments: BTreeMap<usize, SWCCompartment>,\n\n}\n\n\n\nimpl SWCNeuron {\n\n fn new() -> SWCNeuron {\n\n SWCNeuron {\n\n compartments: BTreeMap::<usize, SWCCompartment>::new(),\n\n }\n", "file_path": "src/swc_parser.rs", "rank": 47, "score": 15.92882584786128 }, { "content": "\n\n /// An SWC line should have exactly seven space-delimited items. These\n\n /// tests ensure that lines are parsed as the correct length.\n\n mod line_length_tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn too_many_space_delimited_items_raises_error() {\n\n let line = \"2 3 4 5 6 7 1 1\".to_string();\n\n match parse_line_as_compartment(line) {\n\n Ok(_) => assert!(false),\n\n Err(msg) => assert!(msg.contains(\"got 8 items\"))\n\n }\n\n }\n\n\n\n #[test]\n\n fn too_few_space_delimited_items_raises_error() {\n\n let line = \"2 3 4 5 6 7\".to_string();\n\n match parse_line_as_compartment(line) {\n\n Ok(_) => assert!(false),\n", "file_path": "src/swc_parser.rs", "rank": 48, "score": 15.805683432535238 }, { "content": " use super::*;\n\n\n\n fn load_hash_from_str(string: &str) -> LinkedHashMap<Yaml, Yaml> {\n\n let doc = YamlLoader::load_from_str(string)\n\n .expect(&format!(\"Could not load {} as a yaml string\", string))[0]\n\n .clone();\n\n doc.into_hash().expect(\"Could not create yaml hash\")\n\n }\n\n\n\n #[test]\n\n fn parse_single_string_entry() {\n\n let mut yaml = load_hash_from_str(\"key: value\");\n\n let parsed;\n\n match parse_config_entries(&mut yaml.entries()) {\n\n Ok(result) => parsed = result,\n\n Err(_) => panic!(\"Could not parse hash entry as String key and Option<String> value\"),\n\n }\n\n\n\n assert!(\n\n parsed.options.contains_key(\"key\"),\n", "file_path": "src/config.rs", "rank": 49, "score": 15.475938702041313 }, { "content": " #[allow(non_snake_case)]\n\n fn parse_bool_True_as_string() {\n\n let mut yaml = 
load_hash_from_str(\"key: True\");\n\n let parsed;\n\n match parse_config_entries(&mut yaml.entries()) {\n\n Ok(result) => parsed = result,\n\n Err(_) => panic!(\"Could not parse hash entry as String key and Option<String> value\"),\n\n }\n\n\n\n assert!(\n\n parsed.options.contains_key(\"key\"),\n\n \"Resulting HashMap does not contain expected key 'key'\"\n\n );\n\n assert_eq!(\n\n parsed.options[\"key\"],\n\n Some(\"True\".to_string()),\n\n \"Expected value associated with key 'key' to be 'True'\"\n\n );\n\n }\n\n\n", "file_path": "src/config.rs", "rank": 50, "score": 15.399975816955578 }, { "content": " let mut swc_line = id.to_string();\n\n swc_line.push_str(trailing_values);\n\n let swc_compartment = parse_line_as_compartment(swc_line).unwrap();\n\n assert_eq!(swc_compartment.id, *id);\n\n }\n\n }\n\n\n\n #[test]\n\n fn position() {\n\n for (x, y, z) in [(1.2, 2.2, 3.7), (4.5, 5.5, 6.5), (-32.0, 125.333, -3.4)].iter() {\n\n let swc_line = format!(\"10 1 {} {} {} 5 6\", x, y, z);\n\n let swc_compartment = parse_line_as_compartment(swc_line).unwrap();\n\n assert_eq!(swc_compartment.position, Point{x: *x, y: *y, z: *z});\n\n }\n\n }\n\n\n\n #[test]\n\n fn radius() {\n\n for rad in [4.3, 7.7, 9.9, 3.2].iter() {\n\n let swc_line = format!(\"10 1 3 3 3 {} 6\", rad);\n", "file_path": "src/swc_parser.rs", "rank": 51, "score": 15.112206145349967 }, { "content": " Err(msg) => assert!(msg.contains(\"got 6 items\"))\n\n }\n\n }\n\n\n\n #[test]\n\n fn leading_space_does_not_trigger_error() {\n\n let line = \" 2 3 4 5 6 7 1\".to_string();\n\n match parse_line_as_compartment(line) {\n\n Ok(_) => assert!(true),\n\n Err(_) => assert!(false)\n\n }\n\n }\n\n\n\n #[test]\n\n fn trailing_space_does_not_trigger_error() {\n\n let line = \"2 3 4 5 6 7 1 \".to_string();\n\n match parse_line_as_compartment(line) {\n\n Ok(_) => assert!(true),\n\n Err(_) => assert!(false)\n\n }\n", "file_path": "src/swc_parser.rs", "rank": 52, "score": 14.042110477242169 }, { "content": "}\n\n\n\nimpl 
ToDot for ConfigOptionGroup {\n\n fn to_dot(&self, leading_newline: bool, indent: Indent) -> String {\n\n let mut config_string = StringBuffer::new(leading_newline, indent, 256);\n\n\n\n // Prefix.\n\n config_string.push_str(\"node [\");\n\n\n\n let mut options_iterator = self.options.iter();\n\n // First option\n\n let (key, val) = options_iterator.next().unwrap();\n\n match val {\n\n Some(val) => config_string.push_str(&format!(\"{}={}\", key, val)),\n\n None => config_string.push_str(&key),\n\n }\n\n\n\n // All subsequent options\n\n for (key, val) in options_iterator {\n\n config_string.push_str(\",\");\n", "file_path": "src/config.rs", "rank": 53, "score": 13.336563111935014 }, { "content": " fn parse_float_as_string() {\n\n let mut yaml = load_hash_from_str(\"key: 1.23\");\n\n let parsed;\n\n match parse_config_entries(&mut yaml.entries()) {\n\n Ok(result) => parsed = result,\n\n Err(_) => panic!(\"Could not parse hash entry as String key and Option<String> value\"),\n\n }\n\n\n\n assert!(\n\n parsed.options.contains_key(\"key\"),\n\n \"Resulting HashMap does not contain expected key 'key'\"\n\n );\n\n assert_eq!(\n\n parsed.options[\"key\"],\n\n Some(\"1.23\".to_string()),\n\n \"Expected value associated with key 'key' to be '1.23'\"\n\n );\n\n }\n\n\n\n #[test]\n", "file_path": "src/config.rs", "rank": 54, "score": 12.818273115597172 }, { "content": " fn parse_int_as_string() {\n\n let mut yaml = load_hash_from_str(\"key: 23\");\n\n let parsed;\n\n match parse_config_entries(&mut yaml.entries()) {\n\n Ok(result) => parsed = result,\n\n Err(_) => panic!(\"Could not parse hash entry as String key and Option<String> value\"),\n\n }\n\n\n\n assert!(\n\n parsed.options.contains_key(\"key\"),\n\n \"Resulting HashMap does not contain expected key 'key'\"\n\n );\n\n assert_eq!(\n\n parsed.options[\"key\"],\n\n Some(\"23\".to_string()),\n\n \"Expected value associated with key 'key' to be '23'\"\n\n );\n\n }\n\n\n\n #[test]\n", "file_path": "src/config.rs", "rank": 
55, "score": 12.818273115597172 }, { "content": " \"Resulting HashMap does not contain expected key 'key'\"\n\n );\n\n assert_eq!(\n\n parsed.options[\"key\"],\n\n Some(\"value\".to_string()),\n\n \"Expected value associated with key 'key' to be 'value'\"\n\n );\n\n }\n\n\n\n #[test]\n\n fn parse_multiple_string_entries() {\n\n let yaml_string = \"key1: value1\\nkey2: value2\";\n\n let mut yaml = load_hash_from_str(yaml_string);\n\n let parsed;\n\n match parse_config_entries(&mut yaml.entries()) {\n\n Ok(result) => parsed = result,\n\n Err(_) => panic!(\"Could not parse hash entry as String key and Option<String> value\"),\n\n }\n\n\n\n assert!(\n", "file_path": "src/config.rs", "rank": 56, "score": 12.72293437850121 }, { "content": " \"Expected parent_id for compartment {} to be less than {},\n\n got {} instead.\",\n\n id, id, parsed_parent_id\n\n ));\n\n }\n\n parent_id = Some(parsed_parent_id);\n\n }\n\n\n\n return Ok(SWCCompartment::new(\n\n id,\n\n compartment_kind,\n\n position,\n\n radius,\n\n parent_id,\n\n ));\n\n}\n\n\n\n#[cfg(test)]\n\nmod parse_line_as_compartment_tests {\n\n use super::*;\n", "file_path": "src/swc_parser.rs", "rank": 57, "score": 12.441222457496828 }, { "content": "\n\npub struct Graph {\n\n vertices: BTreeMap<usize, Vertex>,\n\n}\n\n\n\nimpl Graph {\n\n pub fn iter_vertices(&self) -> Iter<usize, Vertex> {\n\n self.vertices.iter()\n\n }\n\n\n\n pub fn iter_short_trees(&self) -> ShortTreeIter {\n\n let mut short_trees = Vec::with_capacity(self.vertices.len());\n\n for (id, vertex) in self.iter_vertices() {\n\n short_trees.push(ShortTree::from(vertex.clone()));\n\n }\n\n ShortTreeIter::new(short_trees)\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n self.vertices.len()\n", "file_path": "src/components.rs", "rank": 58, "score": 12.406686964353623 }, { "content": "\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub struct SWCCompartment {\n\n pub id: usize,\n\n pub kind: SWCCompartmentKind,\n\n pub position: Point,\n\n pub radius: f64,\n\n 
pub parent_id: Option<usize>,\n\n}\n\n\n\nimpl SWCCompartment {\n\n pub fn new(\n\n id: usize,\n\n kind: SWCCompartmentKind,\n\n position: Point,\n\n radius: f64,\n\n parent_id: Option<usize>,\n\n ) -> SWCCompartment {\n\n SWCCompartment {\n\n id: id,\n", "file_path": "src/swc_parser.rs", "rank": 59, "score": 12.36170138546312 }, { "content": "use std::collections::{btree_map::Iter, BTreeMap};\n\nuse std::convert::From;\n\n\n\nuse crate::swc_parser::{SWCCompartment, SWCCompartmentKind, SWCNeuron};\n\n\n\n#[derive(Clone)]\n\npub struct Vertex {\n\n data: SWCCompartment,\n\n children: Vec<usize>,\n\n}\n\n\n\nimpl Vertex {\n\n pub fn get_id(&self) -> usize {\n\n self.data.id\n\n }\n\n\n\n pub fn get_parent_id(&self) -> Option<usize> {\n\n self.data.parent_id\n\n }\n\n\n", "file_path": "src/components.rs", "rank": 60, "score": 11.986035536528004 }, { "content": "\n\n #[test]\n\n #[allow(non_snake_case)]\n\n fn parse_empty_value_as_None() {\n\n let mut yaml = load_hash_from_str(\"key:\");\n\n let parsed;\n\n match parse_config_entries(&mut yaml.entries()) {\n\n Ok(result) => parsed = result,\n\n Err(_) => panic!(\"Could not parse hash entry as String key and Option<String> value\"),\n\n }\n\n\n\n assert!(\n\n parsed.options.contains_key(\"key\"),\n\n \"Resulting HashMap does not contain expected key 'key'\"\n\n );\n\n assert_eq!(\n\n parsed.options[\"key\"], None,\n\n \"Expected value associated with key 'key' to be None\"\n\n );\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 61, "score": 11.713234867400871 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod vertex_from_swccompartment_tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn id() {\n\n let swc_compartment = get_test_swccompartment(1, None);\n\n let vertex = Vertex::from(swc_compartment);\n\n assert_eq!(vertex.get_id(), swc_compartment.id);\n\n }\n\n\n\n #[test]\n\n fn kind() {\n\n let swc_compartment = get_test_swccompartment(1, None);\n\n let vertex = Vertex::from(swc_compartment);\n\n 
assert_eq!(vertex.get_kind(), swc_compartment.kind);\n\n }\n\n\n", "file_path": "src/components.rs", "rank": 62, "score": 10.955497555697297 }, { "content": " return Ok(config[0].clone());\n\n }\n\n\n\n fn try_read_file(filename: &str) -> Result<String, YamlParseError> {\n\n let yaml_string;\n\n match read_to_string(filename) {\n\n Ok(string) => yaml_string = string,\n\n Err(msg) => {\n\n return Err(YamlParseError::FileRead(format!(\n\n \"Could not open configuration file {}: {}\",\n\n filename, msg\n\n )))\n\n }\n\n }\n\n return Ok(yaml_string);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod config_tests {\n", "file_path": "src/config.rs", "rank": 63, "score": 10.909078582305263 }, { "content": " pub fn get_child_ids(&self) -> &Vec<usize> {\n\n &self.children\n\n }\n\n\n\n pub fn get_kind(&self) -> SWCCompartmentKind {\n\n self.data.kind\n\n }\n\n\n\n fn add_child(&mut self, child: &Vertex) {\n\n self.children.push(child.get_id());\n\n }\n\n}\n\n\n\nimpl From<SWCCompartment> for Vertex {\n\n fn from(compartment: SWCCompartment) -> Vertex {\n\n Vertex {\n\n data: compartment,\n\n children: Vec::<usize>::with_capacity(4),\n\n }\n\n }\n", "file_path": "src/components.rs", "rank": 64, "score": 10.833412895156037 }, { "content": " #[test]\n\n fn swc_compartment() {\n\n let compartment_line = \"2 3 4 5 6 7 1\".to_string();\n\n let expected_compartment = parse_line_as_compartment(compartment_line.clone())\n\n .expect(\"Broken test, could not parse compartment line.\");\n\n match parse_line(compartment_line.clone()).unwrap() {\n\n SWCLine::SWCCompartment(compartment) => assert_eq!(compartment, expected_compartment),\n\n _ => assert!(\n\n false,\n\n \"Compartment string {} not parsed as a compartment\",\n\n compartment_line\n\n ),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/swc_parser.rs", "rank": 65, "score": 10.666723987662484 }, { "content": "use std::fs::read_to_string;\n\n\n\nuse linked_hash_map::{Entries, LinkedHashMap};\n\nuse yaml_rust::{yaml::Yaml, YamlLoader};\n\n\n\nuse 
crate::swc_parser::SWCCompartmentKind;\n\nuse crate::writer::{Indent, StringBuffer, ToDot};\n\n\n\nstatic OPTION_GROUPS: &'static [&'static str] = &[\n\n \"soma\",\n\n \"axon\",\n\n \"dendrite\",\n\n \"apicaldendrite\",\n\n \"undefined\",\n\n \"custom\",\n\n];\n\n\n\npub struct Config {\n\n option_groups: LinkedHashMap<&'static str, ConfigOptionGroup>,\n\n}\n", "file_path": "src/config.rs", "rank": 66, "score": 10.471830553311474 }, { "content": " \"Expected contents of config YAML to be a Hash.\".to_string(),\n\n ))\n\n }\n\n }\n\n\n\n return Ok(());\n\n }\n\n\n\n fn try_parse_yaml(yaml_string: &str) -> Result<Yaml, YamlParseError> {\n\n let config;\n\n match YamlLoader::load_from_str(&yaml_string) {\n\n Ok(yaml) => config = yaml,\n\n Err(_) => {\n\n return Err(YamlParseError::FileRead(\n\n \"Could not parse contents of configuration file as YAML\".to_string(),\n\n ))\n\n }\n\n }\n\n debug_assert!(config.len() == 1);\n\n\n", "file_path": "src/config.rs", "rank": 67, "score": 10.464549603111852 }, { "content": " }\n\n}\n\n\n\nimpl From<SWCNeuron> for Graph {\n\n fn from(neuron: SWCNeuron) -> Graph {\n\n let mut graph = Graph {\n\n vertices: BTreeMap::<usize, Vertex>::new(),\n\n };\n\n\n\n for (_, compartment) in neuron.iter() {\n\n let vertex = Vertex::from(compartment.clone());\n\n\n\n match vertex.get_parent_id() {\n\n Some(parent_id) => {\n\n // Preconditions that should be guaranteed by the parser:\n\n // 1. The ID of the parent must be less than the ID of the child to comply with\n\n // SWC standard\n\n // 2. 
It is invalid for a child to have a parent that does not exist.\n\n debug_assert!(parent_id < vertex.get_id());\n\n debug_assert!(graph.vertices.contains_key(&parent_id));\n", "file_path": "src/components.rs", "rank": 68, "score": 10.391561866372957 }, { "content": "\n\nimpl Config {\n\n pub fn new() -> Result<Config, YamlParseError> {\n\n let mut config = Config {\n\n option_groups: LinkedHashMap::new(),\n\n };\n\n for group in OPTION_GROUPS {\n\n config.option_groups.insert(group, ConfigOptionGroup::new());\n\n }\n\n\n\n let default_config_bytes = include_bytes!(\"default_config.yml\");\n\n let default_config_yaml = Config::try_parse_yaml(\n\n std::str::from_utf8(default_config_bytes)\n\n .expect(\"Could not parse default config as str.\"),\n\n )?;\n\n config.try_overload_from_yaml(default_config_yaml)?;\n\n return Ok(config);\n\n }\n\n\n\n pub fn try_overload_from_file(&mut self, filename: &str) -> Result<(), YamlParseError> {\n", "file_path": "src/config.rs", "rank": 69, "score": 9.901014638573399 }, { "content": " #[test]\n\n fn comment_line() {\n\n let comment_string = \"# Comment contents\".to_string();\n\n let expected_minimum_contents = \"Comment contents\".to_string();\n\n match parse_line(comment_string.clone()).unwrap() {\n\n SWCLine::Comment(contents) => assert!(\n\n contents.contains(&expected_minimum_contents),\n\n \"Expected parsed comment string `{}` to contain `{}`, got {} instead\",\n\n comment_string,\n\n expected_minimum_contents,\n\n contents\n\n ),\n\n _ => assert!(\n\n false,\n\n \"Comment string `{}` not parsed as a comment\",\n\n comment_string\n\n ),\n\n }\n\n }\n\n\n", "file_path": "src/swc_parser.rs", "rank": 70, "score": 9.836285315516417 }, { "content": " let swc_compartment = parse_line_as_compartment(swc_line).unwrap();\n\n assert_eq!(swc_compartment.radius, *rad);\n\n }\n\n }\n\n\n\n #[test]\n\n fn positive_last_item_is_parent() {\n\n for parent_id in [2, 54, 893].iter() {\n\n let swc_line = format!(\"1000 1 3 3 3 3 {}\", 
parent_id);\n\n let swc_compartment = parse_line_as_compartment(swc_line).unwrap();\n\n match swc_compartment.parent_id {\n\n Some(parent) => assert_eq!(parent, *parent_id),\n\n None => assert!(false, \"Failed because no parent was found.\")\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn negative_last_item_means_no_parent() {\n\n for parent_id in [-244, -2, -1].iter() {\n", "file_path": "src/swc_parser.rs", "rank": 71, "score": 9.425542027201118 }, { "content": " entry.key().as_str().unwrap()\n\n )))\n\n }\n\n }\n\n group.options.insert(key, val);\n\n }\n\n return Ok(group);\n\n}\n\n\n\npub enum YamlParseError {\n\n /// Yaml enum is not the expected variant (see `yaml_rust::yaml::Yaml`).\n\n WrongType(String),\n\n /// Yaml object does not exist (see `yaml_rust::yaml::Yaml::BadValue`).\n\n BadValue,\n\n /// Could not read Yaml from a file.\n\n FileRead(String),\n\n}\n\n\n\n#[cfg(test)]\n\nmod parse_config_entries_tests {\n", "file_path": "src/config.rs", "rank": 72, "score": 9.418149976867745 }, { "content": " }\n\n\n\n fn try_insert(&mut self, compartment: SWCCompartment) -> Result<(), String> {\n\n match self.compartments.entry(compartment.id) {\n\n Entry::Occupied(_) => Err(format!(\n\n \"More than one compartment with id {} exists\",\n\n compartment.id\n\n )),\n\n Entry::Vacant(entry) => {\n\n entry.insert(compartment);\n\n Ok(())\n\n }\n\n }\n\n }\n\n\n\n pub fn iter(&self) -> Iter<usize, SWCCompartment> {\n\n self.compartments.iter()\n\n }\n\n}\n\n\n", "file_path": "src/swc_parser.rs", "rank": 73, "score": 9.341537245575427 }, { "content": "#[cfg(test)]\n\nmod swcneuron_tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn insert_compartments_with_unique_ids() {\n\n // Create a neuron and insert a single root compartment.\n\n let mut neuron = SWCNeuron::new();\n\n let mut compartment = SWCCompartment::new(0, SWCCompartmentKind::Soma, Point{x: 0.0, y: 0.0, z: 0.0}, 0.5, None);\n\n neuron.try_insert(compartment.clone()).expect(\"Could not insert root node.\");\n\n\n\n for 
compartment_id in [2, 5, 4, 7, 88, 903].iter() {\n\n compartment.parent_id = Some(0);\n\n compartment.id = *compartment_id;\n\n neuron.try_insert(compartment.clone()).expect(&format!(\"Could not insert compartment with unique id {}\", compartment_id));\n\n }\n\n }\n\n\n\n #[test]\n\n fn insert_compartment_with_duplicate_ids_is_error() {\n", "file_path": "src/swc_parser.rs", "rank": 74, "score": 9.077718233115979 }, { "content": "use std::collections::{\n\n btree_map::{Entry, Iter},\n\n BTreeMap,\n\n};\n\nuse std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\n\n", "file_path": "src/swc_parser.rs", "rank": 75, "score": 9.045179142758847 }, { "content": "use std::fs::File;\n\nuse std::io::Write;\n\n\n\nmod cli_parser;\n\nmod components;\n\nmod config;\n\nmod swc_parser;\n\nmod writer;\n\n\n\nuse cli_parser::{get_cli_arguments, get_filename_without_extension};\n\nuse components::Graph;\n\nuse config::Config;\n\nuse swc_parser::parse_file;\n\nuse writer::{ConfiguredToDot, Indent};\n\n\n", "file_path": "src/main.rs", "rank": 76, "score": 8.72531723662878 }, { "content": " parsed.options.contains_key(\"key1\"),\n\n \"Resulting HashMap does not contain expected key 'key1'\"\n\n );\n\n assert!(\n\n parsed.options.contains_key(\"key2\"),\n\n \"Resulting HashMap does not contain expected key 'key2'\"\n\n );\n\n assert_eq!(\n\n parsed.options[\"key1\"],\n\n Some(\"value1\".to_string()),\n\n \"Expected value associated with key 'key1' to be 'value1'\"\n\n );\n\n assert_eq!(\n\n parsed.options[\"key2\"],\n\n Some(\"value2\".to_string()),\n\n \"Expected value associated with key 'key2' to be 'value2'\"\n\n );\n\n }\n\n\n\n #[test]\n", "file_path": "src/config.rs", "rank": 77, "score": 8.681726822019817 }, { "content": " kind: kind,\n\n position: position,\n\n radius: radius,\n\n parent_id: parent_id,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, PartialEq, Debug)]\n\npub struct Point {\n\n pub x: f64,\n\n pub y: f64,\n\n pub z: f64,\n\n}\n\n\n\n/// Types of compartment 
defined by the most basic version of the SWC standard.\n\n#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]\n\npub enum SWCCompartmentKind {\n\n Undefined,\n\n Soma,\n", "file_path": "src/swc_parser.rs", "rank": 78, "score": 8.507232873523492 }, { "content": "pub struct ShortTree {\n\n root_id: usize,\n\n child_ids: Vec<usize>,\n\n}\n\n\n\nimpl ShortTree {\n\n pub fn get_root_id(&self) -> usize {\n\n self.root_id\n\n }\n\n\n\n pub fn get_child_ids(&self) -> &Vec<usize> {\n\n &self.child_ids\n\n }\n\n}\n\n\n\nimpl From<Vertex> for ShortTree {\n\n fn from(vertex: Vertex) -> ShortTree {\n\n ShortTree {\n\n root_id: vertex.get_id(),\n\n child_ids: vertex.get_child_ids().clone(),\n", "file_path": "src/components.rs", "rank": 79, "score": 8.381894464443628 }, { "content": " }\n\n}\n\n\n\npub struct ConfigOptionGroup {\n\n options: LinkedHashMap<String, Option<String>>,\n\n}\n\n\n\nimpl ConfigOptionGroup {\n\n fn new() -> ConfigOptionGroup {\n\n ConfigOptionGroup {\n\n options: LinkedHashMap::<String, Option<String>>::new(),\n\n }\n\n }\n\n\n\n fn override_options(&mut self, mut overrides: ConfigOptionGroup) {\n\n for entry in overrides.options.entries() {\n\n self.options\n\n .insert(entry.key().clone(), entry.get().clone());\n\n }\n\n }\n", "file_path": "src/config.rs", "rank": 80, "score": 8.380770774240265 }, { "content": " #[test]\n\n fn parent_id() {\n\n for parent_id in [None, Some(1), Some(7)].iter() {\n\n let swc_compartment = get_test_swccompartment(10, *parent_id);\n\n let vertex = Vertex::from(swc_compartment);\n\n assert_eq!(vertex.get_parent_id(), swc_compartment.parent_id);\n\n }\n\n }\n\n\n\n fn get_test_swccompartment(id: usize, parent_id: Option<usize>) -> SWCCompartment {\n\n use crate::swc_parser::Point;\n\n SWCCompartment::new(\n\n id,\n\n SWCCompartmentKind::Soma,\n\n Point{x: 1.0, y: 1.0, z: 1.0},\n\n 1.0,\n\n parent_id\n\n )\n\n }\n\n}\n", "file_path": "src/components.rs", "rank": 81, "score": 8.189441782533411 }, { "content": " match val {\n\n 
Some(val) => config_string.push_str(&format!(\"{}={}\", key, val)),\n\n None => config_string.push_str(&key),\n\n }\n\n }\n\n\n\n // Close delimiter.\n\n config_string.push_str(\"];\");\n\n\n\n return config_string.to_string();\n\n }\n\n}\n\n\n\n/// Parse the entries in a `Yaml::Hash` into a `ConfigOptionGroup`.\n\n///\n\n/// `Yaml::Null` variants result in a value of `Option::None`, and all other\n\n/// valid `Yaml` variants are coerced to `Option::Some(String)`.\n", "file_path": "src/config.rs", "rank": 82, "score": 7.649066189356484 }, { "content": "\n\n // Add vertex as a child of its parent.\n\n let parent = graph.vertices.get_mut(&parent_id).unwrap();\n\n parent.add_child(&vertex);\n\n }\n\n None => {}\n\n }\n\n\n\n debug_assert!(!graph.vertices.contains_key(&vertex.get_id()));\n\n graph.vertices.insert(vertex.get_id(), vertex);\n\n }\n\n\n\n return graph;\n\n }\n\n}\n\n\n\n/// A tree of height 1.\n\n///\n\n/// In DOT language, a tree of height 1 can be declared in one line.\n\n#[derive(Clone)]\n", "file_path": "src/components.rs", "rank": 83, "score": 6.908782503380755 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct ShortTreeIter {\n\n trees: Vec<ShortTree>,\n\n ptr: usize,\n\n}\n\n\n\nimpl ShortTreeIter {\n\n fn new(trees: Vec<ShortTree>) -> ShortTreeIter {\n\n ShortTreeIter {\n\n trees: trees,\n\n ptr: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl Iterator for ShortTreeIter {\n", "file_path": "src/components.rs", "rank": 84, "score": 6.691526008051851 }, { "content": "impl SWCCompartmentKind {\n\n pub fn iter() -> SWCCompartmentKindIterator {\n\n SWCCompartmentKindIterator::new()\n\n }\n\n}\n\n\n\nimpl From<usize> for SWCCompartmentKind {\n\n fn from(kind: usize) -> SWCCompartmentKind {\n\n match kind {\n\n 0 => SWCCompartmentKind::Undefined,\n\n 1 => SWCCompartmentKind::Soma,\n\n 2 => SWCCompartmentKind::Axon,\n\n 3 => SWCCompartmentKind::Dendrite,\n\n 4 => SWCCompartmentKind::ApicalDendrite,\n\n num if num >= 5 => SWCCompartmentKind::Custom,\n\n _ 
=> panic!(\"kind is not usize\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/swc_parser.rs", "rank": 85, "score": 6.6637119299472785 }, { "content": "\n\n // Get the name of the output file\n\n // Fall back to the name of the input file with .dot suffix if none is provided.\n\n let mut output_file_name: String;\n\n match cli_matches.value_of(\"output\") {\n\n Some(file_name) => output_file_name = file_name.to_string(),\n\n None => {\n\n output_file_name = get_filename_without_extension(input_file_name);\n\n output_file_name.push_str(\".dot\");\n\n }\n\n }\n\n\n\n let mut f = File::create(&output_file_name).expect(&format!(\n\n \"Could not create output file {}.\",\n\n &output_file_name\n\n ));\n\n f.write(\n\n &graphneuron\n\n .to_dot(false, Indent::flat(0), &config)\n\n .into_bytes(),\n\n );\n\n f.flush();\n\n}\n", "file_path": "src/main.rs", "rank": 86, "score": 6.653064946363512 }, { "content": " // Create a neuron and insert a single root compartment.\n\n let mut neuron = SWCNeuron::new();\n\n let mut compartment = SWCCompartment::new(1, SWCCompartmentKind::Soma, Point{x: 0.0, y: 0.0, z: 0.0}, 0.5, None);\n\n neuron.try_insert(compartment.clone()).expect(\"Could not insert root node.\");\n\n\n\n // Change all compartment attributes except id.\n\n compartment.radius += 1.0;\n\n compartment.position.x += 1.0;\n\n compartment.position.y += 1.0;\n\n compartment.position.z += 1.0;\n\n compartment.kind = SWCCompartmentKind::ApicalDendrite;\n\n compartment.parent_id = Some(0);\n\n\n\n // Since id is still the same, inserting compartment again is an error.\n\n match neuron.try_insert(compartment.clone()) {\n\n Ok(_) => assert!(false, \"Inserting compartments with the same id should be an error\"),\n\n Err(msg) => assert!(msg.to_lowercase().contains(\"more than one compartment with id 1\"))\n\n }\n\n }\n\n}\n", "file_path": "src/swc_parser.rs", "rank": 87, "score": 6.306432637117244 }, { "content": " let mut yaml = Config::try_parse_yaml_file(filename)?;\n\n 
self.try_overload_from_yaml(yaml)\n\n }\n\n\n\n pub fn get_config(&self, group: SWCCompartmentKind) -> &ConfigOptionGroup {\n\n match group {\n\n SWCCompartmentKind::Soma => &self.option_groups[\"soma\"],\n\n SWCCompartmentKind::Axon => &self.option_groups[\"axon\"],\n\n SWCCompartmentKind::Dendrite => &self.option_groups[\"dendrite\"],\n\n SWCCompartmentKind::ApicalDendrite => &self.option_groups[\"apicaldendrite\"],\n\n SWCCompartmentKind::Undefined => &self.option_groups[\"undefined\"],\n\n SWCCompartmentKind::Custom => &self.option_groups[\"custom\"],\n\n }\n\n }\n\n\n\n /// Load the contents of a file as a Yaml object.\n\n fn try_parse_yaml_file(filename: &str) -> Result<Yaml, YamlParseError> {\n\n let yaml_string = Config::try_read_file(filename)?;\n\n let yaml_object = Config::try_parse_yaml(&yaml_string)?;\n\n return Ok(yaml_object);\n", "file_path": "src/config.rs", "rank": 88, "score": 6.18126953457614 }, { "content": " Axon,\n\n Dendrite,\n\n ApicalDendrite,\n\n Custom,\n\n}\n\n\n\nuse std::fmt;\n\nimpl fmt::Display for SWCCompartmentKind {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n\n SWCCompartmentKind::Undefined => write!(f, \"undefined\"),\n\n SWCCompartmentKind::Soma => write!(f, \"somatic\"),\n\n SWCCompartmentKind::Axon => write!(f, \"axonal\"),\n\n SWCCompartmentKind::Dendrite => write!(f, \"(basal) dendritic\"),\n\n SWCCompartmentKind::ApicalDendrite => write!(f, \"apical dendritic\"),\n\n SWCCompartmentKind::Custom => write!(f, \"custom\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/swc_parser.rs", "rank": 89, "score": 6.086008960985925 }, { "content": "impl IntoIterator for SWCCompartmentKind {\n\n type Item = SWCCompartmentKind;\n\n type IntoIter = SWCCompartmentKindIterator;\n\n\n\n fn into_iter(self) -> SWCCompartmentKindIterator {\n\n SWCCompartmentKindIterator::new()\n\n }\n\n}\n\n\n\n/// Iterator over variants of `SWCCompartmentKind`\n\npub struct SWCCompartmentKindIterator {\n\n kinds: 
[SWCCompartmentKind; 6],\n\n ptr: usize,\n\n}\n\n\n\nimpl SWCCompartmentKindIterator {\n\n pub fn new() -> SWCCompartmentKindIterator {\n\n SWCCompartmentKindIterator {\n\n kinds: [\n\n SWCCompartmentKind::Undefined,\n", "file_path": "src/swc_parser.rs", "rank": 90, "score": 6.074295744828965 }, { "content": " SWCCompartmentKind::Soma,\n\n SWCCompartmentKind::Axon,\n\n SWCCompartmentKind::Dendrite,\n\n SWCCompartmentKind::ApicalDendrite,\n\n SWCCompartmentKind::Custom,\n\n ],\n\n ptr: 0,\n\n }\n\n }\n\n}\n\n\n\n/// Iterate over variants of `SWCCompartmentKind` in no particular order.\n\nimpl Iterator for SWCCompartmentKindIterator {\n\n type Item = SWCCompartmentKind;\n\n\n\n /// Get the next SWCComparmentKind\n\n fn next(&mut self) -> Option<SWCCompartmentKind> {\n\n let result;\n\n if self.ptr < self.kinds.len() {\n\n result = Some(self.kinds[self.ptr]);\n\n self.ptr += 1;\n\n } else {\n\n result = None;\n\n }\n\n return result;\n\n }\n\n}\n", "file_path": "src/swc_parser.rs", "rank": 91, "score": 5.949709499360603 }, { "content": " }\n\n\n\n fn try_overload_from_yaml(&mut self, yaml: Yaml) -> Result<(), YamlParseError> {\n\n // Check whether YAML config file contains a hash (which it should)\n\n match yaml {\n\n Yaml::Hash(mut top_level_hash) => {\n\n // Iterate over top level options that might be in the config file.\n\n for group in OPTION_GROUPS {\n\n // Check whether each config option is there.\n\n match top_level_hash.get_mut(&Yaml::from_str(*group)) {\n\n Some(mut yaml) => {\n\n // Check whether config option is a Hash, if it exists.\n\n match yaml {\n\n // If it is a hash, parse it.\n\n Yaml::Hash(hash) => {\n\n let option_group = parse_config_entries(&mut hash.entries())?;\n\n self.option_groups\n\n .get_mut(*group)\n\n .expect(&format!(\n\n \"Could not get group {} even though it exists\",\n", "file_path": "src/config.rs", "rank": 92, "score": 5.4199245006756405 }, { "content": " .expect(\"Could not parse compartmentkind\"),\n\n );\n\n let position = 
Point {\n\n x: specs[2].parse::<f64>().expect(\"Could not parse x position\"),\n\n y: specs[3].parse::<f64>().expect(\"Could not parse y position\"),\n\n z: specs[4].parse::<f64>().expect(\"Could not parse z position\"),\n\n };\n\n let radius = specs[5].parse::<f64>().expect(\"Could not parse radius\");\n\n\n\n let parent_id: Option<usize>;\n\n if specs[6].chars().next().unwrap() == '-' {\n\n // Negative parent id means there is no parent; this is the root of the\n\n // neuron graph.\n\n parent_id = None;\n\n } else {\n\n let parsed_parent_id = specs[6]\n\n .parse::<usize>()\n\n .expect(&format!(\"Could not parse parent id {}\", specs[6]));\n\n if parsed_parent_id >= id {\n\n return Err(format!(\n", "file_path": "src/swc_parser.rs", "rank": 93, "score": 5.182623415242325 }, { "content": " type Item = ShortTree;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let item;\n\n if self.ptr < self.trees.len() {\n\n item = Some(self.trees[self.ptr].clone());\n\n } else {\n\n item = None;\n\n }\n\n self.ptr += 1;\n\n return item;\n\n }\n\n}\n", "file_path": "src/components.rs", "rank": 94, "score": 4.787031071105124 }, { "content": " use super::*;\n\n\n\n #[test]\n\n fn construct_config() {\n\n match Config::new() {\n\n Ok(_) => {}\n\n Err(parse_error) => match parse_error {\n\n YamlParseError::WrongType(msg) => panic!(format!(\n\n \"Could not construct Config due to `YamlParseError::Wrongtype`: {}\",\n\n msg\n\n )),\n\n YamlParseError::FileRead(msg) => panic!(format!(\n\n \"Could not construct Config due to `YamlParseError::FileRead`: {}\",\n\n msg\n\n )),\n\n YamlParseError::BadValue => {\n\n panic!(\"Could not construct Config due to `YamlParseError::BadValue`\")\n\n }\n\n },\n\n }\n", "file_path": "src/config.rs", "rank": 95, "score": 4.539850503198069 }, { "content": "### Custom configuration\n\n\n\nswc2dot supports adding arbitrary node attributes according to SWC compartment\n\ntype using a YAML configuration file (see 
`src/default_config.yml`\n\n[here](src/default_config.yml) for an example of how the configuration file\n\nshould be formatted) passed to swc2dot using the `--config` or `-c` flag.\n\n\n\nFor example, you might have a SWC morphology in which all \"Custom\"-type\n\ncompartments represent dendritic spines. You could add the attribute\n\n`kind=spine` to the corresponding nodes and color them red by creating a\n\nconfiguration file called `spine_config.yml` (or something similar) with the\n\nfollowing contents.\n\n\n\n```yaml\n\ncustom:\n\n kind: spine\n\n style: filled\n\n fillcolor: red\n\n```\n\n\n\nThen when you use swc2dot, just supply `spine_config.yml` using the `--config`\n\nflag.\n\n\n\n```bash\n\n$ swc2dot --config spine_config.yml --output spiny.dot my_spiny_neuron.swc\n\n```\n\n\n\n## Installation\n\n\n\n### MacOS and Linux\n\n\n\nDownload a tarball containing a pre-built binary from the\n\n[releases](https://github.com/efharkin/swc2dot/releases) page (look for\n\n`swc2dot-<version>-x86_64-apple-darwin.tar.gz` (MacOS) or\n\n`swc2dot-<version>-x86_64-unknown-linux-musl.tar.gz` (Linux) under Assets),\n\nunzip it, and place the swc2dot executable anywhere in your `PATH`. Test your\n\ninstallation by running `swc2dot --help`.\n\n\n\n### Build from source\n\n\n\nYou'll need a working Rust compiler together with the build manager Cargo to\n\nbuild swc2dot. The easiest way to get set up is to install\n\n[rustup](https://rustup.rs). Try running `cargo --version` afterwards to make\n\nsure everything worked.\n\n\n\nNext, get a copy of the swc2dot git repository and open it in a terminal. On\n\nMacOS or Linux, you can do this by pasting `git clone --depth 1\n\nhttps://github.com/efharkin/swc2dot.git && cd swc2dot` into your terminal.\n\n\n\nFinally, compile swc2dot into an executable by running `cargo build --release`\n\n(note that this will download a couple of swc2dot's dependencies over the\n\ninternet). 
You can find the resulting binary in `./target/release/swc2dot`. Put\n\nit somewhere in your `PATH` and you're done! Try running `swc2dot --help` to\n\nmake sure everything worked.\n\n\n", "file_path": "README.md", "rank": 96, "score": 3.5326440288862995 }, { "content": "use clap::{App, Arg, ArgMatches};\n\n\n", "file_path": "src/cli_parser.rs", "rank": 97, "score": 2.9607802574471043 }, { "content": "is connected to nodes `2`, `3`, `4`, `14`, etc.).\n\n\n\nWe can visualize the morphological graph using any program that supports DOT\n\nformat. Example using Graphviz's neato (`neato -o example.png -Tpng\n\nmorphology.dot`):\n\n\n\n![Example morphological graph](doc/example.png)\n\n\n", "file_path": "README.md", "rank": 98, "score": 2.9372274811200474 }, { "content": "[![Build Status](https://travis-ci.com/efharkin/swc2dot.svg?token=w2Bu6kMAWz66WkG555u7&branch=master)](https://travis-ci.com/efharkin/swc2dot) [![codecov](https://codecov.io/gh/efharkin/swc2dot/branch/master/graph/badge.svg)](https://codecov.io/gh/efharkin/swc2dot)\n\n\n\n# swc2dot\n\n\n\nA simple command line tool for converting neuron morphologies in SWC format to\n\nDOT graph description language.\n\n\n\n## Description\n\n\n\nNeurons have complex and\n\n[beautiful](https://www.nytimes.com/2018/01/18/arts/design/brain-neuroscience-santiago-ramon-y-cajal-grey-gallery.html)\n\ndendritic arbours that capture synaptic inputs and shape their functional\n\nproperties. The SWC file format\n\n[widely](https://alleninstitute.github.io/AllenSDK/cell_types.html#morphology-swc-files)\n\n[used](http://www.neuromorpho.org) to register dendritic morphology is simpler\n\nand [not as\n\nbeautiful](http://www.neuronland.org/NLMorphologyConverter/MorphologyFormats/SWC/Spec.html).\n\n\n\nThe SWC format provides a minimal description of the morphology of a neuron as\n\nan acyclic graph of tapered cylinders arranged in 3D space. 
While SWC is well\n\nsupported by specialized software packages for neuron modelling, it is not\n\nsupported by more general programs for visualizing and manipulating graphs.\n\n\n\nswc2dot converts neuron morphologies stored in SWC to human-readable [DOT\n\nformat](https://graphs.grevian.org/example) supported by\n\n[Graphviz](https://www.graphviz.org), [Gephi](https://gephi.org),\n\n[NetworkX](https://networkx.github.io), and others.\n\n\n\n## Usage\n\n\n\nThe command-line interface is very simple:\n\n\n\n```\n\nswc2dot [OPTIONS] <INPUT>\n\n```\n\n\n\n`<INPUT>` should be the name of a file containing a neuron morphology in SWC\n\nformat. By default, the output is placed in a file called `<INPUT>.dot`, but\n\nthe name of the output file can also be specified manually with the `--output`\n\nflag.\n\n\n\nSee `swc2dot --help` for more information.\n\n\n\n### Example\n\n\n\nSuppose we have a file called `morphology.swc` that we want to convert. The\n\ncontents of the file might look something like this:\n\n```\n\n# Metadata...\n", "file_path": "README.md", "rank": 99, "score": 2.311168147241897 } ]
Rust
src/lib.rs
kyclark/excel2txt-rust
a573b3fa0cd5017d80866b1365f8edb2875c3cb8
extern crate clap; extern crate csv; extern crate regex; extern crate tempdir; use calamine::{open_workbook, Reader, Xlsx}; use clap::{App, Arg}; use csv::WriterBuilder; use regex::Regex; use std::error::Error; use std::fs::{self, DirBuilder}; use std::path::{Path, PathBuf}; type MyResult<T> = Result<T, Box<dyn Error>>; #[derive(Debug)] pub struct Config { files: Vec<String>, outdir: String, delimiter: u8, normalize: bool, make_dirs: bool, } pub fn get_args() -> MyResult<Config> { let matches = App::new("excel2txt") .version("0.1.0") .author("Ken Youens-Clark <[email protected]>") .about("Export Excel workbooks into delimited text files") .arg( Arg::with_name("file") .short("f") .long("file") .value_name("FILE") .help("File input") .required(true) .min_values(1), ) .arg( Arg::with_name("outdir") .short("o") .long("outdir") .value_name("DIR") .default_value("out") .help("Output directory"), ) .arg( Arg::with_name("delimiter") .short("d") .long("delimiter") .value_name("DELIM") .default_value("\t") .help("Delimiter for output files"), ) .arg( Arg::with_name("normalize") .short("n") .long("normalize") .help("Normalize headers"), ) .arg( Arg::with_name("make_dirs") .short("m") .long("mkdirs") .help("Make output directory for each input file"), ) .get_matches(); let files = matches.values_of_lossy("file").unwrap(); let bad: Vec<String> = files.iter().cloned().filter(|f| !is_file(f)).collect(); if !bad.is_empty() { let msg = format!( "Invalid file{}: {}", if bad.len() == 1 { "" } else { "s" }, bad.join(", ") ); return Err(From::from(msg)); } Ok(Config { files: files, outdir: matches.value_of("outdir").unwrap().to_string(), delimiter: *matches .value_of("delimiter") .unwrap() .as_bytes() .first() .unwrap(), normalize: matches.is_present("normalize"), make_dirs: matches.is_present("make_dirs"), }) } pub fn run(config: Config) -> MyResult<()> { for (fnum, file) in config.files.into_iter().enumerate() { let path = Path::new(&file); let basename = 
path.file_stem().expect("basename"); let stem = normalize(&basename.to_string_lossy().to_string()); println!("{}: {}", fnum + 1, basename.to_string_lossy()); let mut out_dir = PathBuf::from(&config.outdir); if config.make_dirs { out_dir.push(&stem) } if !out_dir.is_dir() { DirBuilder::new().recursive(true).create(&out_dir)?; } let mut excel: Xlsx<_> = open_workbook(file)?; let sheets = excel.sheet_names().to_owned(); for sheet in sheets { let ext = if config.delimiter == 44 { "csv" } else { "txt" }; let out_file = format!("{}__{}.{}", &stem, normalize(&sheet), ext); let out_path = &out_dir.join(out_file); let mut wtr = WriterBuilder::new() .delimiter(config.delimiter) .from_path(out_path)?; println!("\tSheet '{}' -> '{}'", sheet, out_path.display()); if let Some(Ok(r)) = excel.worksheet_range(&sheet) { for (rnum, row) in r.rows().enumerate() { let vals = row .into_iter() .map(|f| format!("{}", f)) .map(|f| if rnum == 0 { normalize(&f) } else { f }) .collect::<Vec<String>>(); wtr.write_record(&vals)?; } } wtr.flush()?; } } Ok(()) } fn normalize(val: &String) -> String { let mut new = val.to_string(); let camel = Regex::new(r"(.*)([a-z])([A-Z].*)").unwrap(); loop { if let Some(cap) = camel.captures(&new) { new = format!("{}{}_{}", &cap[1], &cap[2], &cap[3]); } else { break; } } let spaces = Regex::new(r"[\s]+").unwrap(); let non_alphanum = Regex::new(r"[^a-z0-9_]").unwrap(); let mult_underbar = Regex::new(r"[_]+").unwrap(); new = new.to_ascii_lowercase(); new = spaces.replace_all(&new.to_string(), "_").to_string(); new = non_alphanum.replace_all(&new.to_string(), "").to_string(); mult_underbar.replace_all(&new.to_string(), "_").to_string() } fn is_file(path: &String) -> bool { if let Ok(meta) = fs::metadata(path) { return meta.is_file(); } else { return false; } } #[cfg(test)] mod tests { use super::*; use std::path::PathBuf; use tempdir::TempDir; #[test] fn test_normalize() { assert_eq!(normalize(&"".to_string()), ""); assert_eq!(normalize(&"ABC".to_string()), 
"abc"); assert_eq!(normalize(&"ABC DEF".to_string()), "abc_def"); assert_eq!(normalize(&"foo-b*!a,r".to_string()), "foobar"); assert_eq!(normalize(&"Foo Bar".to_string()), "foo_bar"); assert_eq!(normalize(&"Foo / Bar".to_string()), "foo_bar"); assert_eq!(normalize(&"Foo (Bar)".to_string()), "foo_bar"); assert_eq!(normalize(&"FooBarBAZ".to_string()), "foo_bar_baz"); } #[test] fn test_1() { let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); let file = manifest_dir.join(PathBuf::from("tests/test1.xlsx")); if let Ok(tmp_dir) = TempDir::new("test") { let outdir = &tmp_dir.path().display().to_string(); let conf = Config { files: vec![file.display().to_string()], outdir: outdir.to_string(), delimiter: 9, normalize: false, make_dirs: false, }; let _res = match run(conf) { Ok(_) => { let expected_dir = PathBuf::from(outdir); assert!(expected_dir.is_dir()); let expected_file = expected_dir.join("test1__sheet1.txt"); assert!(expected_file.is_file()); let contents = fs::read_to_string(expected_file).ok().unwrap(); let lines: Vec<&str> = contents.split("\n").collect(); assert_eq!(lines[0], "name\trank\tserial_number"); assert_eq!(lines[1], "Ed\tCaptain\t12345"); assert_eq!(lines[2], "Jorge\tMajor\t98765"); } Err(x) => panic!("{:?}", x), }; } } #[test] fn test_2() { let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); let file = manifest_dir.join(PathBuf::from("tests/Test 2.xlsx")); if let Ok(tmp_dir) = TempDir::new("test") { let outdir = &tmp_dir.path().display().to_string(); let conf = Config { files: vec![file.display().to_string()], outdir: outdir.to_string(), delimiter: 44, normalize: true, make_dirs: true, }; let _res = match run(conf) { Ok(_) => { let expected_dir = PathBuf::from(tmp_dir.path().join("test_2")); assert!(expected_dir.is_dir()); let expected_file = expected_dir.join("test_2__sheet1.csv"); assert!(expected_file.is_file()); let contents = fs::read_to_string(expected_file).ok().unwrap(); let lines: Vec<&str> = contents.split("\n").collect(); 
assert_eq!(lines[0], "ice_cream_flavor,peoples_rank"); assert_eq!(lines[1], "chocolate,1"); assert_eq!(lines[2], "vanilla,2"); assert_eq!(lines[3], "stravberry,3"); } Err(x) => panic!("{:?}", x), }; } } }
extern crate clap; extern crate csv; extern crate regex; extern crate tempdir; use calamine::{open_workbook, Reader, Xlsx}; use clap::{App, Arg}; use csv::WriterBuilder; use regex::Regex; use std::error::Error; use std::fs::{self, DirBuilder}; use std::path::{Path, PathBuf}; type MyResult<T> = Result<T, Box<dyn Error>>; #[derive(Debug)] pub struct Config { files: Vec<String>, outdir: String, delimiter: u8, normalize: bool, make_dirs: bool, } pub fn get_args() -> MyResult<Config> { let matches = App::new("excel2txt") .version("0.1.0") .author("Ken Youens-Clark <[email protected]>") .about("Export Excel workbooks into delimited text files") .arg( Arg::with_name("file") .short("f") .long("file") .value_name("FILE") .help("File input") .required(true) .min_values(1), ) .arg( Arg::with_name("outdir") .short("o") .long("outdir") .value_name("DIR") .default_value("out") .help("Output directory"), ) .arg( Arg::with_name("delimiter") .short("d") .long("delimiter") .value_name("DELIM") .default_value("\t") .help("Delimiter for output files"), ) .arg( Arg::with_name("normalize") .short("n") .long("normalize") .help("Normalize headers"), ) .arg( Arg::with_name("make_dirs") .short("m") .long("mkdirs") .help("Make output directory for each input file"), ) .get_matches(); let files = matches.values_of_lossy("file").unwrap(); let bad: Vec<String> = files.iter().cloned().filter(|f| !is_file(f)).collect(); if !bad.is_empty() { let msg = format!( "Invalid file{}: {}", if bad.len() == 1 { "" } else { "s" }, bad.join(", ") ); return Err(From::from(msg)); } Ok(Config { files: files, outdir: matches.value_of("outdir").unwrap().to_string(), delimiter: *matches .value_of("delimiter") .unwrap() .as_bytes() .first() .unwrap(), normalize: matches.is_present("normalize"), make_dirs: matches.is_present("make_dirs"), }) } pub fn run(config: Config) -> MyResult<()> { for (fnum, file) in config.files.into_iter().enumerate() { let path = Path::new(&file); let basename = 
path.file_stem().expect("basename"); let stem = normalize(&basename.to_string_lossy().to_string()); println!("{}: {}", fnum + 1, basename.to_string_lossy()); let mut out_dir = PathBuf::from(&config.outdir); if config.make_dirs { out_dir.push(&stem) } if !out_dir.is_dir() { DirBuilder::new().recursive(true).create(&out_dir)?; } let mut excel: Xlsx<_> = open_workbook(file)?; let sheets = excel.sheet_names().to_owned(); for sheet in sheets { let ext = if config.delimiter == 44 { "csv" } else { "txt" }; let out_file = format!("{}__{}.{}", &stem, normalize(&sheet), ext); let out_path = &out_dir.join(out_file); let mut wtr = WriterBuilder::new() .delimiter(config.delimiter) .from_path(out_path)?; println!("\tSheet '{}' -> '{}'", sheet, out_path.display()); if let Some(Ok(r)) = excel.worksheet_range(&sheet) { for (rnum, row) in r.rows().enumerate() { let vals = row .into_iter() .map(|f| format!("{}", f)) .map(|f| if rnum == 0 { normalize(&f) } else { f }) .collect::<Vec<String>>(); wtr.write_record(&vals)?; } } wtr.flush()?; } } Ok(()) } fn normalize(val: &String) -> String { let mut new = val.to_string(); let camel = Regex::new(r"(.*)([a-z])([A-Z].*)").unwrap(); loop { if let Some(cap) = camel.captures(&new) { new = format!("{}{}_{}", &cap[1], &cap[2], &cap[3]); } else { break; } } let spaces = Regex::new(r"[\s]+").unwrap(); let non_alphanum = Regex::new(r"[^a-z0-9_]").unwrap(); let mult_underbar = Regex::new(r"[_]+").unwrap(); new = new.to_ascii_lowercase(); new = spaces.replace_all(&new.to_string(), "_").to_string(); new = non_alphanum.replace_all(&new.to_string(), "").to_string(); mult_underbar.replace_all(&new.to_string(), "_").to_string() } fn is_file(path: &String) -> bool { if let Ok(meta) = fs::metadata(path) { return meta.is_file(); } else { return false; } } #[cfg(test)] mod tests { use super::*; use std::path::PathBuf; use tempdir::TempDir; #[test] fn test_normalize() { assert_eq!(normalize(&"".to_string()), ""); assert_eq!(normalize(&"ABC".to_string()), 
"abc"); assert_eq!(normalize(&"ABC DEF".to_string()), "abc_def"); assert_eq!(normalize(&"foo-b*!a,r".to_string()), "foobar"); assert_eq!(normalize(&"Foo Bar".to_string()), "foo_bar"); assert_eq!(normalize(&"Foo / Bar".to_string()), "foo_bar"); assert_eq!(normalize(&"Foo (Bar)".to_string()), "foo_bar"); assert_eq!(normalize(&"FooBarBAZ".to_string()), "foo_bar_baz"); } #[test] fn test_1() { let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); let file = manifest_dir.join(PathBuf::from("tests/test1.xlsx")); if let Ok(tmp_dir) = TempDir::new("test") { let outdir = &tmp_dir.path().display().
#[test] fn test_2() { let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); let file = manifest_dir.join(PathBuf::from("tests/Test 2.xlsx")); if let Ok(tmp_dir) = TempDir::new("test") { let outdir = &tmp_dir.path().display().to_string(); let conf = Config { files: vec![file.display().to_string()], outdir: outdir.to_string(), delimiter: 44, normalize: true, make_dirs: true, }; let _res = match run(conf) { Ok(_) => { let expected_dir = PathBuf::from(tmp_dir.path().join("test_2")); assert!(expected_dir.is_dir()); let expected_file = expected_dir.join("test_2__sheet1.csv"); assert!(expected_file.is_file()); let contents = fs::read_to_string(expected_file).ok().unwrap(); let lines: Vec<&str> = contents.split("\n").collect(); assert_eq!(lines[0], "ice_cream_flavor,peoples_rank"); assert_eq!(lines[1], "chocolate,1"); assert_eq!(lines[2], "vanilla,2"); assert_eq!(lines[3], "stravberry,3"); } Err(x) => panic!("{:?}", x), }; } } }
to_string(); let conf = Config { files: vec![file.display().to_string()], outdir: outdir.to_string(), delimiter: 9, normalize: false, make_dirs: false, }; let _res = match run(conf) { Ok(_) => { let expected_dir = PathBuf::from(outdir); assert!(expected_dir.is_dir()); let expected_file = expected_dir.join("test1__sheet1.txt"); assert!(expected_file.is_file()); let contents = fs::read_to_string(expected_file).ok().unwrap(); let lines: Vec<&str> = contents.split("\n").collect(); assert_eq!(lines[0], "name\trank\tserial_number"); assert_eq!(lines[1], "Ed\tCaptain\t12345"); assert_eq!(lines[2], "Jorge\tMajor\t98765"); } Err(x) => panic!("{:?}", x), }; } }
function_block-function_prefixed
[ { "content": "fn main() {\n\n let config = match excel2txt::get_args() {\n\n Ok(c) => c,\n\n Err(e) => {\n\n println!(\"Error: {}\", e);\n\n process::exit(1);\n\n }\n\n };\n\n\n\n if let Err(e) = excel2txt::run(config) {\n\n println!(\"Error: {}\", e);\n\n process::exit(1);\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 5, "score": 19465.868117170667 }, { "content": "# excel2txt-rust\n\n\n\nRust implementation of excel2txt\n\n\n\n```\n\nexcel2txt 0.1.0\n\nKen Youens-Clark <[email protected]>\n\nExport Excel workbooks into delimited text files\n\n\n\nUSAGE:\n\n excel2txt [FLAGS] [OPTIONS] --file <FILE>\n\n\n\nFLAGS:\n\n -h, --help Prints help information\n\n -m, --mkdirs Make output directory for each input file\n\n -n, --normalize Normalize headers\n\n -V, --version Prints version information\n\n\n\nOPTIONS:\n\n -d, --delimiter <DELIM> Delimiter for output files [default: \t]\n\n -f, --file <FILE> File input\n\n -o, --outdir <DIR> Output directory [default: out]\n\n```\n\n\n\n* The program accepts one or more Excel workbooks as the `-f|--file` argument.\n\n* Each worksheet will be exported to a text file delimited by the `-d|--delimiter` which defaults to the tab character.\n\n* All files will be written to the `-o|--outdir` directory which defaults to \"out\".\n\n* The `-m|--mkdirs` option will indicate that each workbook/Excel file will be placed into a separate directory in the output directory.\n\n* The `-n|--normalize` flag will alter the headers of each output file to lowercase values and remove non-alphanumeric characters or the underscore. 
This will also break \"CamelCase\" values into \"snake_case.\" (This same normalization will be used to create the output file names so as to avoid any possibility of creating output files with illegal or difficult characters.)\n\n* The `-V|--version` will cause the program to print the current version and exit.\n\n\n\n## Author\n\n\n\nKen Youens-Clark <[email protected]>\n\n\n\nNote: this is a Rust port of [excel2txt](https://github.com/kyclark/excel2txt-py) that I wrote in Python.\n\n\n\nSpecial thanks to Huo Linhe's [xlsx2csv](https://github.com/zitsen/xlsx2csv.rs).\n", "file_path": "README.md", "rank": 10, "score": 10.987322251398433 }, { "content": "extern crate excel2txt;\n\nuse std::process;\n\n\n", "file_path": "src/main.rs", "rank": 15, "score": 4.8268536927245185 } ]
Rust
src/mapper.rs
whitfin/efflux
fe756d76cedb962abcd1934285b9d58e6b00d48c
use crate::context::{Context, Offset}; use crate::io::Lifecycle; pub trait Mapper { fn setup(&mut self, _ctx: &mut Context) {} fn map(&mut self, key: usize, value: &[u8], ctx: &mut Context) { ctx.write(key.to_string().as_bytes(), value); } fn cleanup(&mut self, _ctx: &mut Context) {} } impl<M> Mapper for M where M: FnMut(usize, &[u8], &mut Context), { #[inline] fn map(&mut self, key: usize, value: &[u8], ctx: &mut Context) { self(key, value, ctx) } } pub(crate) struct MapperLifecycle<M> where M: Mapper, { mapper: M, } impl<M> MapperLifecycle<M> where M: Mapper, { pub(crate) fn new(mapper: M) -> Self { Self { mapper } } } impl<M> Lifecycle for MapperLifecycle<M> where M: Mapper, { #[inline] fn on_start(&mut self, ctx: &mut Context) { ctx.insert(Offset::new()); self.mapper.setup(ctx); } #[inline] fn on_entry(&mut self, input: &[u8], ctx: &mut Context) { let offset = { ctx.get_mut::<Offset>().unwrap().shift(input.len() + 2) }; self.mapper.map(offset, input, ctx); } #[inline] fn on_end(&mut self, ctx: &mut Context) { self.mapper.cleanup(ctx); } } #[cfg(test)] mod tests { use super::*; use crate::context::Contextual; use crate::io::Lifecycle; #[test] fn test_mapper_lifecycle() { let mut ctx = Context::new(); let mut mapper = MapperLifecycle::new(TestMapper); mapper.on_start(&mut ctx); { let mut vet = |input: &[u8], expected: usize| { mapper.on_entry(input, &mut ctx); let pair = ctx.get::<TestPair>(); assert!(pair.is_some()); let pair = pair.unwrap(); assert_eq!(pair.0, expected); assert_eq!(pair.1, input); }; vet(b"first_input_line", 18); vet(b"second_input_line", 37); vet(b"third_input_line", 55); } mapper.on_end(&mut ctx); } struct TestPair(usize, Vec<u8>); impl Contextual for TestPair {} struct TestMapper; impl Mapper for TestMapper { fn map(&mut self, key: usize, val: &[u8], ctx: &mut Context) { ctx.insert(TestPair(key, val.to_vec())); } } }
use crate::context::{Context, Offset}; use crate::io::Lifecycle; pub trait Mapper { fn setup(&mut self, _ctx: &mut Context) {} fn map(&mut self, key: usize, value: &[u8], ctx: &mut Context) { ctx.write(key.to_string().as_bytes(), value); } fn cleanup(&mut self, _ctx: &mut Context) {} } impl<M> Mapper for M where M: FnMut(usize, &[u8], &mut Context), { #[inline] fn map(&mut self, key: usize, value: &[u8], ctx: &mut Context) { self(key, value, ctx) } } pub(crate) struct MapperLifecycle<M> where M: Mapper, { mapper: M, } impl<M> MapperLifecycle<M> where M: Mapper, { pub(crate) fn new(mapper: M) -> Self { Self { mapper } } } impl<M> Lifecycle for MapperLifecycle<M> where M: Mapper, { #[inline] fn on_start(&mut self, ctx: &mut Context) { ctx.insert(Offset::new()); self.mapper.setup(ctx); } #[inline] fn on_entry(&mut self, input: &[u8], ctx: &mut Context) { let offset = { ctx.get_mut::<Offset>().unwrap().shift(input.len() + 2) }; self.mapper.map(offset, input, ctx); } #[inline] fn on_end(&mut self, ctx: &mut Context) { self.mapper.cleanup(ctx); } } #[cfg(test)] mod tests { use super::*; use crate::context::Contextual; use crate::io::Lifecycle; #[test] fn test_mapper_lifecycle() { let mut ctx = Context::new(); let mut mapper = MapperLifecycle::new(TestMapper); mapper.on_start(&mut ctx); { let mut vet = |input: &[u8], expected: usize| { mapper.on_entry(input, &mut ctx); let pair = ctx.get::<TestPair>(); assert!(pair.is_some());
}; vet(b"first_input_line", 18); vet(b"second_input_line", 37); vet(b"third_input_line", 55); } mapper.on_end(&mut ctx); } struct TestPair(usize, Vec<u8>); impl Contextual for TestPair {} struct TestMapper; impl Mapper for TestMapper { fn map(&mut self, key: usize, val: &[u8], ctx: &mut Context) { ctx.insert(TestPair(key, val.to_vec())); } } }
let pair = pair.unwrap(); assert_eq!(pair.0, expected); assert_eq!(pair.1, input);
random
[ { "content": "/// Marker trait to represent types which can be added to a `Context`.\n\npub trait Contextual: Any {}\n\n\n\n// all internal contextual types\n\nimpl Contextual for Configuration {}\n\nimpl Contextual for Delimiters {}\n\nimpl Contextual for Offset {}\n\n\n\n/// Context structure to represent a Hadoop job context.\n\n///\n\n/// This acts as an arbitrarily-typed bag, allowing for easy storage\n\n/// of random types between iterations of the stage. See the module\n\n/// documentation for further details and examples.\n\n#[derive(Debug, Default)]\n\npub struct Context {\n\n data: HashMap<TypeId, Box<dyn Any>>,\n\n}\n\n\n\nimpl Context {\n\n /// Creates a new `Context`.\n\n pub fn new() -> Self {\n", "file_path": "src/context/mod.rs", "rank": 1, "score": 95887.31140559321 }, { "content": "/// Executes an IO `Lifecycle` against `io::stdin`.\n\npub fn run_lifecycle<L>(mut lifecycle: L)\n\nwhere\n\n L: Lifecycle,\n\n{\n\n // lock stdin for perf\n\n let stdin = io::stdin();\n\n let stdin_lock = stdin.lock();\n\n\n\n // create a job context\n\n let mut ctx = Context::new();\n\n\n\n // fire the startup hooks\n\n lifecycle.on_start(&mut ctx);\n\n\n\n // create a line reader used to avoid vec allocations\n\n let mut lines = BufReader::new(stdin_lock).byte_lines();\n\n\n\n // read all inputs from stdin, and fire the entry hooks\n\n while let Some(Ok(input)) = lines.next() {\n\n lifecycle.on_entry(input, &mut ctx);\n\n }\n\n\n\n // fire the finalization hooks\n\n lifecycle.on_end(&mut ctx);\n\n}\n", "file_path": "src/io.rs", "rank": 2, "score": 93951.88232842984 }, { "content": "/// Lifecycle trait to allow hooking into IO streams.\n\n///\n\n/// This will be implemented by all stages of MapReduce (e.g. to\n\n/// appropriately handle buffering for the reduction stage). 
All\n\n/// trait methods default to noop, as they're all optional.\n\npub trait Lifecycle {\n\n /// Startup hook for the IO stream.\n\n fn on_start(&mut self, _ctx: &mut Context) {}\n\n\n\n /// Entry hook for the IO stream to handle input values.\n\n fn on_entry(&mut self, _input: &[u8], _ctx: &mut Context) {}\n\n\n\n /// Finalization hook for the IO stream.\n\n fn on_end(&mut self, _ctx: &mut Context) {}\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 3, "score": 85009.48785756492 }, { "content": "#[inline]\n\npub fn run_mapper<M>(mapper: M)\n\nwhere\n\n M: Mapper + 'static,\n\n{\n\n run_lifecycle(MapperLifecycle::new(mapper));\n\n}\n\n\n\n/// Executes a `Reducer` against the current `stdin`.\n", "file_path": "src/lib.rs", "rank": 4, "score": 80650.20574236786 }, { "content": "/// Trait to represent the reduction stage of MapReduce.\n\n///\n\n/// All trait methods have sane defaults to match the Hadoop MapReduce\n\n/// implementation, allowing the developer to pick and choose what they\n\n/// customize without having to write a large amount of boilerplate.\n\npub trait Reducer {\n\n /// Setup handler for the current `Reducer`.\n\n fn setup(&mut self, _ctx: &mut Context) {}\n\n\n\n /// Reduction handler for the current `Reducer`.\n\n ///\n\n /// The default implementation of this handler will emit each value against\n\n /// the key in the order they were received. 
This is typically the stage of\n\n /// interest for many MapReduce developers.\n\n fn reduce(&mut self, key: &[u8], values: &[&[u8]], ctx: &mut Context) {\n\n for value in values {\n\n ctx.write(key, value);\n\n }\n\n }\n\n\n\n /// Cleanup handler for the current `Reducer`.\n\n fn cleanup(&mut self, _ctx: &mut Context) {}\n\n}\n\n\n\n/// Enables raw functions to act as `Reducer` types.\n", "file_path": "src/reducer.rs", "rank": 5, "score": 64025.521749697145 }, { "content": "/// Simple struct to represent a word counter mapper.\n\n///\n\n/// Contains several internal patterns to use when processing\n\n/// the input text, to avoid re-compilation of Regex.\n\nstruct WordcountMapper {\n\n multi_spaces: Regex,\n\n punc_matcher: Regex,\n\n}\n\n\n\nimpl WordcountMapper {\n\n /// Creates a new `WordcountMapper` with pre-compiled `Regex`.\n\n pub fn new() -> Self {\n\n Self {\n\n // detects multiple spaces in a row\n\n multi_spaces: Regex::new(r\"\\s{2,}\").unwrap(),\n\n // detects punctuation followed by a space, or trailing\n\n punc_matcher: Regex::new(r\"[[:punct:]](\\s|$)\").unwrap(),\n\n }\n\n }\n\n}\n\n\n\n// Mapping stage implementation.\n\nimpl Mapper for WordcountMapper {\n\n /// Mapping implementation for the word counter example.\n", "file_path": "examples/wordcount/src/mapper.rs", "rank": 6, "score": 58136.23675789271 }, { "content": "fn main() {\n\n // simply run the mapping phase with our mapper\n\n efflux::run_mapper(WordcountMapper::new());\n\n}\n\n\n", "file_path": "examples/wordcount/src/mapper.rs", "rank": 7, "score": 49731.577778970954 }, { "content": "#[inline]\n\npub fn run_reducer<R>(reducer: R)\n\nwhere\n\n R: Reducer + 'static,\n\n{\n\n run_lifecycle(ReducerLifecycle::new(reducer));\n\n}\n\n\n\n// prelude module\n\npub mod prelude {\n\n //! A \"prelude\" for crates using the `efflux` crate.\n\n //!\n\n //! This prelude contains the required imports for almost all use cases, to\n\n //! 
avoid having to include modules and structures directly:\n\n //!\n\n //! ```rust\n\n //! use efflux::prelude::*;\n\n //! ```\n\n //!\n\n //! The prelude may grow over time, but it is unlikely to shrink.\n\n pub use super::context::{Configuration, Context, Contextual};\n\n pub use super::log;\n\n pub use super::mapper::Mapper;\n\n pub use super::reducer::Reducer;\n\n}\n", "file_path": "src/lib.rs", "rank": 8, "score": 48276.86888342231 }, { "content": "fn main() {\n\n // execute the mapping phase\n\n efflux::run_mapper({{project_struct}});\n\n}\n\n\n\n/// The struct which will implement the `Mapper` trait.\n\nstruct {{project_struct}};\n\n\n\n/// An empty implementation of the `Mapper` trait.\n\nimpl Mapper for {{project_struct}} {\n\n fn setup(&mut self, _ctx: &mut Context) {\n\n // Carry out any setup required in this block.\n\n }\n\n\n\n fn map(&mut self, _key: usize, _value: &[u8], _ctx: &mut Context) {\n\n // Carry out the main mapping tasks inside this block.\n\n }\n\n\n\n fn cleanup(&mut self, _ctx: &mut Context) {\n\n // Carry out any cleanup required in this block.\n\n }\n\n}\n", "file_path": "examples/template/{{project_name}}/src/mapper.rs", "rank": 9, "score": 45895.59704121083 }, { "content": " pub fn shift(&mut self, shift: usize) -> usize {\n\n self.0 += shift;\n\n self.0\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_offset_shifting() {\n\n let mut offset = Offset::new();\n\n\n\n let one = offset.shift(1);\n\n let two = offset.shift(1);\n\n let ten = offset.shift(8);\n\n\n\n assert_eq!(one, 1);\n\n assert_eq!(two, 2);\n\n assert_eq!(ten, 10);\n\n }\n\n}\n", "file_path": "src/context/offset.rs", "rank": 10, "score": 42054.60456315797 }, { "content": "//! 
Offset bindings to provide byte offsets for `Mapper` stages.\n\n\n\n/// Offset structure to allow tracking of the current byte.\n\n///\n\n/// This offers little more than abstraction over the byte\n\n/// offset being tracked manually; however the strong typing\n\n/// allows `Offset` to be added to a `Context`.\n\n#[derive(Debug, Default)]\n\npub struct Offset(usize);\n\n\n\nimpl Offset {\n\n /// Creates a new `Offset` from index `0`.\n\n pub fn new() -> Offset {\n\n Offset(0)\n\n }\n\n\n\n /// Shifts the inner offset by the provided shift value.Reducer\n\n ///\n\n /// The newly shifted offset is then returned, for convenience.\n\n #[inline]\n", "file_path": "src/context/offset.rs", "rank": 11, "score": 42049.61422148813 }, { "content": " {\n\n let types = TypeId::of::<T>();\n\n self.data.insert(types, Box::new(t));\n\n }\n\n\n\n /// Takes a `Contextual` type from the context.\n\n pub fn take<T>(&mut self) -> Option<T>\n\n where\n\n T: Contextual,\n\n {\n\n let types = TypeId::of::<T>();\n\n self.data\n\n .remove(&types)\n\n .and_then(|b| b.downcast::<T>().ok())\n\n .map(|t| *t)\n\n }\n\n\n\n /// Writes a key/value pair to the stage output.\n\n #[inline]\n\n pub fn write(&mut self, key: &[u8], val: &[u8]) {\n", "file_path": "src/context/mod.rs", "rank": 12, "score": 41291.2361858825 }, { "content": " where\n\n K: Display,\n\n V: Display,\n\n {\n\n self.write(key.to_string().as_bytes(), val.to_string().as_bytes());\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_context_creation() {\n\n let ctx = Context::new();\n\n\n\n assert!(ctx.get::<Configuration>().is_some());\n\n assert!(ctx.get::<Delimiters>().is_some());\n\n }\n\n\n", "file_path": "src/context/mod.rs", "rank": 13, "score": 41291.0671717592 }, { "content": " mref.unwrap().0 = 1;\n\n }\n\n\n\n let iref = ctx.get::<TestStruct>();\n\n\n\n assert!(iref.is_some());\n\n assert_eq!(iref.unwrap().0, 1);\n\n }\n\n\n\n #[test]\n\n fn test_taking_values() {\n\n let mut ctx = 
Context::new();\n\n let val = TestStruct(0);\n\n\n\n ctx.insert(val);\n\n\n\n let take = ctx.take::<TestStruct>();\n\n assert!(take.is_some());\n\n\n\n let take = ctx.take::<TestStruct>();\n\n assert!(take.is_none());\n\n }\n\n\n\n struct TestStruct(usize);\n\n impl Contextual for TestStruct {}\n\n}\n", "file_path": "src/context/mod.rs", "rank": 14, "score": 41290.91531471225 }, { "content": " #[test]\n\n fn test_context_insertion() {\n\n let mut ctx = Context::new();\n\n let val = TestStruct(0);\n\n\n\n ctx.insert(val);\n\n\n\n assert!(ctx.get::<TestStruct>().is_some());\n\n }\n\n\n\n #[test]\n\n fn test_mutable_references() {\n\n let mut ctx = Context::new();\n\n let val = TestStruct(0);\n\n\n\n ctx.insert(val);\n\n\n\n {\n\n let mref = ctx.get_mut::<TestStruct>();\n\n assert!(mref.is_some());\n", "file_path": "src/context/mod.rs", "rank": 15, "score": 41290.151754320854 }, { "content": "//! ```\n\n//!\n\n//! There are several types which will exist on a `Context` at various times\n\n//! throughout execution due to internal use. Whilst these can be read by the\n\n//! developer, they should rarely ever be modified as things may break. The\n\n//! current set of `Contextual` types added are as follows:\n\n//!\n\n//! - `Configuration`\n\n//! - `Delimiters`\n\n//! - `Offset`\n\n//!\n\n//! The most interesting of these types is the `Configuration` type, as it\n\n//! 
represents the job configuration provided by Hadoop.\n\nuse std::any::{Any, TypeId};\n\nuse std::collections::HashMap;\n\nuse std::fmt::Display;\n\nuse std::io::{self, Write};\n\n\n\nmod conf;\n\nmod delim;\n\nmod offset;\n\n\n\npub use self::conf::Configuration;\n\npub use self::delim::Delimiters;\n\npub use self::offset::Offset;\n\n\n\n/// Marker trait to represent types which can be added to a `Context`.\n", "file_path": "src/context/mod.rs", "rank": 16, "score": 41288.440773422655 }, { "content": " // grab a reference to the context output delimiters\n\n let out = self.get::<Delimiters>().unwrap().output();\n\n\n\n // lock the stdout buffer\n\n let stdout = io::stdout();\n\n let mut lock = stdout.lock();\n\n\n\n // write the pair and newline\n\n lock.write_all(key).unwrap();\n\n lock.write_all(out).unwrap();\n\n lock.write_all(val).unwrap();\n\n lock.write_all(b\"\\n\").unwrap();\n\n }\n\n\n\n /// Writes a key/value formatted pair to the stage output.\n\n ///\n\n /// This is a simple sugar API around `write` which allows callers to\n\n /// provide a type which implements `Display` to serialize automatically.\n\n #[inline]\n\n pub fn write_fmt<K, V>(&mut self, key: K, val: V)\n", "file_path": "src/context/mod.rs", "rank": 17, "score": 41286.82182607436 }, { "content": "//! inner: usize\n\n//! }\n\n//!\n\n//! // only `Contextual` structs can be store\n\n//! impl Contextual for MyState {}\n\n//!\n\n//! // create a new context\n\n//! let mut ctx = Context::new();\n\n//!\n\n//! // create the state\n\n//! let state = MyState { inner: 3 };\n\n//!\n\n//! // store in context\n\n//! ctx.insert(state);\n\n//!\n\n//! // get a reference back out, as an option\n\n//! let state_ref = ctx.get::<MyState>().expect(\"state not found\");\n\n//!\n\n//! // check it's the same state\n\n//! assert_eq!(state_ref.inner, 3)\n", "file_path": "src/context/mod.rs", "rank": 18, "score": 41285.580251368825 }, { "content": "//! Hadoop job context representations and bindings.\n\n//!\n\n//! 
This module exposes an arbitrarily typed map to be used as a job context\n\n//! for all Hadoop stages. It can be used to lookup different types and store\n\n//! state across executions of a task (although note that it's local to each\n\n//! mapper/reduce process). Authors of `Mapper` and `Reducer` implementations\n\n//! shouldn't need to store state here as they have mutable access to their\n\n//! struct values.\n\n//!\n\n//! Values can be references as `mut` when required, as there should be only\n\n//! as single thread owning a `Context` at any given time. An example of\n\n//! inserting a value and retrieving it is as follows:\n\n//!\n\n//! ```rust\n\n//! # extern crate efflux;\n\n//! use efflux::prelude::*;\n\n//!\n\n//! // custom state\n\n//! #[derive(Eq, PartialEq)]\n\n//! struct MyState {\n", "file_path": "src/context/mod.rs", "rank": 19, "score": 41285.33016493312 }, { "content": " // new base container\n\n let mut ctx = Self {\n\n data: HashMap::new(),\n\n };\n\n\n\n // construct default types\n\n let conf = Configuration::new();\n\n let delim = Delimiters::new(&conf);\n\n\n\n // add both\n\n ctx.insert(conf);\n\n ctx.insert(delim);\n\n\n\n ctx\n\n }\n\n\n\n /// Retrieves a potential reference to a `Contextual` type.\n\n pub fn get<T: Contextual>(&self) -> Option<&T>\n\n where\n\n T: Contextual,\n", "file_path": "src/context/mod.rs", "rank": 20, "score": 41283.62772545588 }, { "content": " {\n\n let types = TypeId::of::<T>();\n\n self.data.get(&types).and_then(|b| b.downcast_ref::<T>())\n\n }\n\n\n\n /// Retrieves a potential mutable reference to a `Contextual` type.\n\n pub fn get_mut<T>(&mut self) -> Option<&mut T>\n\n where\n\n T: Contextual,\n\n {\n\n let types = TypeId::of::<T>();\n\n self.data\n\n .get_mut(&types)\n\n .and_then(|b| b.downcast_mut::<T>())\n\n }\n\n\n\n /// Inserts a `Contextual` type into the context.\n\n pub fn insert<T>(&mut self, t: T)\n\n where\n\n T: Contextual,\n", "file_path": "src/context/mod.rs", "rank": 21, "score": 
41282.503341120035 }, { "content": "/// Simple struct to represent a word counter reducer.\n\nstruct WordcountReducer;\n\n\n\n// Reducing stage implementation.\n\nimpl Reducer for WordcountReducer {\n\n /// Reduction implementation for the word counter example.\n\n fn reduce(&mut self, key: &[u8], values: &[&[u8]], ctx: &mut Context) {\n\n // base counter\n\n let mut count = 0;\n\n\n\n for value in values {\n\n // parse each value sum them all to obtain total appearances\n\n count += std::str::from_utf8(value).unwrap().parse::<usize>().unwrap();\n\n }\n\n\n\n // write the word and the total count as bytes\n\n ctx.write(key, count.to_string().as_bytes());\n\n }\n\n}\n", "file_path": "examples/wordcount/src/reducer.rs", "rank": 22, "score": 33354.416759435335 }, { "content": "fn main() {\n\n // simply run the reduction phase with our reducer\n\n efflux::run_reducer(WordcountReducer);\n\n}\n\n\n", "file_path": "examples/wordcount/src/reducer.rs", "rank": 23, "score": 30840.667322497226 }, { "content": "fn main() {\n\n // execute the reduction phase\n\n efflux::run_reducer({{ project_struct }});\n\n}\n\n\n\n/// The struct which will implement the `Reducer` trait.\n\nstruct {{ project_struct }};\n\n\n\n/// An empty implementation of the `Reducer` trait.\n\nimpl Reducer for {{ project_struct }} {\n\n fn setup(&mut self, _ctx: &mut Context) {\n\n // Carry out any setup required in this block.\n\n }\n\n\n\n fn reduce(&mut self, _key: &[u8], _values: &[&[u8]], _ctx: &mut Context) {\n\n // Carry out the main combination tasks inside this block.\n\n }\n\n\n\n fn cleanup(&mut self, _ctx: &mut Context) {\n\n // Carry out any cleanup required in this block.\n\n }\n\n}\n", "file_path": "examples/template/{{project_name}}/src/combiner.rs", "rank": 24, "score": 28898.657426433936 }, { "content": "fn main() {\n\n // execute the reduction phase\n\n efflux::run_reducer({{ project_struct }});\n\n}\n\n\n\n/// The struct which will implement the `Reducer` trait.\n\nstruct {{ 
project_struct }};\n\n\n\n/// An empty implementation of the `Reducer` trait.\n\nimpl Reducer for {{ project_struct }} {\n\n fn setup(&mut self, _ctx: &mut Context) {\n\n // Carry out any setup required in this block.\n\n }\n\n\n\n fn reduce(&mut self, _key: &[u8], _values: &[&[u8]], _ctx: &mut Context) {\n\n // Carry out the main reducer tasks inside this block.\n\n }\n\n\n\n fn cleanup(&mut self, _ctx: &mut Context) {\n\n // Carry out any cleanup required in this block.\n\n }\n\n}\n", "file_path": "examples/template/{{project_name}}/src/reducer.rs", "rank": 25, "score": 28898.657426433936 }, { "content": " ///\n\n /// The input value is split into words using the internal patterns,\n\n /// and each word is then written to the context.\n\n fn map(&mut self, _key: usize, value: &[u8], ctx: &mut Context) {\n\n // skip empty\n\n if value.is_empty() {\n\n return;\n\n }\n\n\n\n // parse into a string using the input bytes\n\n let value = std::str::from_utf8(value).unwrap();\n\n\n\n // trim whitespaces\n\n let value = &value.trim();\n\n\n\n // remove all punctuation breaks (e.g. \". \")\n\n let value = self.punc_matcher.replace_all(&value, \"$1\");\n\n\n\n // compress all sequential spaces into a single space\n\n let value = self.multi_spaces.replace_all(&value, \" \");\n\n\n\n // split on spaces to find words\n\n for word in value.split(\" \") {\n\n // write each word\n\n ctx.write_fmt(word, 1);\n\n }\n\n }\n\n}\n", "file_path": "examples/wordcount/src/mapper.rs", "rank": 32, "score": 20021.4177659739 }, { "content": "//! 
Mapper binary for the MapReduce word counter example.\n\nextern crate efflux;\n\nextern crate regex;\n\n\n\nuse efflux::prelude::{Context, Mapper};\n\nuse regex::Regex;\n\n\n", "file_path": "examples/wordcount/src/mapper.rs", "rank": 33, "score": 20012.72839289453 }, { "content": " /// Constructs a new `Configuration` using a custom input.\n\n pub fn with_env<I, T>(pairs: I) -> Self\n\n where\n\n T: Into<String>,\n\n I: Iterator<Item = (T, T)>,\n\n {\n\n // create container\n\n let mut conf = Self {\n\n inner: HashMap::new(),\n\n };\n\n\n\n // iterate all pairs\n\n for (key, val) in pairs {\n\n let key = key.into();\n\n let val = val.into();\n\n\n\n // hadoop never has uppercased values\n\n if key.chars().any(|c| c.is_uppercase()) {\n\n continue;\n\n }\n", "file_path": "src/context/conf.rs", "rank": 34, "score": 19755.24183360054 }, { "content": " pub fn output(&self) -> &[u8] {\n\n &self.output\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_map_delimiters_creation() {\n\n let env = vec![\n\n (\"mapreduce.task.ismap\", \"true\"),\n\n (\"stream.map.input.field.separator\", \":\"),\n\n (\"stream.map.output.field.separator\", \"|\"),\n\n ];\n\n\n\n let conf = Configuration::with_env(env.into_iter());\n\n let delim = Delimiters::new(&conf);\n\n\n", "file_path": "src/context/delim.rs", "rank": 35, "score": 19752.435417449564 }, { "content": "//! Delimiter bindings to provide byte offsets for all stages.\n\nuse super::conf::Configuration;\n\n\n\n/// Delimiters struct to store the input/output separators\n\n/// for all stages of a MapReduce lifecycle. 
Once created,\n\n/// this structure should be considered immutable.\n\n#[derive(Debug)]\n\npub struct Delimiters {\n\n input: Vec<u8>,\n\n output: Vec<u8>,\n\n}\n\n\n\nimpl Delimiters {\n\n /// Creates a new `Delimiters` from a job `Configuration`.\n\n pub fn new(conf: &Configuration) -> Self {\n\n // check to see if this is map/reduce stage\n\n let stage = match conf.get(\"mapreduce.task.ismap\") {\n\n Some(val) if val == \"true\" => \"map\",\n\n _ => \"reduce\",\n\n };\n", "file_path": "src/context/delim.rs", "rank": 36, "score": 19752.25917224898 }, { "content": "\n\n // fetch the input/output separators for the current stage\n\n let input_key = format!(\"stream.{}.input.field.separator\", stage);\n\n let output_key = format!(\"stream.{}.output.field.separator\", stage);\n\n\n\n Self {\n\n // separators are optional, so default to a tab\n\n input: conf.get(&input_key).unwrap_or(\"\\t\").as_bytes().to_vec(),\n\n output: conf.get(&output_key).unwrap_or(\"\\t\").as_bytes().to_vec(),\n\n }\n\n }\n\n\n\n /// Returns a reference to the input delimiter.\n\n #[inline]\n\n pub fn input(&self) -> &[u8] {\n\n &self.input\n\n }\n\n\n\n /// Returns a reference to the output delimiter.\n\n #[inline]\n", "file_path": "src/context/delim.rs", "rank": 37, "score": 19751.740720689995 }, { "content": "\n\n /// Inserts a key/value pair into the `Configuration`.\n\n pub fn insert<T>(&mut self, key: T, val: T)\n\n where\n\n T: Into<String>,\n\n {\n\n // convert to String\n\n let mut key_str = key.into();\n\n\n\n // hadoop compatibility\n\n if key_str.contains('.') {\n\n key_str = key_str.replace(\".\", \"_\");\n\n }\n\n\n\n // insert into the internal mapping\n\n self.inner.insert(key_str, val.into());\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/context/conf.rs", "rank": 38, "score": 19751.582760887857 }, { "content": "\n\n // insert the key/value pair\n\n conf.insert(key, val);\n\n }\n\n\n\n conf\n\n }\n\n\n\n /// Retrieves a potential `Configuration` value.\n\n pub fn 
get(&self, key: &str) -> Option<&str> {\n\n // shimming for hadoop\n\n let opt = if key.contains('.') {\n\n self.inner.get(&key.replace(\".\", \"_\"))\n\n } else {\n\n self.inner.get(key)\n\n };\n\n\n\n // better than &String\n\n opt.map(|s| s.as_ref())\n\n }\n", "file_path": "src/context/conf.rs", "rank": 39, "score": 19748.666209925406 }, { "content": "//! Module to provide representation of the Hadoop `Configuration` class.\n\nuse std::collections::HashMap;\n\nuse std::env;\n\n\n\n/// Configuration struct to represent a Hadoop configuration.\n\n///\n\n/// Internally this is simply a `String` -> `String` map, as\n\n/// we don't have enough information to parse with. The struct\n\n/// implementation exists as a compatibility layer.\n\n#[derive(Debug, Default)]\n\npub struct Configuration {\n\n inner: HashMap<String, String>,\n\n}\n\n\n\nimpl Configuration {\n\n /// Constructs a new `Configuration` using Hadoop's input.\n\n pub fn new() -> Self {\n\n Self::with_env(env::vars())\n\n }\n\n\n", "file_path": "src/context/conf.rs", "rank": 40, "score": 19747.686366499063 }, { "content": "mod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_config_creation() {\n\n let env = vec![\n\n (\"FAKE_VAR\", \"1\"),\n\n (\"mapred.job.id\", \"123\"),\n\n (\"mapred_job_id\", \"123\"),\n\n ];\n\n\n\n let conf = Configuration::with_env(env.into_iter());\n\n\n\n assert_eq!(conf.inner.get(\"FAKE_VAR\"), None);\n\n assert_eq!(conf.inner.get(\"mapred.job.id\"), None);\n\n assert_eq!(conf.inner.get(\"mapred_job_id\"), Some(&\"123\".to_owned()));\n\n }\n\n\n\n #[test]\n\n fn test_retrieval_shimming() {\n", "file_path": "src/context/conf.rs", "rank": 41, "score": 19746.881427370932 }, { "content": " assert_eq!(delim.input(), b\":\");\n\n assert_eq!(delim.output(), b\"|\");\n\n }\n\n\n\n #[test]\n\n fn test_reduce_delimiters_creation() {\n\n let env = vec![\n\n (\"mapreduce.task.ismap\", \"false\"),\n\n (\"stream.reduce.input.field.separator\", \":\"),\n\n 
(\"stream.reduce.output.field.separator\", \"|\"),\n\n ];\n\n\n\n let conf = Configuration::with_env(env.into_iter());\n\n let delim = Delimiters::new(&conf);\n\n\n\n assert_eq!(delim.input(), b\":\");\n\n assert_eq!(delim.output(), b\"|\");\n\n }\n\n\n\n #[test]\n", "file_path": "src/context/delim.rs", "rank": 42, "score": 19744.759280278256 }, { "content": " fn test_delimiter_defaults() {\n\n let env = Vec::<(String, String)>::new();\n\n\n\n let conf = Configuration::with_env(env.into_iter());\n\n let delim = Delimiters::new(&conf);\n\n\n\n assert_eq!(delim.input(), b\"\\t\");\n\n assert_eq!(delim.output(), b\"\\t\");\n\n }\n\n}\n", "file_path": "src/context/delim.rs", "rank": 43, "score": 19743.60425196274 }, { "content": " let env = vec![(\"mapred.job.id\", \"123\"), (\"mapred_job_id\", \"123\")];\n\n let conf = Configuration::with_env(env.into_iter());\n\n\n\n assert_eq!(conf.get(\"mapred.job.id\"), Some(\"123\"));\n\n assert_eq!(conf.get(\"mapred_job_id\"), Some(\"123\"));\n\n }\n\n\n\n #[test]\n\n fn test_insertion_shimming() {\n\n let env = Vec::<(String, String)>::new();\n\n let mut conf = Configuration::with_env(env.into_iter());\n\n\n\n conf.insert(\"mapred.job.id\", \"123\");\n\n\n\n assert_eq!(conf.get(\"mapred_job_id\"), Some(\"123\"));\n\n }\n\n}\n", "file_path": "src/context/conf.rs", "rank": 44, "score": 19743.084382643945 }, { "content": "//! 
`Mapper` implementation for the {{project_name}} project.\n\nuse efflux::prelude::*;\n\n\n\n{% set project_clean = project_name | slugify | title | split(pat=\"-\") | join(sep=\"\") -%}\n\n{% set project_struct = project_clean ~ \"Mapper\" -%}\n\n\n", "file_path": "examples/template/{{project_name}}/src/mapper.rs", "rank": 45, "score": 17900.50365277736 }, { "content": "# example Hadoop task invocation\n\n$ hadoop jar hadoop-streaming-2.8.2.jar \\\n\n -input <INPUT> \\\n\n -output <OUTPUT> \\\n\n -mapper <MAPPER> \\\n\n -reducer <REDUCER>\n\n\n\n# example simulation run via UNIX utilities\n\n$ cat <INPUT> | <MAPPER> | sort -k1,1 | <REDUCER> > <OUTPUT>\n\n```\n\n\n\nThis can be tested using the [wordcount](examples/wordcount) example to confirm that the outputs are indeed the same. There may be some cases where output differs, but it should be sufficient for many cases.\n", "file_path": "README.md", "rank": 46, "score": 12626.77206918892 }, { "content": "# Efflux\n\n[![Crates.io](https://img.shields.io/crates/v/efflux.svg)](https://crates.io/crates/efflux) [![Build Status](https://img.shields.io/github/workflow/status/whitfin/efflux/CI)](https://github.com/whitfin/efflux/actions)\n\n\n\nEfflux is a set of Rust interfaces for MapReduce and Hadoop Streaming. It enables Rust developers to run batch jobs on Hadoop infrastructure whilst staying with the efficiency and safety they're used to.\n\n\n\nInitially written to scratch a personal itch, this crate offers simple traits to mask the internals of working with Hadoop Streaming which lend themselves well to writing jobs quickly. 
Functionality is handed off to macros where possible to provide compile time guarantees, and any other functionality is kept simple to avoid overhead wherever possible.\n\n\n\n## Installation\n\n\n\nEfflux is available on [crates.io](https://crates.io/crates/efflux) as a library crate, so you only need to add it as a dependency:\n\n\n\n```toml\n\n[dependencies]\n\nefflux = \"2.0\"\n\n```\n\n\n\nYou can then gain access to everything relevant using the `prelude` module of Efflux:\n\n\n\n```rust\n\nuse efflux::prelude::*;\n\n```\n\n\n\n## Usage\n\n\n\nEfflux comes with a handy template to help generate new projects, using the [kickstart](https://github.com/Keats/kickstart) tool. You can simply use the commands below and follow the prompt to generate a new project skeleton:\n\n\n\n```shell\n\n# install kickstart\n\n$ cargo install kickstart\n\n\n\n# create a project from the template\n\n$ kickstart -s examples/template https://github.com/whitfin/efflux\n\n```\n\n\n\nIf you'd rather not use the templating tool, you can always work from the examples found in this repository. A good place to start is the traditional [wordcount](examples/wordcount) example.\n\n\n\n## Testing\n\n\n\nTesting your binaries is actually fairly simple, as you can simulate the Hadoop phases using a basic UNIX pipeline. The following example replicates the Hadoop job flow and generates output that matches a job executed with Hadoop itself:\n\n\n\n```shell\n", "file_path": "README.md", "rank": 47, "score": 12622.262841351836 }, { "content": "# Word Counter\n\n\n\nThis example is based on the traditional MapReduce word counter example. Given an input file, this job will count the number of times each word appears. 
Sample input and outputs are provided in the `data` directory.\n\n\n\nYou can run this example with or without Hadoop, using the following shell instructions:\n\n\n\n```shell\n\n# build the binaries\n\n$ cargo build --release\n\n\n\n# run with Hadoop Streaming\n\n$ hadoop jar hadoop-streaming-2.8.2.jar \\\n\n -input ./data/input.txt \\\n\n -output ./output \\\n\n -mapper ./target/release/wordcount_mapper \\\n\n -reducer ./target/release/wordcount_reducer\n\n\n\n# run with Unix command shimming\n\n$ cat ./data/input.txt | \\\n\n ./target/release/wordcount_mapper | \\\n\n sort -k1,1 | \\\n\n ./target/release/wordcount_reducer \\\n\n > ./output/output.txt\n\n```\n", "file_path": "examples/wordcount/README.md", "rank": 48, "score": 11687.870427430442 }, { "content": "# {{project_name}}\n\n\n\nThis project was generated using the Kickstart template.\n\n\n\nYou can run this project with or without Hadoop, using the following shell instructions:\n\n\n\n```shell\n\n# build the binaries\n\n$ cargo build --release\n\n\n\n# run with Hadoop Streaming\n\n$ hadoop jar hadoop-streaming-2.8.2.jar \\\n\n -input <INPUT> \\\n\n -output <OUTPUT> \\\n\n -mapper ./target/release/{{project_name}}_mapper \\\n\n -reducer ./target/release/{{project_name}}_reducer\n\n\n\n# run with Unix command shimming\n\n$ cat <INPUT> | \\\n\n ./target/release/{{project_name}}_mapper | \\\n\n sort -k1,1 | \\\n\n ./target/release/{{project_name}}_reducer \\\n\n > <OUTPUT>\n\n```\n", "file_path": "examples/template/{{project_name}}/README.md", "rank": 49, "score": 10881.550607241026 }, { "content": " /// Finalizes the lifecycle by emitting any leftover pairs.\n\n #[inline]\n\n fn on_end(&mut self, ctx: &mut Context) {\n\n // construct a references list to avoid exposing vecs\n\n let mut values = Vec::with_capacity(self.values.len());\n\n for value in &self.values {\n\n values.push(value.as_slice());\n\n }\n\n\n\n // reduce the last batche of values\n\n self.reducer.reduce(&self.key, &values, ctx);\n\n 
self.reducer.cleanup(ctx);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::context::Contextual;\n\n use crate::io::Lifecycle;\n", "file_path": "src/reducer.rs", "rank": 50, "score": 23.323999405317586 }, { "content": "impl<R> Reducer for R\n\nwhere\n\n R: FnMut(&[u8], &[&[u8]], &mut Context),\n\n{\n\n /// Reduction handler by passing through the values to the inner closure.\n\n #[inline]\n\n fn reduce(&mut self, key: &[u8], value: &[&[u8]], ctx: &mut Context) {\n\n self(key, value, ctx)\n\n }\n\n}\n\n\n\n/// Lifecycle structure to represent a reduction.\n\npub(crate) struct ReducerLifecycle<R>\n\nwhere\n\n R: Reducer,\n\n{\n\n on: bool,\n\n key: Vec<u8>,\n\n values: Vec<Vec<u8>>,\n\n reducer: R,\n", "file_path": "src/reducer.rs", "rank": 51, "score": 21.516807801865795 }, { "content": "\n\n impl Contextual for TestPair {}\n\n\n\n impl Reducer for TestReducer {\n\n fn reduce(&mut self, key: &[u8], values: &[&[u8]], ctx: &mut Context) {\n\n let mut stored = Vec::new();\n\n for value in values {\n\n stored.push(value.to_vec());\n\n }\n\n ctx.insert(TestPair(key.to_vec(), stored));\n\n }\n\n }\n\n}\n", "file_path": "src/reducer.rs", "rank": 52, "score": 19.634940973927797 }, { "content": " let mut ctx = Context::new();\n\n let mut reducer = ReducerLifecycle::new(TestReducer);\n\n\n\n reducer.on_start(&mut ctx);\n\n reducer.on_entry(b\"key\", &mut ctx);\n\n reducer.on_entry(b\"key\\t\", &mut ctx);\n\n reducer.on_end(&mut ctx);\n\n\n\n let pair = ctx.get::<TestPair>();\n\n\n\n assert!(pair.is_some());\n\n\n\n let pair = pair.unwrap();\n\n\n\n assert_eq!(pair.0, b\"key\");\n\n assert_eq!(pair.1, vec![b\"\", b\"\"]);\n\n }\n\n\n\n struct TestPair(Vec<u8>, Vec<Vec<u8>>);\n\n struct TestReducer;\n", "file_path": "src/reducer.rs", "rank": 53, "score": 18.81337321848069 }, { "content": "where\n\n R: Reducer,\n\n{\n\n /// Creates all required state for the lifecycle.\n\n #[inline]\n\n fn on_start(&mut self, ctx: &mut Context) {\n\n 
self.reducer.setup(ctx);\n\n }\n\n\n\n /// Processes each entry by buffering sequential key entries into the\n\n /// internal group. Once the key changes the prior group is passed off\n\n /// into the actual `Reducer` trait, and the group is reset.\n\n fn on_entry(&mut self, input: &[u8], ctx: &mut Context) {\n\n let (key, value) = {\n\n // grab the delimiters from the context\n\n let delim = ctx.get::<Delimiters>().unwrap();\n\n\n\n // search (quickly) for the input byte delimiter\n\n match twoway::find_bytes(&input, delim.input()) {\n\n Some(n) if n < input.len() => {\n", "file_path": "src/reducer.rs", "rank": 54, "score": 18.70705527444254 }, { "content": "//! Efflux is a set of Rust interfaces for MapReduce and Hadoop Streaming.\n\n//!\n\n//! This crate provides easy interfaces for working with MapReduce, whether\n\n//! or not you're running on the Hadoop platform. Usage is as simple as a\n\n//! struct which implements either the `Mapper` or `Reducer` trait, as all\n\n//! other interaction is taken care of internally.\n\n//!\n\n//! Macros are provided for IO, to provide a compile-time guarantee of things\n\n//! 
such as counter/status updates, or writing to the Hadoop task logs.\n\n#![doc(html_root_url = \"https://docs.rs/efflux/2.0.1\")]\n\n#[macro_use]\n\npub mod macros;\n\npub mod context;\n\npub mod io;\n\npub mod mapper;\n\npub mod reducer;\n\n\n\nuse self::mapper::Mapper;\n\nuse self::reducer::Reducer;\n\n\n\nuse self::mapper::MapperLifecycle;\n\nuse self::reducer::ReducerLifecycle;\n\n\n\nuse self::io::run_lifecycle;\n\n\n\n/// Executes a `Mapper` against the current `stdin`.\n\n#[inline]\n", "file_path": "src/lib.rs", "rank": 55, "score": 15.54127243411957 }, { "content": "\n\n #[test]\n\n fn test_reducer_lifecycle() {\n\n let mut ctx = Context::new();\n\n let mut reducer = ReducerLifecycle::new(TestReducer);\n\n\n\n reducer.on_start(&mut ctx);\n\n\n\n {\n\n reducer.on_entry(b\"first\\tone\", &mut ctx);\n\n reducer.on_entry(b\"first\\ttwo\", &mut ctx);\n\n reducer.on_entry(b\"first\\tthree\", &mut ctx);\n\n reducer.on_entry(b\"second\\tone\", &mut ctx);\n\n reducer.on_entry(b\"second\\ttwo\", &mut ctx);\n\n reducer.on_entry(b\"second\\tthree\", &mut ctx);\n\n\n\n let pair = ctx.get::<TestPair>();\n\n\n\n assert!(pair.is_some());\n\n\n", "file_path": "src/reducer.rs", "rank": 56, "score": 14.312606415500685 }, { "content": " let pair = pair.unwrap();\n\n\n\n assert_eq!(pair.0, b\"first\");\n\n assert_eq!(pair.1, vec![&b\"one\"[..], b\"two\", b\"three\"]);\n\n }\n\n\n\n reducer.on_end(&mut ctx);\n\n\n\n let pair = ctx.get::<TestPair>();\n\n\n\n assert!(pair.is_some());\n\n\n\n let pair = pair.unwrap();\n\n\n\n assert_eq!(pair.0, b\"second\");\n\n assert_eq!(pair.1, vec![&b\"one\"[..], b\"two\", b\"three\"]);\n\n }\n\n\n\n #[test]\n\n fn test_reducer_empty_values() {\n", "file_path": "src/reducer.rs", "rank": 57, "score": 11.308954779986404 }, { "content": "}\n\n\n\n/// Basic creation for `ReducerLifecycle`\n\nimpl<R> ReducerLifecycle<R>\n\nwhere\n\n R: Reducer,\n\n{\n\n /// Constructs a new `ReducerLifecycle` instance.\n\n pub(crate) fn new(reducer: R) -> Self {\n\n 
Self {\n\n reducer,\n\n on: false,\n\n key: Vec::new(),\n\n values: Vec::new(),\n\n }\n\n }\n\n}\n\n\n\n/// `Lifecycle` implementation for the reduction stage.\n\nimpl<R> Lifecycle for ReducerLifecycle<R>\n", "file_path": "src/reducer.rs", "rank": 58, "score": 10.340579466448263 }, { "content": "//! IO binding module for the `efflux` crate.\n\n//!\n\n//! Provides lifecycles for Hadoop Streaming IO, to allow the rest\n\n//! of this crate to be a little more ignorant of how inputs flow.\n\nuse bytelines::*;\n\nuse std::io::{self, BufReader};\n\n\n\nuse crate::context::Context;\n\n\n\n/// Lifecycle trait to allow hooking into IO streams.\n\n///\n\n/// This will be implemented by all stages of MapReduce (e.g. to\n\n/// appropriately handle buffering for the reduction stage). All\n\n/// trait methods default to noop, as they're all optional.\n", "file_path": "src/io.rs", "rank": 59, "score": 10.19852646516248 }, { "content": " }\n\n\n\n // construct a references list to avoid exposing vecs\n\n let mut values = Vec::with_capacity(self.values.len());\n\n for value in &self.values {\n\n values.push(value.as_slice());\n\n }\n\n\n\n // reduce the key and value group\n\n self.reducer.reduce(&self.key, &values, ctx);\n\n\n\n // reset the key\n\n self.key.clear();\n\n self.key.extend(key);\n\n\n\n // drain the internal buffer\n\n self.values.clear();\n\n self.values.push(value.to_vec());\n\n }\n\n\n", "file_path": "src/reducer.rs", "rank": 60, "score": 9.53514999515771 }, { "content": " // split the input at the given index when applicable\n\n (&input[..n], &input[n + delim.input().len()..])\n\n }\n\n\n\n // otherwise the input is the key\n\n _ => (&input[..], &b\"\"[..]),\n\n }\n\n };\n\n\n\n // first key\n\n if !self.on {\n\n self.on = true;\n\n self.key.clear();\n\n self.key.extend(key);\n\n }\n\n\n\n // append to buffer\n\n if self.key == key {\n\n self.values.push(value.to_vec());\n\n return;\n", "file_path": "src/reducer.rs", "rank": 61, "score": 9.035612989448289 }, { 
"content": "//! Exposed structures based on the reduction stage.\n\n//!\n\n//! This module offers the `Reducer` trait, which allows a developer\n\n//! to easily create a reduction stage due to the sane defaults. Also\n\n//! offered is the `ReducerLifecycle` binding for use as an IO stage.\n\nuse crate::context::{Context, Delimiters};\n\nuse crate::io::Lifecycle;\n\n\n\n/// Trait to represent the reduction stage of MapReduce.\n\n///\n\n/// All trait methods have sane defaults to match the Hadoop MapReduce\n\n/// implementation, allowing the developer to pick and choose what they\n\n/// customize without having to write a large amount of boilerplate.\n", "file_path": "src/reducer.rs", "rank": 62, "score": 8.549745937116898 }, { "content": "//! Reducer binary for the MapReduce word counter example.\n\nextern crate efflux;\n\n\n\nuse efflux::prelude::{Context, Reducer};\n\n\n", "file_path": "examples/wordcount/src/reducer.rs", "rank": 63, "score": 4.24965399766498 }, { "content": "//! `Reducer` implementation for the {{project_name}} project.\n\nuse efflux::prelude::*;\n\n\n\n{% set project_clean = project_name | slugify | title | split(pat=\"-\") | join(sep=\"\") -%}\n\n{% set project_struct = project_clean ~ \"Reducer\" -%}\n\n\n", "file_path": "examples/template/{{project_name}}/src/reducer.rs", "rank": 64, "score": 3.9169210365140716 }, { "content": "//! Combining `Reducer` implementation for the {{project_name}} project.\n\nuse efflux::prelude::*;\n\n\n\n{% set project_clean = project_name | slugify | title | split(pat=\"-\") | join(sep=\"\") -%}\n\n{% set project_struct = project_clean ~ \"Combiner\" -%}\n\n\n", "file_path": "examples/template/{{project_name}}/src/combiner.rs", "rank": 65, "score": 3.864568176158298 }, { "content": "//! 
Compile time utilities to ease Hadoop usage.\n\n\n\n/// Prints output to the Hadoop task logs.\n\n///\n\n/// As `::std::io::stdout` is used to Hadoop Streaming writes, logging\n\n/// must go through this macro instead to successfully make it to the logs.\n\n#[macro_export]\n\nmacro_rules! log {\n\n () => (eprintln!());\n\n ($fmt:expr) => (eprintln!($fmt));\n\n ($fmt:expr, $($arg:tt)*) => (eprintln!($fmt, $($arg)*));\n\n}\n\n\n\n/// Updates a counter for the current job.\n\n///\n\n/// A counter belongs to a group by a label; as such both must be given\n\n/// to this macro in order to compile correctly. Note that neither the\n\n/// group nor label can contain a `\",\"`, as Hadoop uses this to split\n\n/// the IO stream.\n\n///\n", "file_path": "src/macros.rs", "rank": 66, "score": 1.7815359899041894 } ]
Rust
src/main.rs
erikbrinkman/stats
f709428cd7a8e734b67f32693678ed269b384fa7
use std::f64; use std::fs::File; use std::io::{self, BufRead, BufReader, BufWriter, Read, StdinLock, StdoutLock, Write}; use std::iter; use clap::{crate_version, App, Arg, ArgGroup, ArgMatches}; use inc_stats::{Mode, Percentiles, SummStats}; fn parse_command_line<'a>() -> ArgMatches<'a> { App::new("stats") .version(crate_version!()) .author("Erik Brinkman <[email protected]>") .about("Compute summary statistics of streams of numbers") .arg(Arg::with_name("count") .short("c") .long("count") .help("Print count")) .arg(Arg::with_name("min") .long("min") .help("Print min")) .arg(Arg::with_name("max") .long("max") .help("Print max")) .arg(Arg::with_name("mean") .short("m") .long("mean") .help("Print mean")) .arg(Arg::with_name("sum") .long("sum") .help("Print sum")) .arg(Arg::with_name("stddev") .short("s") .long("stddev") .help("Print sample standard deviation")) .arg(Arg::with_name("var") .short("v") .long("var") .help("Print sample variance")) .arg(Arg::with_name("stderr") .long("stderr") .help("Print standard error")) .arg(Arg::with_name("median") .long("median") .help("Print median (Note: computing median takes O(n) space")) .arg(Arg::with_name("percentiles") .short("p") .long("percentiles") .takes_value(true) .use_delimiter(true) .help("Print arbitrary percentiles. 
The argument should be a comma delimited list of floats in [0, 100] (Note: computing any percentile takes O(n) space)")) .arg(Arg::with_name("mode") .long("mode") .help("Print mode (Note: computing mode takes O(n) space")) .arg(Arg::with_name("mode-count") .long("mode-count") .help("Print the number of times the mode occured (Note: computing mode takes O(n) space")) .arg(Arg::with_name("distinct") .long("distinct") .help("Print the number of distinct values (Note: computing mode takes O(n) space")) .arg(Arg::with_name("distinct-nan") .long("distinct-nan") .help("Print the number of distinct values treading all NaNs as distinct (Note: computing mode takes O(n) space")) .group(ArgGroup::with_name("format").args(&["tsv", "json"])) .arg(Arg::with_name("tsv") .short("t") .long("tsv") .help("Output as tsv. This will force tsv output even if only one statistics is requested.")) .arg(Arg::with_name("json") .short("j") .long("json") .help("Output as compressed json")) .arg(Arg::with_name("input") .short("i") .long("input") .value_name("file") .default_value("-") .help("Take input from file")) .arg(Arg::with_name("output") .short("o") .long("output") .value_name("file") .default_value("-") .help("Write output to file")) .get_matches() } enum StatsReader<'a> { Stdin(StdinLock<'a>), File(BufReader<File>), } impl<'a> Read for StatsReader<'a> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { match self { StatsReader::Stdin(lock) => lock.read(buf), StatsReader::File(file) => file.read(buf), } } } impl<'a> BufRead for StatsReader<'a> { fn fill_buf(&mut self) -> io::Result<&[u8]> { match self { StatsReader::Stdin(lock) => lock.fill_buf(), StatsReader::File(file) => file.fill_buf(), } } fn consume(&mut self, amt: usize) { match self { StatsReader::Stdin(lock) => lock.consume(amt), StatsReader::File(file) => file.consume(amt), } } } enum StatsWriter<'a> { Stdout(StdoutLock<'a>), File(BufWriter<File>), } impl<'a> Write for StatsWriter<'a> { fn write(&mut self, buf: &[u8]) -> 
io::Result<usize> { match self { StatsWriter::Stdout(lock) => lock.write(buf), StatsWriter::File(file) => file.write(buf), } } fn flush(&mut self) -> io::Result<()> { match self { StatsWriter::Stdout(lock) => lock.flush(), StatsWriter::File(file) => file.flush(), } } } fn main() { let matches = parse_command_line(); let stdin = io::stdin(); let input = match matches.value_of("input") { Some("-") => StatsReader::Stdin(stdin.lock()), Some(file_name) => StatsReader::File(BufReader::new( File::open(file_name) .unwrap_or_else(|_| panic!("File \"{}\" does not exist", file_name)), )), None => unreachable!(), }; let stdout = io::stdout(); let mut output = match matches.value_of("output") { Some("-") => StatsWriter::Stdout(stdout.lock()), Some(file_name) => { StatsWriter::File(BufWriter::new(File::open(file_name).unwrap_or_else(|_| { panic!("Couldn't open file \"{}\" for writing", file_name) }))) } None => unreachable!(), }; let mut stats = SummStats::new(); let mut percs = Percentiles::new(); let mut mode = Mode::new(); let add_mode = ["mode", "mode-count", "distinct", "distinct-nan"] .iter() .any(|s| matches.is_present(s)); let add_percs = ["percentiles", "median"] .iter() .any(|s| matches.is_present(s)); let add_stats = [ "count", "min", "max", "mean", "sum", "stddev", "var", "stderr", ] .iter() .any(|s| matches.is_present(s)) || !(add_mode && add_percs); for line in input.lines() { for token in line .expect("Couldn't read from file") .split(char::is_whitespace) .filter(|s| !s.is_empty()) { let num: f64 = token .parse() .unwrap_or_else(|_| panic!("Could not parse \"{}\" as float", token)); if add_mode { mode.add(num); } if add_percs { percs.add(num); } if add_stats { stats.add(num); } } } let results = compute_results(&matches, &stats, &mut percs, &mode); if matches.is_present("tsv") { write_tsv(&results, &mut output); } else if matches.is_present("json") { write_json(&results, &mut output); } else if results.len() == 1 { let (_, val) = results[0]; writeln!(output, "{}", 
val).expect("couldn't write to output"); } else { write_tsv(&results, &mut output); } } fn compute_results( matches: &ArgMatches, stats: &SummStats<f64>, percs: &mut Percentiles<f64>, mode: &Mode<f64>, ) -> Vec<(String, f64)> { let mut results = Vec::new(); if matches.is_present("count") { results.push((String::from("count"), stats.count() as f64)); } if matches.is_present("min") { results.push((String::from("min"), stats.min().unwrap_or(f64::NAN))); } if matches.is_present("max") { results.push((String::from("max"), stats.max().unwrap_or(f64::NAN))); } if matches.is_present("mean") { results.push((String::from("mean"), stats.mean().unwrap_or(f64::NAN))); } if matches.is_present("sum") { results.push((String::from("sum"), stats.sum())); } if matches.is_present("stddev") { results.push(( String::from("stddev"), stats.standard_deviation().unwrap_or(f64::NAN), )); } if matches.is_present("var") { results.push((String::from("var"), stats.variance().unwrap_or(f64::NAN))); } if matches.is_present("stderr") { results.push(( String::from("stderr"), stats.standard_error().unwrap_or(f64::NAN), )); } if matches.is_present("percentiles") { let percentiles: Vec<f64> = matches .values_of("percentiles") .unwrap() .map(|p| { p.parse::<f64>() .unwrap_or_else(|_| panic!("Could not parse \"{}\" as float", p)) }) .collect(); let vals = match percs.percentiles(percentiles.iter().map(|p| p / 100.0)) { Err(_) => panic!("percentiles must between 0 and 100: {:?}", percentiles), Ok(None) => iter::repeat(f64::NAN).take(percentiles.len()).collect(), Ok(Some(pvals)) => pvals, }; for (perc, val) in percentiles.iter().zip(vals) { results.push((format!("{}%", perc), val)); } } if matches.is_present("median") { results.push((String::from("median"), percs.median().unwrap_or(f64::NAN))); } if matches.is_present("mode") { results.push((String::from("mode"), mode.mode().unwrap_or(f64::NAN))); } if matches.is_present("mode-count") { results.push((String::from("mode #"), mode.mode_count() as f64)); } if 
matches.is_present("distinct") { results.push((String::from("distinct"), mode.count_distinct() as f64)); } if matches.is_present("distinct-nan") { results.push(( String::from("distinct (NaN)"), mode.count_distinct_nan() as f64, )); } if results.is_empty() { results.push((String::from("count"), stats.count() as f64)); results.push((String::from("min"), stats.min().unwrap_or(f64::NAN))); results.push((String::from("max"), stats.max().unwrap_or(f64::NAN))); results.push((String::from("mean"), stats.mean().unwrap_or(f64::NAN))); results.push(( String::from("stddev"), stats.standard_deviation().unwrap_or(f64::NAN), )); } results } fn write_tsv(results: &[(String, f64)], output: &mut impl Write) { for &(ref name, ref val) in results { writeln!(output, "{}\t{}", name, val).expect("couldn't write to output"); } } fn write_json(results: &[(String, f64)], output: &mut impl Write) { write!(output, "{{").expect("couldn't write to output"); let mut iter = results.iter(); let &(ref name, ref val) = iter.next().unwrap(); write!(output, "\"{}\":{}", name, val).expect("couldn't write to output"); for &(ref name, ref val) in iter { write!(output, ",\"{}\":{}", name, val).expect("couldn't write to output"); } writeln!(output, "}}").expect("couldn't write to output"); }
use std::f64; use std::fs::File; use std::io::{self, BufRead, BufReader, BufWriter, Read, StdinLock, StdoutLock, Write}; use std::iter; use clap::{crate_version, App, Arg, ArgGroup, ArgMatches}; use inc_stats::{Mode, Percentiles, SummStats}; fn parse_command_line<'a>() -> ArgMatches<'a> { App::new("stats") .version(crate_version!()) .author("Erik Brinkman <[email protected]>") .about("Compute summary statistics of streams of numbers") .arg(Arg::with_name("count") .short("c") .long("count") .help("Print count")) .arg(Arg::with_name("min") .long("min") .help("Print min")) .arg(Arg::with_name("max") .long("max") .help("Print max")) .arg(Arg::with_name("mean") .short("m") .long("mean") .help("Print mean")) .arg(Arg::with_name("sum") .long("sum") .help("Print sum")) .arg(Arg::with_name("stddev") .short("s") .long("stddev") .help("Print sample standard deviation")) .arg(Arg::with_name("var") .short("v") .long("var") .help("Print sample variance")) .arg(Arg::with_name("stderr") .long("stderr") .help("Print standard error")) .arg(Arg::with_name("median") .long("median") .help("Print median (Note: computing median takes O(n) space")) .arg(Arg::with_name("percentiles") .short("p") .long("percentiles") .takes_value(true) .use_delimiter(true) .help("Print arbitrary percentiles. The argument should be a comma delimited list of floats in [0, 100] (Note: computing any percentile takes O(n) space)")) .arg(Arg::with_name("mode") .long("mode") .help("Print mode (Note: computing mode takes O(n) space")) .arg(Arg::with_name("mode-coun
stats: &SummStats<f64>, percs: &mut Percentiles<f64>, mode: &Mode<f64>, ) -> Vec<(String, f64)> { let mut results = Vec::new(); if matches.is_present("count") { results.push((String::from("count"), stats.count() as f64)); } if matches.is_present("min") { results.push((String::from("min"), stats.min().unwrap_or(f64::NAN))); } if matches.is_present("max") { results.push((String::from("max"), stats.max().unwrap_or(f64::NAN))); } if matches.is_present("mean") { results.push((String::from("mean"), stats.mean().unwrap_or(f64::NAN))); } if matches.is_present("sum") { results.push((String::from("sum"), stats.sum())); } if matches.is_present("stddev") { results.push(( String::from("stddev"), stats.standard_deviation().unwrap_or(f64::NAN), )); } if matches.is_present("var") { results.push((String::from("var"), stats.variance().unwrap_or(f64::NAN))); } if matches.is_present("stderr") { results.push(( String::from("stderr"), stats.standard_error().unwrap_or(f64::NAN), )); } if matches.is_present("percentiles") { let percentiles: Vec<f64> = matches .values_of("percentiles") .unwrap() .map(|p| { p.parse::<f64>() .unwrap_or_else(|_| panic!("Could not parse \"{}\" as float", p)) }) .collect(); let vals = match percs.percentiles(percentiles.iter().map(|p| p / 100.0)) { Err(_) => panic!("percentiles must between 0 and 100: {:?}", percentiles), Ok(None) => iter::repeat(f64::NAN).take(percentiles.len()).collect(), Ok(Some(pvals)) => pvals, }; for (perc, val) in percentiles.iter().zip(vals) { results.push((format!("{}%", perc), val)); } } if matches.is_present("median") { results.push((String::from("median"), percs.median().unwrap_or(f64::NAN))); } if matches.is_present("mode") { results.push((String::from("mode"), mode.mode().unwrap_or(f64::NAN))); } if matches.is_present("mode-count") { results.push((String::from("mode #"), mode.mode_count() as f64)); } if matches.is_present("distinct") { results.push((String::from("distinct"), mode.count_distinct() as f64)); } if 
matches.is_present("distinct-nan") { results.push(( String::from("distinct (NaN)"), mode.count_distinct_nan() as f64, )); } if results.is_empty() { results.push((String::from("count"), stats.count() as f64)); results.push((String::from("min"), stats.min().unwrap_or(f64::NAN))); results.push((String::from("max"), stats.max().unwrap_or(f64::NAN))); results.push((String::from("mean"), stats.mean().unwrap_or(f64::NAN))); results.push(( String::from("stddev"), stats.standard_deviation().unwrap_or(f64::NAN), )); } results } fn write_tsv(results: &[(String, f64)], output: &mut impl Write) { for &(ref name, ref val) in results { writeln!(output, "{}\t{}", name, val).expect("couldn't write to output"); } } fn write_json(results: &[(String, f64)], output: &mut impl Write) { write!(output, "{{").expect("couldn't write to output"); let mut iter = results.iter(); let &(ref name, ref val) = iter.next().unwrap(); write!(output, "\"{}\":{}", name, val).expect("couldn't write to output"); for &(ref name, ref val) in iter { write!(output, ",\"{}\":{}", name, val).expect("couldn't write to output"); } writeln!(output, "}}").expect("couldn't write to output"); }
t") .long("mode-count") .help("Print the number of times the mode occured (Note: computing mode takes O(n) space")) .arg(Arg::with_name("distinct") .long("distinct") .help("Print the number of distinct values (Note: computing mode takes O(n) space")) .arg(Arg::with_name("distinct-nan") .long("distinct-nan") .help("Print the number of distinct values treading all NaNs as distinct (Note: computing mode takes O(n) space")) .group(ArgGroup::with_name("format").args(&["tsv", "json"])) .arg(Arg::with_name("tsv") .short("t") .long("tsv") .help("Output as tsv. This will force tsv output even if only one statistics is requested.")) .arg(Arg::with_name("json") .short("j") .long("json") .help("Output as compressed json")) .arg(Arg::with_name("input") .short("i") .long("input") .value_name("file") .default_value("-") .help("Take input from file")) .arg(Arg::with_name("output") .short("o") .long("output") .value_name("file") .default_value("-") .help("Write output to file")) .get_matches() } enum StatsReader<'a> { Stdin(StdinLock<'a>), File(BufReader<File>), } impl<'a> Read for StatsReader<'a> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { match self { StatsReader::Stdin(lock) => lock.read(buf), StatsReader::File(file) => file.read(buf), } } } impl<'a> BufRead for StatsReader<'a> { fn fill_buf(&mut self) -> io::Result<&[u8]> { match self { StatsReader::Stdin(lock) => lock.fill_buf(), StatsReader::File(file) => file.fill_buf(), } } fn consume(&mut self, amt: usize) { match self { StatsReader::Stdin(lock) => lock.consume(amt), StatsReader::File(file) => file.consume(amt), } } } enum StatsWriter<'a> { Stdout(StdoutLock<'a>), File(BufWriter<File>), } impl<'a> Write for StatsWriter<'a> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { match self { StatsWriter::Stdout(lock) => lock.write(buf), StatsWriter::File(file) => file.write(buf), } } fn flush(&mut self) -> io::Result<()> { match self { StatsWriter::Stdout(lock) => lock.flush(), StatsWriter::File(file) => 
file.flush(), } } } fn main() { let matches = parse_command_line(); let stdin = io::stdin(); let input = match matches.value_of("input") { Some("-") => StatsReader::Stdin(stdin.lock()), Some(file_name) => StatsReader::File(BufReader::new( File::open(file_name) .unwrap_or_else(|_| panic!("File \"{}\" does not exist", file_name)), )), None => unreachable!(), }; let stdout = io::stdout(); let mut output = match matches.value_of("output") { Some("-") => StatsWriter::Stdout(stdout.lock()), Some(file_name) => { StatsWriter::File(BufWriter::new(File::open(file_name).unwrap_or_else(|_| { panic!("Couldn't open file \"{}\" for writing", file_name) }))) } None => unreachable!(), }; let mut stats = SummStats::new(); let mut percs = Percentiles::new(); let mut mode = Mode::new(); let add_mode = ["mode", "mode-count", "distinct", "distinct-nan"] .iter() .any(|s| matches.is_present(s)); let add_percs = ["percentiles", "median"] .iter() .any(|s| matches.is_present(s)); let add_stats = [ "count", "min", "max", "mean", "sum", "stddev", "var", "stderr", ] .iter() .any(|s| matches.is_present(s)) || !(add_mode && add_percs); for line in input.lines() { for token in line .expect("Couldn't read from file") .split(char::is_whitespace) .filter(|s| !s.is_empty()) { let num: f64 = token .parse() .unwrap_or_else(|_| panic!("Could not parse \"{}\" as float", token)); if add_mode { mode.add(num); } if add_percs { percs.add(num); } if add_stats { stats.add(num); } } } let results = compute_results(&matches, &stats, &mut percs, &mode); if matches.is_present("tsv") { write_tsv(&results, &mut output); } else if matches.is_present("json") { write_json(&results, &mut output); } else if results.len() == 1 { let (_, val) = results[0]; writeln!(output, "{}", val).expect("couldn't write to output"); } else { write_tsv(&results, &mut output); } } fn compute_results( matches: &ArgMatches,
random
[ { "content": "/// Get the mean of a set of data\n\n///\n\n/// This method takes constant space and linear time.\n\n///\n\n/// # Examples:\n\n///\n\n/// ```\n\n/// let mean: f64 = inc_stats::mean(&[2.0, 4.0]).unwrap();\n\n/// assert!((3.0 - mean).abs() < 1.0e-6);\n\n/// ```\n\npub fn mean<T, V, I>(data: I) -> Option<T>\n\nwhere\n\n T: Float + FromPrimitive + AddAssign,\n\n V: DerefCopy<Output = T>,\n\n I: IntoIterator<Item = V>,\n\n{\n\n data.into_iter().collect::<SummStats<_>>().mean()\n\n}\n\n\n\n/// The mutable data structure that caches ordered percentiles\n", "file_path": "src/lib.rs", "rank": 4, "score": 42163.17223907391 }, { "content": "/// Get the median of a set of data\n\n///\n\n/// This takes linear time and linear space.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// let med = inc_stats::median(&[3.0, 1.0, 2.0]).unwrap();\n\n/// assert_eq!(2.0, med);\n\n/// ```\n\n///\n\n/// ```\n\n/// let med = inc_stats::median(std::iter::empty::<f64>());\n\n/// assert!(med.is_none());\n\n/// ```\n\npub fn median<T, V, I>(data: I) -> Option<T>\n\nwhere\n\n T: Float + FromPrimitive,\n\n V: DerefCopy<Output = T>,\n\n I: IntoIterator<Item = V>,\n\n{\n\n data.into_iter().collect::<Percentiles<T>>().median()\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 5, "score": 42162.31968813242 }, { "content": "/// Get the mode of a set of data\n\n///\n\n/// If multiple modes exist, this returns the first element that reached the largest count.\n\n/// NaNs are ignored when computing the mode.\n\n///\n\n/// # Examples:\n\n///\n\n/// ```\n\n/// let mode = inc_stats::mode(&[2.0, 4.0, 2.0]);\n\n/// assert_eq!(Some(2.0), mode);\n\n/// ```\n\n///\n\n/// ```\n\n/// let mode: Option<f64> = inc_stats::mode(&[]);\n\n/// assert!(mode.is_none());\n\n/// ```\n\npub fn mode<T, V, I>(data: I) -> Option<T>\n\nwhere\n\n T: Float + ToBytes,\n\n V: DerefCopy<Output = T>,\n\n I: IntoIterator<Item = V>,\n\n{\n\n data.into_iter().collect::<Mode<T>>().mode()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests 
{\n\n use super::*;\n\n\n\n #[test]\n\n fn f32_mean_test() {\n\n let avg: f32 = mean(&[0.0, 1.0, 2.0]).unwrap();\n\n assert!((avg - 1.0).abs() < 1e-6);\n\n }\n\n\n\n #[test]\n", "file_path": "src/lib.rs", "rank": 6, "score": 42162.20408521133 }, { "content": "#[derive(Debug, PartialEq)]\n\nstruct HashFloat<T: Float + ToBytes>(T);\n\n\n\nimpl<T: Float + ToBytes> Eq for HashFloat<T> {}\n\n\n\nimpl<T: Float + ToBytes> Hash for HashFloat<T> {\n\n fn hash<H: Hasher>(&self, state: &mut H) {\n\n self.0.to_bytes().hash(state);\n\n }\n\n}\n\n\n\n/// Mode computation struct\n\n///\n\n/// This struct stores data to allow efficient computation of the mode. This struct takes linear\n\n/// space. It implements FromIterator to allow collection.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// let mut mode = inc_stats::Mode::new();\n\n/// for &num in &[2.0, 4.0, 8.0] {\n", "file_path": "src/lib.rs", "rank": 8, "score": 32449.668403369105 }, { "content": "/// weighted average between two values, weight given to high\n\npub fn weighted_average<T: Float + FromPrimitive>(low: T, high: T, weight: f64) -> Option<T> {\n\n Some(low * T::from_f64(1.0 - weight)? 
+ high * T::from_f64(weight)?)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn inside_out_tests() {\n\n let best_case: Vec<_> = (0..7).collect();\n\n let best_actual: Vec<_> = inside_out(&best_case).unwrap().copied().collect();\n\n assert_eq!(best_actual, vec!(3, 1, 5, 0, 2, 4, 6));\n\n\n\n let worst_case: Vec<_> = (0..8).collect();\n\n let worst_actual: Vec<_> = inside_out(&worst_case).unwrap().copied().collect();\n\n assert_eq!(worst_actual, vec!(7, 3, 1, 5, 0, 2, 4, 6));\n\n\n\n let middle_case: Vec<_> = (0..5).collect();\n\n let middle_actual: Vec<_> = inside_out(&middle_case).unwrap().copied().collect();\n\n assert_eq!(middle_actual, vec!(3, 1, 0, 2, 4));\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 9, "score": 31287.21888173199 }, { "content": "/// Create an iterator that does BFS treating the slice as a maximal tree\n\n///\n\n/// Errors if the input is too long\n\npub fn inside_out<'a, T>(elems: &'a [T]) -> Result<InsideOut<'a, T>, StatsError> {\n\n let step = checked_step(elems.len())\n\n .ok_or(StatsError::from(\"elems to long to efficiently alternate\"))?;\n\n Ok(InsideOut {\n\n elems,\n\n current: 0,\n\n step,\n\n })\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 10, "score": 30147.072203717595 }, { "content": "#[derive(Debug)]\n\nstruct CachedOrdering<T: Float + FromPrimitive> {\n\n data: Vec<T>,\n\n in_order: BTreeSet<usize>,\n\n}\n\n\n\nimpl<T: Float + FromPrimitive> CachedOrdering<T> {\n\n /// Create a new Percentiles object with no data\n\n fn new() -> Self {\n\n CachedOrdering {\n\n // all of the points aded so far\n\n data: Vec::new(),\n\n // indices in data that are known to be in sorted order\n\n in_order: BTreeSet::new(),\n\n }\n\n }\n\n\n\n /// Add a data point\n\n fn add(&mut self, val: T) {\n\n self.data.push(val);\n\n self.in_order.clear();\n", "file_path": "src/lib.rs", "rank": 11, "score": 27355.418158528875 }, { "content": "/// Compute the step involved\n\nfn checked_step(len: usize) -> 
Option<usize> {\n\n len.checked_add(1)?\n\n .checked_next_power_of_two()?\n\n .checked_mul(2)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 12, "score": 24410.297590015824 }, { "content": "/// Summary statistics struct\n\n///\n\n/// This struct aggregates data to compute summary statistics using constant space overhead. It\n\n/// implements the FromIterator trait so it can be collected from an iterator of floats.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// let mut stats = inc_stats::SummStats::new();\n\n/// for &num in &[2.0, 4.0, 8.0] {\n\n/// stats.add(num);\n\n/// }\n\n/// assert_eq!(3, stats.count());\n\n/// ```\n\n///\n\n/// ```\n\n/// let stats: inc_stats::SummStats<f64> = [2.0, 4.0, 8.0].iter().collect();\n\n/// assert_eq!(3, stats.count());\n\n/// ```\n\n#[derive(Debug)]\n", "file_path": "src/lib.rs", "rank": 17, "score": 13.700522268029044 }, { "content": " _ => Some(self.mean),\n\n }\n\n }\n\n\n\n /// Get the sum\n\n ///\n\n /// Constant time.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// let stats: inc_stats::SummStats<f64> = [2.0, 4.0].iter().collect();\n\n /// assert!((6.0 - stats.sum()).abs() < 1.0e-6);\n\n /// ```\n\n pub fn sum(&self) -> T {\n\n self.tcount() * self.mean\n\n }\n\n\n\n /// Get the sample standard deviation\n\n ///\n", "file_path": "src/lib.rs", "rank": 19, "score": 12.359080479150535 }, { "content": " self.ssd += (val - self.mean) * delta;\n\n if val < self.min {\n\n self.min = val;\n\n }\n\n if self.max < val {\n\n self.max = val;\n\n }\n\n Ok(())\n\n }\n\n\n\n /// Get the number of values added\n\n pub fn count(&self) -> u64 {\n\n self.count\n\n }\n\n\n\n fn tcount(&self) -> T {\n\n // if we could add the last value, then we must have been able to convert this\n\n T::from_u64(self.count).unwrap()\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 20, "score": 12.03285303151387 }, { "content": " /// Add a number\n\n ///\n\n /// Check for conversion errors, will only happen when the internal count can't be 
converted\n\n /// into the float data type.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// let mut stats = inc_stats::SummStats::new();\n\n /// stats.checked_add(0.0).unwrap();\n\n /// assert_eq!(1, stats.count());\n\n /// ```\n\n pub fn checked_add(&mut self, rval: impl DerefCopy<Output = T>) -> Result<(), StatsError> {\n\n // NOTE need to exit early before mutating state\n\n let count = T::from_u64(self.count + 1).ok_or(\"can't convert from count to float type\")?;\n\n let val = rval.deref_copy();\n\n self.non_nan |= !val.is_nan();\n\n self.count += 1;\n\n let delta = val - self.mean;\n\n self.mean += delta / count;\n", "file_path": "src/lib.rs", "rank": 21, "score": 11.672375374886615 }, { "content": "Stats CLI\n\n=========\n\n\n\n[![crates.io](https://img.shields.io/crates/v/stats-cli)](https://crates.io/crates/stats-cli)\n\n[![documentation](https://docs.rs/stats-cli/badge.svg)](https://docs.rs/stats-cli)\n\n[![tests](https://github.com/erikbrinkman/stats/actions/workflows/rust.yml/badge.svg)](https://github.com/erikbrinkman/stats/actions/workflows/rust.yml)\n\n![MIT License](https://img.shields.io/github/license/erikbrinkman/stats)\n\n\n\nRust package for efficiently computing statistics on floating point numbers from the command line.\n\nThis contains both a command line binary and a rust library with incremental statistics objects.\n\nCurrently only summary statistics (min, mean, variance), percentiles, and mode are supported.\n\n\n\nSee the rust documentation for the library api, or the cli help for usage.\n\n\n\n\n\nTo Do\n\n-----\n\n\n\n- [ ] Add sketches (many other crates do this)\n\n- [ ] Add testing to cli\n\n- [ ] Weigh tradeoffs behind `DerefCopy` trait vs `Borrow + Copy`\n", "file_path": "README.md", "rank": 22, "score": 11.264163836833085 }, { "content": "pub struct SummStats<T: Float + FromPrimitive + AddAssign> {\n\n non_nan: bool,\n\n count: u64,\n\n mean: T,\n\n ssd: T,\n\n min: T,\n\n max: T,\n\n}\n\n\n\nimpl<T: Float + FromPrimitive 
+ AddAssign> SummStats<T> {\n\n /// Create a new SummStats struct with no data\n\n pub fn new() -> Self {\n\n SummStats {\n\n non_nan: false, // any value is not nan\n\n count: 0,\n\n mean: T::zero(),\n\n ssd: T::zero(),\n\n min: T::infinity(),\n\n max: T::neg_infinity(),\n\n }\n", "file_path": "src/lib.rs", "rank": 23, "score": 10.826381710103293 }, { "content": "\n\n /// Get the amount of data\n\n fn len(&self) -> usize {\n\n self.data.len()\n\n }\n\n}\n\n\n\n/// Data percentile struct\n\n///\n\n/// This struct stores data to allow efficient computation of percentiles. This struct takes linear\n\n/// space. It implements FromIterator to allow collection. This collection ignores NaNs.\n\n///\n\n/// The structure is designed for efficient computation of percentiles when data is added and then\n\n/// percentiles are computed. Adding data is constant time, querying percentiles is linear time,\n\n/// with some caching to make it faster for computing several percentiles. If you were going to\n\n/// query percentiles while adding data, then you probably want to use a different data structure.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n", "file_path": "src/lib.rs", "rank": 27, "score": 10.517102889817817 }, { "content": "\n\n /// Count the number of distinct values\n\n ///\n\n /// Distinctness for floating points is very finicy. Values that may print the same may not be\n\n /// same underlying value. Computations that yield the same value in \"real\" math may not yield\n\n /// the same value in floating point math.\n\n ///\n\n /// This ignores nans\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// let num: inc_stats::Mode<_> = [1.0, 2.0, 2.0, std::f64::NAN].iter().collect();\n\n /// assert_eq!(2, num.count_distinct());\n\n /// ```\n\n pub fn count_distinct(&self) -> usize {\n\n self.counts.len()\n\n }\n\n\n\n /// Count the number of distinct values\n", "file_path": "src/lib.rs", "rank": 28, "score": 10.496448439528166 }, { "content": "//! 
Module with incremental statistics functions\n\n//!\n\n//! This contains helper functions for computing statistics on iterators, as well as structs that\n\n//! support incremental addition of data.\n\nmod bytes;\n\nmod copy;\n\nmod utils;\n\n\n\nuse bytes::ToBytes;\n\npub use copy::DerefCopy;\n\nuse num_traits::{Float, FromPrimitive};\n\nuse std::cell::RefCell;\n\nuse std::cmp::{self, Eq};\n\nuse std::collections::{BTreeSet, HashMap};\n\nuse std::f64;\n\nuse std::hash::{Hash, Hasher};\n\nuse std::iter::{self, FromIterator};\n\nuse std::ops::AddAssign;\n\npub use utils::StatsError;\n\n\n", "file_path": "src/lib.rs", "rank": 29, "score": 9.500587603387292 }, { "content": " pub fn mode_count_nan(&self) -> usize {\n\n cmp::max(self.mode_count, self.nan_count)\n\n }\n\n}\n\n\n\nimpl<T: Float + ToBytes, V: DerefCopy<Output = T>> FromIterator<V> for Mode<T> {\n\n fn from_iter<I>(iter: I) -> Self\n\n where\n\n I: IntoIterator<Item = V>,\n\n {\n\n let mut mode = Mode::new();\n\n for val in iter {\n\n mode.add(val);\n\n }\n\n mode\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 30, "score": 9.476484244104288 }, { "content": "\n\n /// Get a number of percentiles\n\n ///\n\n /// This takes linear time in the number of added data points, and log linear in the number of\n\n /// percentiles. This will be marginally more efficient than calling percentile repeatedly in a\n\n /// bad order.\n\n ///\n\n /// # Examples:\n\n ///\n\n /// ```\n\n /// let percs: inc_stats::Percentiles<f64> = [1.0, 3.0, 7.0].iter().collect();\n\n /// let quarts = percs.percentiles(&[0.75, 0.25, 0.5]).unwrap().unwrap();\n\n /// assert!((5.0 - quarts[0]).abs() < 1.0e-6);\n\n /// assert!((2.0 - quarts[1]).abs() < 1.0e-6);\n\n /// assert!((3.0 - quarts[2]).abs() < 1.0e-6);\n\n /// ```\n\n // NOTE inside out does not guarantee worst case linear complexity. 
Asking for percentiles that\n\n // correspond to the 1st, 2nd, 3rd, index etc will still have `log p * n` complexity (versus `p\n\n // * n` for the native way). If we instead picked the percentiles closest to the midpoint of\n\n // the remaining space, the complexity would drop to `log p + n`, which is just n.\n", "file_path": "src/lib.rs", "rank": 31, "score": 9.318237601311305 }, { "content": " fn f32_median_test() {\n\n let avg: f32 = median(&[0.0, 1.0, 2.0, 3.0]).unwrap();\n\n assert!((avg - 1.5).abs() < 1e-6);\n\n }\n\n\n\n #[test]\n\n fn nan_percentile_test() {\n\n let percs: Percentiles<_> = [f64::NAN].iter().collect();\n\n // we know we put something in\n\n assert_eq!(1, percs.count());\n\n // but don't have enough data to get median\n\n assert_eq!(None, percs.median());\n\n }\n\n\n\n #[test]\n\n fn nan_mode_test() {\n\n let avg: Mode<_> = [f64::NAN].iter().collect();\n\n assert!(avg.mode_nan().unwrap().is_nan());\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 32, "score": 9.240013307817721 }, { "content": "/// mode.add(num);\n\n/// }\n\n/// assert_eq!(3, mode.count());\n\n/// ```\n\n///\n\n/// ```\n\n/// let mode: inc_stats::Mode<f64> = [2.0, 4.0, 8.0].iter().collect();\n\n/// assert_eq!(3, mode.count());\n\n/// ```\n\n#[derive(Debug)]\n\npub struct Mode<T: Float + ToBytes> {\n\n counts: HashMap<HashFloat<T>, usize>,\n\n count: usize,\n\n nan_count: usize,\n\n mode: Vec<T>,\n\n mode_count: usize,\n\n}\n\n\n\nimpl<T: Float + ToBytes> Mode<T> {\n\n /// Create a new Mode object with no data\n", "file_path": "src/lib.rs", "rank": 33, "score": 8.857630758422726 }, { "content": " /// # Examples\n\n ///\n\n /// ```\n\n /// let stats: inc_stats::SummStats<f64> = [2.0, 4.0].iter().collect();\n\n /// assert!((1.0 - stats.standard_error().unwrap()).abs() < 1.0e-6);\n\n /// ```\n\n pub fn standard_error(&self) -> Option<T> {\n\n self.standard_deviation().map(|d| d / self.tcount().sqrt())\n\n }\n\n}\n\n\n\nimpl<T: Float + FromPrimitive + AddAssign> Default for 
SummStats<T> {\n\n fn default() -> Self {\n\n SummStats::new()\n\n }\n\n}\n\n\n\nimpl<T: Float + FromPrimitive + AddAssign, V: DerefCopy<Output = T>> FromIterator<V>\n\n for SummStats<T>\n\n{\n", "file_path": "src/lib.rs", "rank": 34, "score": 8.630249742376872 }, { "content": " self.mode.clear();\n\n self.mode.push(val);\n\n self.mode_count += 1;\n\n } else if *val_count == self.mode_count {\n\n self.mode.push(val);\n\n }\n\n }\n\n }\n\n\n\n /// Get the number of data points\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// let num: inc_stats::Mode<_> = [1.0, 2.0, std::f64::NAN].iter().collect();\n\n /// assert_eq!(3, num.count());\n\n /// ```\n\n pub fn count(&self) -> usize {\n\n self.count\n\n }\n", "file_path": "src/lib.rs", "rank": 35, "score": 8.239362580968715 }, { "content": "//! Various utilities\n\nuse num_traits::{Float, FromPrimitive};\n\nuse std::error::Error;\n\nuse std::fmt::{self, Display, Formatter};\n\n\n\n/// Any error from this library\n\n#[derive(Debug)]\n\npub struct StatsError(String);\n\n\n\nimpl StatsError {\n\n pub fn new(msg: String) -> StatsError {\n\n StatsError(msg)\n\n }\n\n}\n\n\n\nimpl<'a> From<&'a str> for StatsError {\n\n fn from(msg: &'a str) -> StatsError {\n\n StatsError(String::from(msg))\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 36, "score": 7.912005333328033 }, { "content": " /// ```\n\n /// let mode: inc_stats::Mode<_> = [2.0, 4.0, std::f64::NAN, 4.0].iter().collect();\n\n /// assert_eq!(2, mode.mode_count());\n\n /// ```\n\n pub fn mode_count(&self) -> usize {\n\n self.mode_count\n\n }\n\n\n\n /// Return the number of times the mode occurred\n\n ///\n\n /// Counts NaNs as a possible mode.\n\n ///\n\n /// Constant time.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// let mode: inc_stats::Mode<_> = [2.0, 4.0, std::f64::NAN, std::f64::NAN].iter().collect();\n\n /// assert_eq!(2, mode.mode_count_nan());\n\n /// ```\n", "file_path": "src/lib.rs", "rank": 37, "score": 7.897268983315051 }, { "content": "/// 
let mut percs = inc_stats::Percentiles::new();\n\n/// for &num in &[2.0, 4.0, 8.0] {\n\n/// percs.add(num);\n\n/// }\n\n/// assert_eq!(3, percs.count());\n\n/// ```\n\n///\n\n/// ```\n\n/// let percs: inc_stats::Percentiles<f64> = [2.0, 4.0, 8.0].iter().collect();\n\n/// assert_eq!(3, percs.count());\n\n/// ```\n\n#[derive(Debug)]\n\npub struct Percentiles<T: Float + FromPrimitive> {\n\n data: RefCell<CachedOrdering<T>>,\n\n nan_count: usize,\n\n}\n\n\n\nimpl<T: Float + FromPrimitive> Percentiles<T> {\n\n /// Create a new Percentiles object with no data\n\n pub fn new() -> Self {\n", "file_path": "src/lib.rs", "rank": 38, "score": 7.659064293273435 }, { "content": " pub fn new() -> Self {\n\n Mode {\n\n counts: HashMap::new(),\n\n count: 0,\n\n nan_count: 0,\n\n mode: Vec::new(),\n\n mode_count: 0,\n\n }\n\n }\n\n\n\n /// Add a data point\n\n pub fn add(&mut self, rval: impl DerefCopy<Output = T>) {\n\n let val = rval.deref_copy();\n\n self.count += 1;\n\n if val.is_nan() {\n\n self.nan_count += 1;\n\n } else {\n\n let val_count = self.counts.entry(HashFloat(val)).or_insert(0);\n\n *val_count += 1;\n\n if *val_count > self.mode_count {\n", "file_path": "src/lib.rs", "rank": 39, "score": 7.638135270473622 }, { "content": " /// # Examples\n\n ///\n\n /// ```\n\n /// let mode: inc_stats::Mode<_> = [2.0, 4.0, std::f64::NAN, std::f64::NAN].iter().collect();\n\n /// assert!(mode.mode_nan().unwrap().is_nan());\n\n /// ```\n\n pub fn mode_nan(&self) -> Option<T> {\n\n if self.nan_count > self.mode_count {\n\n Some(T::nan())\n\n } else {\n\n self.mode()\n\n }\n\n }\n\n\n\n /// Return the number of times the mode occurred\n\n ///\n\n /// Constant time.\n\n ///\n\n /// # Examples\n\n ///\n", "file_path": "src/lib.rs", "rank": 40, "score": 7.4680433005175395 }, { "content": " /// let percs: inc_stats::Percentiles<f64> = [1.0, 5.0, 100.0].iter().collect();\n\n /// let med = percs.median().unwrap();\n\n /// assert_eq!(5.0, med);\n\n /// ```\n\n pub fn median(&self) -> Option<T> 
{\n\n self.percentile(0.5).expect(\"0.5 is a valid percentile\")\n\n }\n\n}\n\n\n\nimpl<T: Float + FromPrimitive> Default for Percentiles<T> {\n\n fn default() -> Self {\n\n Percentiles::new()\n\n }\n\n}\n\n\n\nimpl<T: Float + FromPrimitive, V: DerefCopy<Output = T>> FromIterator<V> for Percentiles<T> {\n\n fn from_iter<I>(iter: I) -> Self\n\n where\n\n I: IntoIterator<Item = V>,\n\n {\n\n let mut percs = Percentiles::new();\n\n for val in iter {\n\n percs.add(val);\n\n }\n\n percs\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 41, "score": 7.419981250472691 }, { "content": " /// Constant time.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// let stats: inc_stats::SummStats<f64> = [2.0, 4.0].iter().collect();\n\n /// assert!((1.4142136 - stats.standard_deviation().unwrap()).abs() < 1.0e-6);\n\n /// ```\n\n pub fn standard_deviation(&self) -> Option<T> {\n\n self.variance().map(T::sqrt)\n\n }\n\n\n\n /// Get the sample variance\n\n ///\n\n /// Constant time.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// let stats: inc_stats::SummStats<f64> = [2.0, 4.0].iter().collect();\n", "file_path": "src/lib.rs", "rank": 42, "score": 6.951213201926089 }, { "content": " fn from_iter<I>(iter: I) -> Self\n\n where\n\n I: IntoIterator<Item = V>,\n\n {\n\n let mut stats = SummStats::new();\n\n for val in iter {\n\n stats.add(val);\n\n }\n\n stats\n\n }\n\n}\n\n\n\n/// Get the mean of a set of data\n\n///\n\n/// This method takes constant space and linear time.\n\n///\n\n/// # Examples:\n\n///\n\n/// ```\n\n/// let mean: f64 = inc_stats::mean(&[2.0, 4.0]).unwrap();\n\n/// assert!((3.0 - mean).abs() < 1.0e-6);\n\n/// ```\n", "file_path": "src/lib.rs", "rank": 43, "score": 6.894133924806588 }, { "content": " ///\n\n /// This treats all NaNs as different\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// let num: inc_stats::Mode<_> = [1.0, std::f64::NAN, std::f64::NAN].iter().collect();\n\n /// assert_eq!(3, num.count_distinct_nan());\n\n /// ```\n\n ///\n\n /// 
Treat all nans the same\n\n /// ```\n\n /// let num: inc_stats::Mode<_> = [1.0, std::f64::NAN, std::f64::NAN].iter().collect();\n\n /// assert_eq!(2, std::cmp::min(num.count_distinct() + 1, num.count_distinct_nan()));\n\n /// ```\n\n pub fn count_distinct_nan(&self) -> usize {\n\n self.counts.len() + self.nan_count\n\n }\n\n\n\n /// Return an iterator of all of the modes\n", "file_path": "src/lib.rs", "rank": 44, "score": 6.835671674582745 }, { "content": "\n\nimpl Display for StatsError {\n\n fn fmt(&self, formatter: &mut Formatter) -> Result<(), fmt::Error> {\n\n write!(formatter, \"statistical error: {}\", self.0)\n\n }\n\n}\n\n\n\nimpl Error for StatsError {}\n\n\n\n/// An iterator that does BFS assuming slice is a full binary tree\n\npub struct InsideOut<'a, T> {\n\n elems: &'a [T],\n\n current: usize,\n\n step: usize,\n\n}\n\n\n\nimpl<'a, T> Iterator for InsideOut<'a, T> {\n\n type Item = &'a T;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n", "file_path": "src/utils.rs", "rank": 45, "score": 6.545682328296806 }, { "content": " /// ```\n\n /// let mode: inc_stats::Mode<_> = [std::f64::NAN, 5.0].iter().collect();\n\n /// let mut it = mode.modes_nan();\n\n /// assert_eq!(Some(5.0), it.next());\n\n /// assert!(it.next().unwrap().is_nan());\n\n /// assert!(it.next().is_none());\n\n /// ```\n\n pub fn modes_nan(&self) -> impl Iterator<Item = T> + '_ {\n\n self.modes().chain(self.nan_mode())\n\n }\n\n\n\n /// Return the current mode\n\n ///\n\n /// If multiple modes exist, this returns the first element that reached the largest count.\n\n /// NaNs are ignored when computing the mode.\n\n ///\n\n /// Constant time.\n\n ///\n\n /// # Examples\n\n ///\n", "file_path": "src/lib.rs", "rank": 47, "score": 6.356943887133843 }, { "content": " Percentiles {\n\n data: RefCell::new(CachedOrdering::new()),\n\n nan_count: 0,\n\n }\n\n }\n\n\n\n /// Add a data point\n\n pub fn add(&mut self, rval: impl DerefCopy<Output = T>) {\n\n let val = rval.deref_copy();\n\n if 
val.is_nan() {\n\n self.nan_count += 1;\n\n } else {\n\n self.data.borrow_mut().add(val);\n\n }\n\n }\n\n\n\n /// Get the number of data points\n\n pub fn count(&self) -> usize {\n\n self.data.borrow().len() + self.nan_count\n\n }\n", "file_path": "src/lib.rs", "rank": 49, "score": 6.109408333163952 }, { "content": " /// assert!((2.0 - stats.variance().unwrap()).abs() < 1.0e-6);\n\n /// ```\n\n ///\n\n /// ```\n\n /// let mut stats = inc_stats::SummStats::new();\n\n /// stats.add(0.0);\n\n /// assert!(stats.variance().is_none());\n\n /// ```\n\n pub fn variance(&self) -> Option<T> {\n\n match self.count {\n\n 0 | 1 => None,\n\n // if we could add to this, it must be possible\n\n _ => Some(self.ssd / T::from_u64(self.count - 1).unwrap()),\n\n }\n\n }\n\n\n\n /// Get the standard error\n\n ///\n\n /// Constant time.\n\n ///\n", "file_path": "src/lib.rs", "rank": 50, "score": 6.067138008616497 }, { "content": " }\n\n\n\n /// Add a number\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// let mut stats = inc_stats::SummStats::new();\n\n /// stats.add(0.0);\n\n /// stats.add(&1.2);\n\n /// assert_eq!(2, stats.count());\n\n /// ```\n\n ///\n\n /// # Panics\n\n ///\n\n /// when the internal count can't be converted into the float data type.\n\n pub fn add(&mut self, bval: impl DerefCopy<Output = T>) {\n\n self.checked_add(bval).unwrap();\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 51, "score": 5.975083267519093 }, { "content": " self.mode.iter().copied()\n\n }\n\n\n\n /// gets an option for if nan would be in the mode\n\n fn nan_mode(&self) -> Option<T> {\n\n if self.nan_count > 0 && self.nan_count >= self.mode_count {\n\n Some(T::nan())\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Return an iterator of all of the modes\n\n ///\n\n /// This iterator will include NaN if present as a mode. 
NaN will always be returned last\n\n ///\n\n /// Constant time.\n\n ///\n\n /// # Examples\n\n ///\n", "file_path": "src/lib.rs", "rank": 52, "score": 5.841757813948931 }, { "content": " // allocate result\n\n let mut result: Vec<Option<T>> = iter::repeat(None).take(indexed.len()).collect();\n\n for &(ind, perc) in utils::inside_out(&indexed)? {\n\n // we checked that we had data\n\n result[ind] = Some(self.percentile(perc)?.unwrap());\n\n }\n\n let checked_result: Option<Vec<_>> = result.iter().copied().collect();\n\n // fails if there is a logic error in inside_out\n\n Ok(Some(checked_result.unwrap()))\n\n }\n\n }\n\n }\n\n\n\n /// Get a percentile\n\n ///\n\n /// Linear time.\n\n ///\n\n /// # Examples:\n\n ///\n\n /// ```\n", "file_path": "src/lib.rs", "rank": 53, "score": 5.677726133805371 }, { "content": " /// let percs: inc_stats::Percentiles<f64> = [1.0, 5.0].iter().collect();\n\n /// let quart = percs.percentile(0.25).unwrap().unwrap();\n\n /// assert!((2.0 - quart).abs() < 1.0e-6);\n\n /// ```\n\n pub fn percentile(\n\n &self,\n\n percentile: impl DerefCopy<Output = f64>,\n\n ) -> Result<Option<T>, StatsError> {\n\n let perc = percentile.deref_copy();\n\n if perc < 0.0 || 1.0 < perc {\n\n Err(StatsError::new(format!(\n\n \"all percentiles must be between 0 and 1, but got: {}\",\n\n perc\n\n )))\n\n } else {\n\n let mut ordering = self.data.borrow_mut();\n\n match ordering.len() {\n\n 0 => Ok(None),\n\n _ => {\n\n let p_index = (ordering.len() - 1) as f64 * perc;\n", "file_path": "src/lib.rs", "rank": 54, "score": 5.292470240663748 }, { "content": " ///\n\n /// Multiple modes are retruned in the order they became a mode. 
NaNs are ignored.\n\n ///\n\n /// This iterator has read only reference to the mode data structure that must be dropped to\n\n /// continue modifying the mode.\n\n ///\n\n /// Constant time.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// let mut mode = inc_stats::Mode::new();\n\n /// {\n\n /// let mut it = mode.modes();\n\n /// assert!(it.next().is_none());\n\n /// }\n\n ///\n\n /// mode.add(5.0);\n\n /// {\n\n /// let mut it = mode.modes();\n", "file_path": "src/lib.rs", "rank": 55, "score": 5.286008188559054 }, { "content": " }\n\n\n\n /// Get the mean\n\n ///\n\n /// Constant time.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// let stats: inc_stats::SummStats<f64> = [2.0, 4.0].iter().collect();\n\n /// assert!((3.0 - stats.mean().unwrap()).abs() < 1.0e-6);\n\n /// ```\n\n ///\n\n /// ```\n\n /// let stats = inc_stats::SummStats::<f64>::new();\n\n /// assert!(stats.mean().is_none());\n\n /// ```\n\n pub fn mean(&self) -> Option<T> {\n\n match self.count {\n\n 0 => None,\n", "file_path": "src/lib.rs", "rank": 56, "score": 4.909720136976665 }, { "content": " /// ```\n\n /// let mode: inc_stats::Mode<_> = [2.0, 4.0, std::f64::NAN, 4.0].iter().collect();\n\n /// assert_eq!(4.0, mode.mode().unwrap());\n\n /// ```\n\n ///\n\n /// ```\n\n /// let mode = inc_stats::Mode::<f64>::new();\n\n /// assert!(mode.mode().is_none());\n\n /// ```\n\n pub fn mode(&self) -> Option<T> {\n\n self.modes().next()\n\n }\n\n\n\n /// Return the current mode\n\n ///\n\n /// If multiple modes exist, this returns the first element that reached the largest count that\n\n /// wasn't NaN. 
NaN will be returned only if it is the unique mode.\n\n ///\n\n /// Constant time.\n\n ///\n", "file_path": "src/lib.rs", "rank": 57, "score": 4.89967629468708 }, { "content": " pub fn percentiles<P, I>(&self, percentiles: I) -> Result<Option<Vec<T>>, StatsError>\n\n where\n\n P: DerefCopy<Output = f64>,\n\n I: IntoIterator<Item = P>,\n\n {\n\n let len = self.data.borrow().len();\n\n match len {\n\n 0 => Ok(None),\n\n _ => {\n\n // need to output result in same order, but need this sorted for efficiency\n\n let mut indexed: Vec<(usize, f64)> = percentiles\n\n .into_iter()\n\n .map(DerefCopy::deref_copy)\n\n .enumerate()\n\n .collect();\n\n if indexed.iter().any(|(_, e)| e.is_nan()) {\n\n Err(StatsError::from(\"percentiles can't be nan\"))?\n\n }\n\n // we checked there were no nans\n\n indexed.sort_unstable_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap());\n", "file_path": "src/lib.rs", "rank": 58, "score": 4.840298538351371 }, { "content": "//! Trait for converting float to bytes\n\n//! there are some modules that do this, but their goal is serialization, so it made more sense to\n\n//! 
just pseudo-extend num_traits\n\nuse core::borrow::{Borrow, BorrowMut};\n\nuse core::cmp::{Eq, Ord, PartialEq, PartialOrd};\n\nuse core::fmt::Debug;\n\nuse core::hash::Hash;\n\n\n\n// TODO num_traits might include them at some point:\n\n// https://github.com/rust-num/num-traits/pull/103\n", "file_path": "src/bytes.rs", "rank": 59, "score": 4.576863285301518 }, { "content": " let low_index = p_index.floor() as usize;\n\n let high_index = p_index.ceil() as usize;\n\n let low = ordering.order_index(low_index);\n\n let high = ordering.order_index(high_index);\n\n let weight = p_index - low_index as f64;\n\n let perc = utils::weighted_average(low, high, weight)\n\n .ok_or(\"can't convert from weight to float\")?;\n\n Ok(Some(perc))\n\n }\n\n }\n\n }\n\n }\n\n\n\n /// Get the median\n\n ///\n\n /// Linear time.\n\n ///\n\n /// # Examples:\n\n ///\n\n /// ```\n", "file_path": "src/lib.rs", "rank": 60, "score": 3.361768112800116 }, { "content": " /// assert_eq!(Some(5.0), it.next());\n\n /// assert!(it.next().is_none());\n\n /// }\n\n ///\n\n /// mode.add(3.0);\n\n /// {\n\n /// let mut it = mode.modes();\n\n /// assert_eq!(Some(5.0), it.next());\n\n /// assert_eq!(Some(3.0), it.next());\n\n /// assert!(it.next().is_none());\n\n /// }\n\n ///\n\n /// mode.add(3.0);\n\n /// {\n\n /// let mut it = mode.modes();\n\n /// assert_eq!(Some(3.0), it.next());\n\n /// assert!(it.next().is_none());\n\n /// }\n\n /// ```\n\n pub fn modes(&self) -> impl Iterator<Item = T> + '_ {\n", "file_path": "src/lib.rs", "rank": 61, "score": 3.262132734762756 }, { "content": " /// Get the minimum non nan value\n\n ///\n\n /// Constant time. 
If no non nan values have been added, this is None.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// let stats: inc_stats::SummStats<_> = [2.0, 4.0, std::f64::NAN].iter().collect();\n\n /// assert_eq!(2.0, stats.min().unwrap());\n\n /// ```\n\n ///\n\n /// ```\n\n /// let mut stats = inc_stats::SummStats::new();\n\n /// stats.add(std::f64::NAN);\n\n /// assert!(stats.min().is_none());\n\n /// ```\n\n pub fn min(&self) -> Option<T> {\n\n if self.non_nan {\n\n Some(self.min)\n\n } else {\n", "file_path": "src/lib.rs", "rank": 62, "score": 2.9913474780745894 }, { "content": " None\n\n }\n\n }\n\n\n\n /// Get the maximum non nan value\n\n ///\n\n /// Constant time. If no non nan values have been added, this is None.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// let stats: inc_stats::SummStats<_> = [2.0, 4.0, std::f64::NAN].iter().collect();\n\n /// assert_eq!(4.0, stats.max().unwrap());\n\n /// ```\n\n pub fn max(&self) -> Option<T> {\n\n if self.non_nan {\n\n Some(self.max)\n\n } else {\n\n None\n\n }\n", "file_path": "src/lib.rs", "rank": 63, "score": 2.9704198978028726 }, { "content": "//! Trait for copying for ownership\n\n//!\n\n//! This ultimately allows methods to take either references or owned values and still do type\n\n//! inference\n\n\n", "file_path": "src/copy.rs", "rank": 64, "score": 2.5187334251700735 }, { "content": " self.current += self.step;\n\n match self.elems.get(self.current) {\n\n Some(elem) => Some(elem),\n\n _ if self.step <= 2 => None,\n\n _ => {\n\n self.step /= 2;\n\n self.current = (self.step / 2) - 1;\n\n Some(&self.elems[self.current])\n\n }\n\n }\n\n }\n\n}\n\n\n\n/// Compute the step involved\n", "file_path": "src/utils.rs", "rank": 65, "score": 2.3366698274877766 } ]
Rust
src/volume/storage/redis/redis.rs
isgasho/zbox
752c2739f883f416e27e9c342552ff1f9838c0e6
use std::fmt::{self, Debug}; use std::sync::Mutex; use redis::{Client, Commands, Connection}; use base::crypto::{Crypto, Key}; use base::IntoRef; use error::{Error, Result}; use trans::Eid; use volume::address::Span; use volume::storage::Storable; use volume::BLK_SIZE; #[inline] fn super_blk_key(suffix: u64) -> String { format!("super_blk:{}", suffix) } #[inline] fn wal_key(id: &Eid) -> String { format!("wal:{}", id.to_string()) } #[inline] fn addr_key(id: &Eid) -> String { format!("address:{}", id.to_string()) } #[inline] fn blk_key(blk_idx: usize) -> String { format!("block:{}", blk_idx) } pub struct RedisStorage { client: Client, conn: Option<Mutex<Connection>>, } impl RedisStorage { pub fn new(path: &str) -> Result<Self> { let url = if path.starts_with("+unix+") { format!("redis+unix:///{}", &path[6..]) } else { format!("redis://{}", path) }; let client = Client::open(url.as_str())?; Ok(RedisStorage { client, conn: None }) } fn get_bytes(&self, key: &str) -> Result<Vec<u8>> { match self.conn { Some(ref conn) => { let conn = conn.lock().unwrap(); if !conn.exists::<&str, bool>(key)? 
{ return Err(Error::NotFound); } let ret = conn.get(key)?; Ok(ret) } None => unreachable!(), } } fn set_bytes(&self, key: &str, val: &[u8]) -> Result<()> { match self.conn { Some(ref conn) => { let conn = conn.lock().unwrap(); conn.set(key, val)?; Ok(()) } None => unreachable!(), } } fn del(&self, key: &str) -> Result<()> { match self.conn { Some(ref conn) => { let conn = conn.lock().unwrap(); conn.del(key)?; Ok(()) } None => unreachable!(), } } } impl Storable for RedisStorage { fn exists(&self) -> Result<bool> { let conn = self.client.get_connection()?; let key = super_blk_key(0); conn.exists::<&str, bool>(&key).map_err(Error::from) } fn connect(&mut self) -> Result<()> { let conn = self.client.get_connection()?; self.conn = Some(Mutex::new(conn)); Ok(()) } #[inline] fn init(&mut self, _crypto: Crypto, _key: Key) -> Result<()> { Ok(()) } #[inline] fn open(&mut self, _crypto: Crypto, _key: Key) -> Result<()> { Ok(()) } fn get_super_block(&mut self, suffix: u64) -> Result<Vec<u8>> { let key = super_blk_key(suffix); self.get_bytes(&key) } fn put_super_block(&mut self, super_blk: &[u8], suffix: u64) -> Result<()> { let key = super_blk_key(suffix); self.set_bytes(&key, super_blk) } fn get_wal(&mut self, id: &Eid) -> Result<Vec<u8>> { let key = wal_key(id); self.get_bytes(&key) } fn put_wal(&mut self, id: &Eid, wal: &[u8]) -> Result<()> { let key = wal_key(id); self.set_bytes(&key, wal) } fn del_wal(&mut self, id: &Eid) -> Result<()> { let key = wal_key(id); self.del(&key) } fn get_address(&mut self, id: &Eid) -> Result<Vec<u8>> { let key = addr_key(id); self.get_bytes(&key) } fn put_address(&mut self, id: &Eid, addr: &[u8]) -> Result<()> { let key = addr_key(id); self.set_bytes(&key, addr) } fn del_address(&mut self, id: &Eid) -> Result<()> { let key = addr_key(id); self.del(&key) } fn get_blocks(&mut self, dst: &mut [u8], span: Span) -> Result<()> { let mut read = 0; for blk_idx in span { let key = blk_key(blk_idx); let blk = self.get_bytes(&key)?; 
assert_eq!(blk.len(), BLK_SIZE); dst[read..read + BLK_SIZE].copy_from_slice(&blk); read += BLK_SIZE; } Ok(()) } fn put_blocks(&mut self, span: Span, mut blks: &[u8]) -> Result<()> { for blk_idx in span { let key = blk_key(blk_idx); self.set_bytes(&key, &blks[..BLK_SIZE])?; blks = &blks[BLK_SIZE..]; } Ok(()) } fn del_blocks(&mut self, span: Span) -> Result<()> { for blk_idx in span { let key = blk_key(blk_idx); self.del(&key)?; } Ok(()) } #[inline] fn flush(&mut self) -> Result<()> { Ok(()) } } impl Debug for RedisStorage { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("RedisStorage").finish() } } impl IntoRef for RedisStorage {} #[cfg(test)] mod tests { use super::*; use base::init_env; #[test] fn redis_storage() { init_env(); let mut rs = RedisStorage::new("127.0.0.1").unwrap(); rs.connect().unwrap(); rs.init(Crypto::default(), Key::new_empty()).unwrap(); let id = Eid::new(); let buf = vec![1, 2, 3]; let blks = vec![42u8; BLK_SIZE * 3]; let mut dst = vec![0u8; BLK_SIZE * 3]; rs.put_super_block(&buf, 0).unwrap(); let s = rs.get_super_block(0).unwrap(); assert_eq!(&s[..], &buf[..]); rs.put_wal(&id, &buf).unwrap(); let s = rs.get_wal(&id).unwrap(); assert_eq!(&s[..], &buf[..]); rs.del_wal(&id).unwrap(); assert_eq!(rs.get_wal(&id).unwrap_err(), Error::NotFound); rs.put_address(&id, &buf).unwrap(); let s = rs.get_address(&id).unwrap(); assert_eq!(&s[..], &buf[..]); rs.del_address(&id).unwrap(); assert_eq!(rs.get_address(&id).unwrap_err(), Error::NotFound); let span = Span::new(0, 3); rs.put_blocks(span, &blks).unwrap(); rs.get_blocks(&mut dst, span).unwrap(); assert_eq!(&dst[..], &blks[..]); rs.del_blocks(Span::new(1, 2)).unwrap(); assert_eq!( rs.get_blocks(&mut dst, Span::new(0, 3)).unwrap_err(), Error::NotFound ); assert_eq!( rs.get_blocks(&mut dst[..BLK_SIZE], Span::new(1, 1)) .unwrap_err(), Error::NotFound ); assert_eq!( rs.get_blocks(&mut dst[..BLK_SIZE], Span::new(2, 1)) .unwrap_err(), Error::NotFound ); drop(rs); let mut rs = 
RedisStorage::new("127.0.0.1").unwrap(); rs.connect().unwrap(); rs.open(Crypto::default(), Key::new_empty()).unwrap(); rs.get_blocks(&mut dst[..BLK_SIZE], Span::new(0, 1)) .unwrap(); assert_eq!(&dst[..BLK_SIZE], &blks[..BLK_SIZE]); assert_eq!( rs.get_blocks(&mut dst[..BLK_SIZE], Span::new(1, 1)) .unwrap_err(), Error::NotFound ); assert_eq!( rs.get_blocks(&mut dst[..BLK_SIZE], Span::new(2, 1)) .unwrap_err(), Error::NotFound ); } }
use std::fmt::{self, Debug}; use std::sync::Mutex; use redis::{Client, Commands, Connection}; use base::crypto::{Crypto, Key}; use base::IntoRef; use error::{Error, Result}; use trans::Eid; use volume::address::Span; use volume::storage::Storable; use volume::BLK_SIZE; #[inline] fn super_blk_key(suffix: u64) -> String { format!("super_blk:{}", suffix) } #[inline] fn wal_key(id: &Eid) -> String { format!("wal:{}", id.to_string()) } #[inline] fn addr_key(id: &Eid) -> String { format!("address:{}", id.to_string()) } #[inline] fn blk_key(blk_idx: usize) -> String { format!("block:{}", blk_idx) } pub struct RedisStorage { client: Client, conn: Option<Mutex<Connection>>, } impl RedisStorage { pub fn new(path: &str) -> Result<Self> { let url = if path.starts_with("+unix+") { format!("redis+unix:///{}", &path[6..]) } else { format!("redis://{}", path) }; let client = Client::open(url.as_str())?; Ok(RedisStorage { client, conn: None }) } fn get_bytes(&self, key: &str) -> Result<Vec<u8>> { match self.conn { Some(ref conn) => { let conn = conn.lock().unwrap(); if !conn.exists::<&str, bool>(key)? 
{ return Err(Error::NotFound); } let ret = conn.get(key)?; Ok(ret) } None => unreachable!(), } } fn set_bytes(&self, key: &str, val: &[u8]) -> Result<()> { match self.conn { Some(ref conn) => { let conn = conn.lock().unwrap(); conn.set(key, val)?; Ok(()) } None => unreachable!(), } } fn del(&self, key: &str) -> Result<()> { match self.conn { Some(ref conn) => { let conn = conn.lock().unwrap(); conn.del(key)?; Ok(()) } None => unreachable!(), } } } impl Storable for RedisStorage { fn exists(&self) -> Result<bool> { let conn = self.client.get_connection()?; let key = super_blk_key(0); conn.exists::<&str, bool>(&key).map_err(Error::from) } fn connect(&mut self) -> Result<()> { let conn = self.client.get_connection()?; self.conn = Some(Mutex::new(conn)); Ok(()) } #[inline] fn init(&mut self, _crypto: Crypto, _key: Key) -> Result<()> { Ok(()) } #[inline] fn open(&mut self, _crypto: Crypto, _key: Key) -> Result<()> { Ok(()) } fn get_super_block(&mut self, suffix: u64) -> Result<Vec<u8>> { let key = super_blk_key(suffix); self.get_bytes(&key) } fn put_super_block(&mut self, super_blk: &[u8], suffix: u64) -> Result<()> { let key = super_blk_key(suffix); self.set_bytes(&key, super_blk) } fn get_wal(&mut self, id: &Eid) -> Result<Vec<u8>> { let key = wal_key(id); self.get_bytes(&key) } fn put_wal(&mut self, id: &Eid, wal: &[u8]) -> Result<()> { let key = wal_key(id); self.set_bytes(&key, wal) } fn del_wal(&mut self, id: &Eid) -> Result<()> { let key = wal_key(id); self.del(&key) } fn get_address(&mut self, id: &Eid) -> Result<Vec<u8>> { let key = addr_key(id); self.get_bytes(&key) } fn put_address(&mut self, id: &Eid, addr: &[u8]) -> Result<()> { let key = addr_key(id); self.set_bytes(&key, addr) } fn del_address(&mut self, id: &Eid) -> Result<()> { let key = addr_key(id); self.del(&key) } fn get_blocks(&mut self, dst: &mut [u8], span: Span) -> Result<()> { let mut read = 0; for blk_idx in span { let key = blk_key(blk_idx); let blk = self.get_bytes(&key)?; 
assert_eq!(blk.len(), BLK_SIZE); dst[read..read + BLK_SIZE].copy_from_slice(&blk); read += BLK_SIZE; } Ok(()) } fn put_blocks(&mut self, span: Span, mut blks: &[u8]) -> Result<()> { for blk_idx in span { let key = blk_key(blk_idx); self.set_bytes(&key, &blks[..BLK_SIZE])?; blks = &blks[BLK_SIZE..]; } Ok(()) } fn del_blocks(&mut self, span: Span) -> Result<()> { for blk_idx in span { let key = blk_key(blk_idx); self.del(&key)?; } Ok(()) } #[inline] fn flush(&mut self) -> Result<()> { Ok(()) } } impl Debug for RedisStorage { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("RedisStorage").finish() } } impl IntoRef for RedisStorage {} #[cfg(test)] mod tests { use super::*; use base::init_env; #[test] fn redis_storage() { init_env(); let mut rs = RedisStorage::new("127.0.0.1").unwrap(); rs.connect().unwrap(); rs.init(Crypto::default(), Key::new_empty()).unwrap(); let id = Eid::new(); let buf = vec![1, 2, 3]; let blks = vec![42u8; BLK_SIZE * 3]; let mut dst = vec![0u8; BLK_SIZE * 3]; rs.put_super_block(&buf, 0).unwrap(); let s = rs.get_super_block(0).unwrap(); assert_eq!(&s[..], &buf[..]); rs.put_wal(&id, &buf).unwrap(); let s = rs.get_wal(&id).unwrap(); assert_eq!(&s[..], &buf[..]); rs.del_wal(&id).unwrap(); assert_eq!(rs.get_wal(&id).unwrap_err(), Error::NotFound); rs.put_address(&id, &buf).unwrap(); let s = rs.get_address(&id).unwrap(); assert_eq!(&s[..], &buf[..]); rs.del_address(&id).unwrap(); assert_eq!(rs.get_address(&id).unwrap_err(), Error::NotFound); let span = Span::new(0, 3); rs.put_blocks(span, &blks).unwrap(); rs.get_blocks(&mut dst, span).unwrap(); assert_eq!(&dst[..], &blks[..]); rs.del_blocks(Span::new(1, 2)).unwrap(); assert_eq!( rs.get_blocks(&mut dst, Span::new(0, 3)).unwrap_err(),
}
Error::NotFound ); assert_eq!( rs.get_blocks(&mut dst[..BLK_SIZE], Span::new(1, 1)) .unwrap_err(), Error::NotFound ); assert_eq!( rs.get_blocks(&mut dst[..BLK_SIZE], Span::new(2, 1)) .unwrap_err(), Error::NotFound ); drop(rs); let mut rs = RedisStorage::new("127.0.0.1").unwrap(); rs.connect().unwrap(); rs.open(Crypto::default(), Key::new_empty()).unwrap(); rs.get_blocks(&mut dst[..BLK_SIZE], Span::new(0, 1)) .unwrap(); assert_eq!(&dst[..BLK_SIZE], &blks[..BLK_SIZE]); assert_eq!( rs.get_blocks(&mut dst[..BLK_SIZE], Span::new(1, 1)) .unwrap_err(), Error::NotFound ); assert_eq!( rs.get_blocks(&mut dst[..BLK_SIZE], Span::new(2, 1)) .unwrap_err(), Error::NotFound ); }
function_block-function_prefix_line
[ { "content": "pub fn random_buf(buf: &mut [u8]) {\n\n unsafe {\n\n randombytes_buf(buf.as_mut_ptr(), buf.len());\n\n }\n\n}\n\n\n", "file_path": "tests/common/crypto.rs", "rank": 0, "score": 387509.8669507819 }, { "content": "pub fn random_slice(buf: &[u8]) -> (usize, &[u8]) {\n\n let pos = random_usize(buf.len());\n\n let len = random_usize(buf.len() - pos);\n\n (pos, &buf[pos..(pos + len)])\n\n}\n\n\n", "file_path": "tests/common/crypto.rs", "rank": 1, "score": 359591.58282773243 }, { "content": "pub fn random_buf_deterministic(buf: &mut [u8], seed: &RandomSeed) {\n\n unsafe {\n\n randombytes_buf_deterministic(\n\n buf.as_mut_ptr(),\n\n buf.len(),\n\n seed.as_ptr(),\n\n );\n\n }\n\n}\n\n\n", "file_path": "tests/common/crypto.rs", "rank": 3, "score": 345649.0779037907 }, { "content": "pub fn random_slice_with_len(buf: &[u8], len: usize) -> &[u8] {\n\n let pos = random_usize(buf.len() - len);\n\n &buf[pos..(pos + len)]\n\n}\n\n\n\npub const RANDOM_SEED_SIZE: usize = 32;\n\n\n\n#[derive(Debug, Default)]\n\npub struct RandomSeed(pub [u8; RANDOM_SEED_SIZE]);\n\n\n\nimpl RandomSeed {\n\n pub fn new() -> Self {\n\n let mut seed = Self::default();\n\n random_buf(&mut seed.0);\n\n seed\n\n }\n\n\n\n pub fn from(seed: &[u8]) -> Self {\n\n assert_eq!(seed.len(), RANDOM_SEED_SIZE);\n\n let mut ret = RandomSeed([0u8; RANDOM_SEED_SIZE]);\n", "file_path": "tests/common/crypto.rs", "rank": 4, "score": 340369.92167520756 }, { "content": "// parse uri\n\n// example: access_key@repo_id?cache_type=mem&cache_size=2mb[&base=path]\n\n// return: (\n\n// access_key: &str,\n\n// repo_id: &str,\n\n// cache_type: CacheType,\n\n// cache_size: usize,\n\n// base: PathBuf\n\n// )\n\nfn parse_uri(mut uri: &str) -> Result<(&str, &str, CacheType, usize, PathBuf)> {\n\n if !uri.is_ascii() {\n\n return Err(Error::InvalidUri);\n\n }\n\n\n\n // parse access key, required\n\n let mut idx = uri.find('@').ok_or(Error::InvalidUri)?;\n\n let access_key = &uri[..idx];\n\n uri = &uri[idx + 1..];\n\n if 
uri.is_empty() {\n\n return Err(Error::InvalidUri);\n\n }\n\n\n\n // parse repo id, required\n\n let repo_id;\n\n if let Some(idx) = uri.find('?') {\n\n repo_id = &uri[..idx];\n\n uri = &uri[idx + 1..];\n\n } else {\n\n repo_id = &uri[..uri.len()];\n", "file_path": "src/volume/storage/zbox/zbox.rs", "rank": 7, "score": 307089.5605474685 }, { "content": "fn verify_content(f: &mut File, buf: &[u8]) {\n\n let mut dst = Vec::new();\n\n let ver_num = f.history().unwrap().last().unwrap().num();\n\n let mut rdr = f.version_reader(ver_num).unwrap();\n\n let result = rdr.read_to_end(&mut dst).unwrap();\n\n assert_eq!(result, buf.len());\n\n assert_eq!(&dst[..], &buf[..]);\n\n}\n\n\n", "file_path": "tests/file.rs", "rank": 8, "score": 303978.49936662195 }, { "content": "pub fn hash(inbuf: &[u8]) -> Hash {\n\n let mut ret = Hash::new();\n\n unsafe {\n\n match crypto_generichash(\n\n ret.as_mut_ptr(),\n\n HASH_SIZE,\n\n inbuf.as_ptr(),\n\n inbuf.len() as u64,\n\n ptr::null(),\n\n 0,\n\n ) {\n\n 0 => ret,\n\n _ => unreachable!(),\n\n }\n\n }\n\n}\n\n\n\nimpl Debug for Hash {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"Hash({}..)\", &self.to_string()[..6])\n", "file_path": "tests/common/crypto.rs", "rank": 10, "score": 263074.68352677854 }, { "content": "pub fn ensure_parents_dir(path: &std::path::Path) -> Result<()> {\n\n let parent = path.parent().unwrap();\n\n if !parent.exists() {\n\n std::fs::create_dir_all(parent)?;\n\n }\n\n Ok(())\n\n}\n\n\n\n/// Remove parent dir if it is empty\n\n#[cfg(any(\n\n feature = \"storage-file\",\n\n all(feature = \"storage-zbox\", not(target_arch = \"wasm32\"))\n\n))]\n", "file_path": "src/base/utils.rs", "rank": 11, "score": 252922.67874375987 }, { "content": "pub fn remove_empty_parent_dir(path: &std::path::Path) -> Result<()> {\n\n for parent in path.ancestors().skip(1) {\n\n match std::fs::read_dir(parent) {\n\n Ok(dirs) => {\n\n if dirs.count() > 0 {\n\n break;\n\n }\n\n }\n\n Err(ref err) if err.kind() == 
std::io::ErrorKind::NotFound => break,\n\n Err(err) => return Err(Error::from(err)),\n\n }\n\n std::fs::remove_dir(&parent)?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/base/utils.rs", "rank": 12, "score": 249094.52781864247 }, { "content": "pub fn random_usize(upper_bound: usize) -> usize {\n\n unsafe { randombytes_uniform(upper_bound as u32) as usize }\n\n}\n\n\n", "file_path": "tests/common/crypto.rs", "rank": 13, "score": 246209.7039554553 }, { "content": "#[inline]\n\npub fn zbox_version() -> String {\n\n format!(\"ZboxFS {}\", Version::lib_version())\n\n}\n\n\n\nstatic INIT: Once = ONCE_INIT;\n\n\n\ncfg_if! {\n\n if #[cfg(target_os = \"android\")] {\n\n pub fn init_env() {\n\n // only call the initialisation code once globally\n\n INIT.call_once(|| {\n\n android_logger::init_once(\n\n Filter::default()\n\n .with_min_level(Level::Trace)\n\n .with_allowed_module_path(\"zbox::base\")\n\n .with_allowed_module_path(\"zbox::fs::fs\")\n\n .with_allowed_module_path(\"zbox::trans::txmgr\"),\n\n Some(\"zboxfs\"),\n\n );\n\n crypto::Crypto::init().expect(\"Initialise crypto failed\");\n", "file_path": "src/base/mod.rs", "rank": 14, "score": 241750.94255500636 }, { "content": "#[allow(dead_code)]\n\npub fn speed_str(duration: &Duration, data_len: usize) -> String {\n\n let secs = duration.as_secs() as f32\n\n + duration.subsec_nanos() as f32 / 1_000_000_000.0;\n\n format!(\"{} MB/s\", data_len as f32 / (1024.0 * 1024.0) / secs)\n\n}\n\n\n\n/// Ensure all parents dir are created along the path\n\n#[cfg(any(\n\n feature = \"storage-file\",\n\n all(feature = \"storage-zbox\", not(target_arch = \"wasm32\"))\n\n))]\n", "file_path": "src/base/utils.rs", "rank": 15, "score": 233954.0172045443 }, { "content": "fn test_file_perf(data: &[u8], dir: &Path) {\n\n println!(\"---------------------------------------------\");\n\n println!(\"File storage performance test (no compress)\");\n\n println!(\"---------------------------------------------\");\n\n let mut repo = 
RepoOpener::new()\n\n .create_new(true)\n\n .open(&format!(\"file://{}/repo\", dir.display()), \"pwd\")\n\n .unwrap();\n\n let mut files = make_files(&mut repo);\n\n test_perf(&mut repo, &mut files, data);\n\n\n\n println!(\"---------------------------------------------\");\n\n println!(\"File storage performance test (compress)\");\n\n println!(\"---------------------------------------------\");\n\n let mut repo = RepoOpener::new()\n\n .create_new(true)\n\n .compress(true)\n\n .open(&format!(\"file://{}/repo2\", dir.display()), \"pwd\")\n\n .unwrap();\n\n let mut files = make_files(&mut repo);\n\n test_perf(&mut repo, &mut files, data);\n\n}\n\n\n", "file_path": "tests/perf.rs", "rank": 16, "score": 233710.6058008732 }, { "content": "fn test_baseline(data: &Vec<u8>, dir: &Path) {\n\n println!(\"---------------------------------------------\");\n\n println!(\"Baseline test\");\n\n println!(\"---------------------------------------------\");\n\n\n\n let mut buf = vec![0u8; FILE_LEN];\n\n let tx_time = Duration::default();\n\n\n\n // test memcpy speed\n\n let now = Instant::now();\n\n for i in 0..ROUND {\n\n unsafe {\n\n ptr::copy_nonoverlapping(\n\n (&data[i * FILE_LEN..(i + 1) * FILE_LEN]).as_ptr(),\n\n (&mut buf[..]).as_mut_ptr(),\n\n FILE_LEN,\n\n );\n\n }\n\n }\n\n let memcpy_time = now.elapsed();\n", "file_path": "tests/perf.rs", "rank": 17, "score": 229706.25762219465 }, { "content": "fn test_perf(repo: &mut Repo, files: &mut Vec<File>, data: &[u8]) {\n\n print!(\"Performing testing...\");\n\n io::stdout().flush().unwrap();\n\n\n\n // write\n\n let now = Instant::now();\n\n for i in 0..ROUND {\n\n let data = &data[i * FILE_LEN..(i + 1) * FILE_LEN];\n\n files[i].write_once(&data[..]).unwrap();\n\n }\n\n let write_time = now.elapsed();\n\n\n\n // read\n\n let mut buf = Vec::new();\n\n let now = Instant::now();\n\n for i in 0..ROUND {\n\n files[i].seek(SeekFrom::Start(0)).unwrap();\n\n let read = files[i].read_to_end(&mut buf).unwrap();\n\n assert_eq!(read, 
FILE_LEN);\n\n }\n", "file_path": "tests/perf.rs", "rank": 18, "score": 221035.9689469154 }, { "content": "#[inline]\n\nfn time_str(duration: &Duration) -> String {\n\n format!(\"{}.{}s\", duration.as_secs(), duration.subsec_nanos())\n\n}\n\n\n", "file_path": "tests/perf.rs", "rank": 19, "score": 212431.13838743078 }, { "content": "fn speed_str(duration: &Duration) -> String {\n\n let secs = duration.as_secs() as f32\n\n + duration.subsec_nanos() as f32 / 1_000_000_000.0;\n\n let speed = DATA_LEN as f32 / (1024.0 * 1024.0) / secs;\n\n format!(\"{:.2} MB/s\", speed)\n\n}\n\n\n", "file_path": "tests/perf.rs", "rank": 20, "score": 212425.89007740145 }, { "content": "fn tps_str(duration: &Duration) -> String {\n\n if duration.eq(&Duration::default()) {\n\n return format!(\"N/A\");\n\n }\n\n let secs = duration.as_secs() as f32\n\n + duration.subsec_nanos() as f32 / 1_000_000_000.0;\n\n let speed = TX_ROUND as f32 / secs;\n\n format!(\"{:.0} tx/s\", speed)\n\n}\n\n\n", "file_path": "tests/perf.rs", "rank": 21, "score": 212425.89007740145 }, { "content": "#[allow(dead_code)]\n\n#[inline]\n\npub fn align_floor_u64(x: u64, size: u64) -> u64 {\n\n x - (x & (size - 1))\n\n}\n\n\n\n/// Align usize to floor and convert to chunk index, size must be 2^n integer\n", "file_path": "src/base/utils.rs", "rank": 22, "score": 206509.05420920422 }, { "content": "#[allow(dead_code)]\n\n#[inline]\n\npub fn align_ceil_u64(x: u64, size: u64) -> u64 {\n\n if x == 0 {\n\n return size;\n\n }\n\n x + (-(x as i64) & (size as i64 - 1)) as u64\n\n}\n\n\n\n/// Align usize to ceil and convert to chunk index, size must be 2^n integer\n", "file_path": "src/base/utils.rs", "rank": 23, "score": 206509.05420920422 }, { "content": "/// Storable trait\n\npub trait Storable: Debug + Send + Sync {\n\n // check if storage exists\n\n fn exists(&self) -> Result<bool>;\n\n\n\n // make connection to storage\n\n fn connect(&mut self) -> Result<()>;\n\n\n\n // initial a storage\n\n fn init(&mut self, crypto: 
Crypto, key: Key) -> Result<()>;\n\n\n\n // open a storage\n\n fn open(&mut self, crypto: Crypto, key: Key) -> Result<()>;\n\n\n\n // super block read/write, must not buffered\n\n // write no need to be atomic, but must gurantee any successful\n\n // write is persistent\n\n fn get_super_block(&mut self, suffix: u64) -> Result<Vec<u8>>;\n\n fn put_super_block(&mut self, super_blk: &[u8], suffix: u64) -> Result<()>;\n\n\n\n // wal read/write, must not buffered\n", "file_path": "src/volume/storage/mod.rs", "rank": 24, "score": 203579.6859350751 }, { "content": "// run SELECT statement on a blob column\n\nfn run_select_blob(stmt: *mut ffi::sqlite3_stmt) -> Result<Vec<u8>> {\n\n let result = unsafe { ffi::sqlite3_step(stmt) };\n\n match result {\n\n ffi::SQLITE_ROW => {\n\n // get data and data size\n\n let (data, data_len) = unsafe {\n\n (\n\n ffi::sqlite3_column_blob(stmt, 0),\n\n ffi::sqlite3_column_bytes(stmt, 0) as usize,\n\n )\n\n };\n\n\n\n // copy data to vec and return it\n\n let mut ret = vec![0u8; data_len];\n\n unsafe {\n\n ptr::copy_nonoverlapping(\n\n data,\n\n (&mut ret).as_mut_ptr() as *mut c_void,\n\n data_len,\n\n );\n", "file_path": "src/volume/storage/sqlite/sqlite.rs", "rank": 25, "score": 202712.6112065087 }, { "content": "// read one data piece and calculate its hash\n\nfn piece_hash<R: Read + Seek>(offset: usize, rdr: &mut R) -> IoResult<Hash> {\n\n rdr.seek(SeekFrom::Start(align_piece_floor(offset) as u64))?;\n\n let mut buf = vec![0u8; PIECE_SIZE];\n\n let mut pos = 0;\n\n let mut state = Crypto::hash_init();\n\n\n\n loop {\n\n let read = rdr.read(&mut buf[pos..])?;\n\n if read == 0 {\n\n break;\n\n }\n\n Crypto::hash_update(&mut state, &buf[pos..pos + read]);\n\n pos += read;\n\n }\n\n\n\n Ok(Crypto::hash_final(&mut state))\n\n}\n\n\n", "file_path": "src/content/merkle_tree.rs", "rank": 26, "score": 202117.77279493003 }, { "content": "// parse storage part in uri\n\nfn parse_uri(uri: &str) -> Result<Box<dyn Storable>> {\n\n if !uri.is_ascii() 
{\n\n return Err(Error::InvalidUri);\n\n }\n\n\n\n // extract storage string\n\n let idx = uri.find(\"://\").ok_or(Error::InvalidUri)?;\n\n let part = &uri[..idx];\n\n\n\n match part {\n\n \"mem\" => {\n\n #[cfg(feature = \"storage-mem\")]\n\n {\n\n Ok(Box::new(super::mem::MemStorage::new()))\n\n }\n\n #[cfg(not(feature = \"storage-mem\"))]\n\n {\n\n Err(Error::InvalidUri)\n\n }\n\n }\n", "file_path": "src/volume/storage/storage.rs", "rank": 27, "score": 201827.09588256414 }, { "content": "// make sector relative path from its index\n\nfn sector_rel_path(sec_idx: usize, hash_key: &HashKey) -> PathBuf {\n\n let buf = (sec_idx as u64).to_le_bytes();\n\n Path::new(BASE_DIR)\n\n .join(Crypto::hash_with_key(&buf, hash_key).to_rel_path())\n\n}\n\n\n\n/// Sector recycle map\n", "file_path": "src/volume/storage/zbox/sector.rs", "rank": 28, "score": 197680.3266679347 }, { "content": "/// Entity Id trait\n\npub trait Id {\n\n fn id(&self) -> &Eid;\n\n fn id_mut(&mut self) -> &mut Eid;\n\n}\n", "file_path": "src/trans/eid.rs", "rank": 29, "score": 195902.6626924799 }, { "content": "pub fn random_u32(upper_bound: u32) -> u32 {\n\n unsafe { randombytes_uniform(upper_bound) }\n\n}\n\n\n", "file_path": "tests/common/crypto.rs", "rank": 30, "score": 193043.09010024925 }, { "content": "fn smoke_test(uri: String) {\n\n init_env();\n\n\n\n // initialise repo and write file\n\n {\n\n let mut repo =\n\n RepoOpener::new().create(true).open(&uri, \"pwd\").unwrap();\n\n\n\n let mut file = OpenOptions::new()\n\n .create(true)\n\n .open(&mut repo, \"/my_file.txt\")\n\n .unwrap();\n\n\n\n file.write_once(b\"Hello, World!\").unwrap();\n\n\n\n // read file content using std::io::Read trait\n\n let mut content = String::new();\n\n file.seek(SeekFrom::Start(0)).unwrap();\n\n file.read_to_string(&mut content).unwrap();\n\n assert_eq!(content, \"Hello, World!\");\n", "file_path": "tests/repo.rs", "rank": 31, "score": 192686.91925254746 }, { "content": "fn test_mem_perf(data: &[u8]) {\n\n 
println!(\"---------------------------------------------\");\n\n println!(\"Memory storage performance test (no compress)\");\n\n println!(\"---------------------------------------------\");\n\n let mut repo = RepoOpener::new()\n\n .create(true)\n\n .open(\"mem://perf\", \"pwd\")\n\n .unwrap();\n\n let mut files = make_files(&mut repo);\n\n test_perf(&mut repo, &mut files, data);\n\n\n\n println!(\"---------------------------------------------\");\n\n println!(\"Memory storage performance test (compress)\");\n\n println!(\"---------------------------------------------\");\n\n let mut repo = RepoOpener::new()\n\n .create(true)\n\n .compress(true)\n\n .open(\"mem://perf2\", \"pwd\")\n\n .unwrap();\n\n let mut files = make_files(&mut repo);\n\n test_perf(&mut repo, &mut files, data);\n\n}\n\n\n", "file_path": "tests/perf.rs", "rank": 32, "score": 188263.21042576816 }, { "content": "fn make_test_data() -> Vec<u8> {\n\n print!(\n\n \"\\nMaking {} MB pseudo random test data...\",\n\n DATA_LEN / 1024 / 1024\n\n );\n\n io::stdout().flush().unwrap();\n\n let mut buf = vec![0u8; DATA_LEN];\n\n let mut rng = XorShiftRng::from_seed([42u8; 16]);\n\n rng.fill_bytes(&mut buf);\n\n println!(\"done\\n\");\n\n buf\n\n}\n\n\n", "file_path": "tests/perf.rs", "rank": 33, "score": 188263.21042576816 }, { "content": "/// Transable trait, be able to be added in transaction\n\npub trait Transable: Debug + Id + Send + Sync {\n\n fn action(&self) -> Action;\n\n fn commit(&mut self, vol: &VolumeRef) -> Result<()>;\n\n fn complete_commit(&mut self);\n\n fn abort(&mut self);\n\n}\n\n\n\npub type TransableRef = Arc<RwLock<dyn Transable>>;\n\n\n\n/// Transaction\n\npub struct Trans {\n\n txid: Txid,\n\n cohorts: LinkedHashMap<Eid, TransableRef>,\n\n wal: Wal,\n\n wal_armor: VolumeWalArmor<Wal>,\n\n wal_saved: bool,\n\n}\n\n\n\nimpl Trans {\n\n pub fn new(txid: Txid, vol: &VolumeRef) -> Self {\n", "file_path": "src/trans/trans.rs", "rank": 34, "score": 186367.7803754248 }, { "content": "pub fn 
check_error(code: LZ4FErrorCode) -> IoResult<usize> {\n\n unsafe {\n\n if LZ4F_isError(code) != 0 {\n\n let error_name = LZ4F_getErrorName(code);\n\n return Err(IoError::new(\n\n ErrorKind::Other,\n\n LZ4Error(\n\n str::from_utf8(CStr::from_ptr(error_name).to_bytes())\n\n .unwrap()\n\n .to_string(),\n\n ),\n\n ));\n\n }\n\n }\n\n Ok(code as usize)\n\n}\n\n\n\n/* =============================================\n\n * Encoder\n\n * ============================================= */\n", "file_path": "src/base/lz4.rs", "rank": 35, "score": 178643.9991523354 }, { "content": "#[inline]\n\npub fn align_ceil(x: usize, size: usize) -> usize {\n\n if x == 0 {\n\n return size;\n\n }\n\n x + (-(x as isize) & (size as isize - 1)) as usize\n\n}\n\n\n\n/// Align u64 integer to ceil, size must be 2^n integer\n\n/// Note: when x is on size boundary, it will align to next ceil\n", "file_path": "src/base/utils.rs", "rank": 36, "score": 172015.82762476194 }, { "content": "#[inline]\n\npub fn align_offset(x: usize, size: usize) -> usize {\n\n x & (size - 1)\n\n}\n\n\n\n/// Align usize to floor, size must be 2^n integer\n", "file_path": "src/base/utils.rs", "rank": 37, "score": 172015.82762476194 }, { "content": "#[allow(dead_code)]\n\n#[inline]\n\npub fn align_floor(x: usize, size: usize) -> usize {\n\n x - (x & (size - 1))\n\n}\n\n\n\n/// Align u64 to floor, size must be 2^n integer\n", "file_path": "src/base/utils.rs", "rank": 38, "score": 172015.66632848416 }, { "content": "// fuzz tester trait\n\npub trait Testable: Debug + Send + Sync {\n\n fn test_round(\n\n &self,\n\n fuzzer: &mut Fuzzer,\n\n step: &Step,\n\n ctlgrp: &mut ControlGroup,\n\n );\n\n}\n\n\n\n// fuzzer\n\n#[derive(Debug)]\n\npub struct Fuzzer {\n\n pub batch: String,\n\n pub path: PathBuf,\n\n pub uri: String,\n\n pub repo_handle: RepoHandle,\n\n pub seed: crypto::RandomSeed,\n\n pub ctlr: Controller,\n\n pub init_rounds: usize,\n\n pub data: Vec<u8>,\n", "file_path": "tests/common/fuzzer.rs", "rank": 39, "score": 
170642.08024574377 }, { "content": "// reset and clean up statement\n\nfn reset_stmt(stmt: *mut ffi::sqlite3_stmt) -> Result<()> {\n\n let result = unsafe { ffi::sqlite3_reset(stmt) };\n\n check_result(result)?;\n\n let result = unsafe { ffi::sqlite3_clear_bindings(stmt) };\n\n check_result(result)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/volume/storage/sqlite/sqlite.rs", "rank": 40, "score": 168834.11118183215 }, { "content": "// run DML statement, such as INSERT and DELETE\n\nfn run_dml(stmt: *mut ffi::sqlite3_stmt) -> Result<()> {\n\n let result = unsafe { ffi::sqlite3_step(stmt) };\n\n match result {\n\n ffi::SQLITE_DONE => Ok(()),\n\n _ => Err(Error::from(ffi::Error::new(result))),\n\n }\n\n}\n\n\n", "file_path": "src/volume/storage/sqlite/sqlite.rs", "rank": 41, "score": 168834.11118183215 }, { "content": "#[inline]\n\npub fn align_ceil_chunk(x: usize, size: usize) -> usize {\n\n align_ceil(x, size) / size\n\n}\n\n\n\n/// Output human friendly speed string\n", "file_path": "src/base/utils.rs", "rank": 42, "score": 168794.88473341003 }, { "content": "#[allow(dead_code)]\n\n#[inline]\n\npub fn align_floor_chunk(x: usize, size: usize) -> usize {\n\n align_floor(x, size) / size\n\n}\n\n\n\n/// Align usize integer to ceil, size must be 2^n integer\n", "file_path": "src/base/utils.rs", "rank": 43, "score": 168794.72343713226 }, { "content": "#[test]\n\nfn dir_read() {\n\n let mut env = common::TestEnv::new();\n\n let repo = &mut env.repo;\n\n\n\n repo.create_dir_all(\"/aaa/aaa1/aaa11\").unwrap();\n\n repo.create_dir_all(\"/aaa/aaa1/aaa12\").unwrap();\n\n repo.create_dir_all(\"/aaa/aaa2/\").unwrap();\n\n repo.create_dir(\"/aaa/aaa2/xxx\").unwrap();\n\n repo.create_dir_all(\"/bbb/bbb1\").unwrap();\n\n repo.create_dir(\"/bbb/xxx\").unwrap();\n\n repo.create_dir_all(\"/ccc\").unwrap();\n\n\n\n let dirs = repo.read_dir(\"/\").unwrap();\n\n assert_eq!(dirs.len(), 3);\n\n let dirs = repo.read_dir(\"/aaa\").unwrap();\n\n assert_eq!(dirs.len(), 2);\n\n let dirs = 
repo.read_dir(\"/bbb\").unwrap();\n\n assert_eq!(dirs.len(), 2);\n\n let dirs = repo.read_dir(\"/ccc\").unwrap();\n\n assert_eq!(dirs.len(), 0);\n\n}\n\n\n", "file_path": "tests/dir.rs", "rank": 44, "score": 168749.44175702689 }, { "content": "fn print_result(\n\n read_time: &Duration,\n\n write_time: &Duration,\n\n tx_time: &Duration,\n\n) {\n\n println!(\n\n \"read: {}, write: {}, tps: {}\",\n\n speed_str(&read_time),\n\n speed_str(&write_time),\n\n tps_str(&tx_time),\n\n );\n\n}\n\n\n", "file_path": "tests/perf.rs", "rank": 45, "score": 168683.3427916657 }, { "content": "pub fn init(max_level: Level) -> Result<(), SetLoggerError> {\n\n let logger = WasmLogger {};\n\n log::set_boxed_logger(Box::new(logger)).and_then(|_| {\n\n log::set_max_level(max_level.to_level_filter());\n\n Ok(())\n\n })\n\n}\n", "file_path": "src/base/wasm_logger.rs", "rank": 46, "score": 167901.23316436674 }, { "content": "#[inline]\n\nfn create_ok_response() -> Result<Response> {\n\n create_response(StatusCode::OK, Vec::new())\n\n}\n\n\n", "file_path": "src/volume/storage/zbox/transport/faulty.rs", "rank": 47, "score": 162501.93491057627 }, { "content": "fn make_files(repo: &mut Repo) -> Vec<File> {\n\n let mut files: Vec<File> = Vec::new();\n\n for i in 0..ROUND {\n\n let filename = format!(\"/file_{}\", i);\n\n let file = OpenOptions::new()\n\n .create(true)\n\n .open(repo, filename)\n\n .unwrap();\n\n files.push(file);\n\n }\n\n files\n\n}\n\n\n", "file_path": "tests/perf.rs", "rank": 48, "score": 160190.5846724758 }, { "content": "#[test]\n\nfn file_read_write_mt() {\n\n let env_ref = Arc::new(RwLock::new(common::TestEnv::new()));\n\n let worker_cnt = 4;\n\n let task_cnt = 8;\n\n\n\n // concurrent write to different files\n\n let mut workers = Vec::new();\n\n for i in 0..worker_cnt {\n\n let env = env_ref.clone();\n\n workers.push(thread::spawn(move || {\n\n let base = i * task_cnt;\n\n for j in base..base + task_cnt {\n\n let path = format!(\"/{}\", j);\n\n let buf = [j; 3];\n\n let 
mut env = env.write().unwrap();\n\n let mut f = OpenOptions::new()\n\n .create(true)\n\n .open(&mut env.repo, &path)\n\n .unwrap();\n\n f.write_once(&buf[..]).unwrap();\n", "file_path": "tests/file.rs", "rank": 49, "score": 158916.26639556533 }, { "content": "#[test]\n\nfn file_read_write_st() {\n\n let mut env = common::TestEnv::new();\n\n let mut repo = &mut env.repo;\n\n\n\n let buf = [1u8, 2u8, 3u8];\n\n let buf2 = [4u8, 5u8, 6u8, 7u8, 8u8];\n\n let mut buf3 = Vec::new();\n\n buf3.extend_from_slice(&buf);\n\n buf3.extend_from_slice(&buf2);\n\n\n\n // #1, create and write a new file\n\n {\n\n let mut f = OpenOptions::new()\n\n .create(true)\n\n .open(&mut repo, \"/file\")\n\n .unwrap();\n\n f.write_once(&buf[..]).unwrap();\n\n verify_content(&mut f, &buf);\n\n\n\n // use repo file creation shortcut\n", "file_path": "tests/file.rs", "rank": 50, "score": 158916.26639556533 }, { "content": "// make a response object from status code and body\n\nfn create_response(status: i32, body: Vec<u8>) -> Result<Response> {\n\n let mut builder = HttpResponse::builder();\n\n builder.status(status as u16);\n\n let rdr = Cursor::new(body);\n\n let ret = Response::new(builder.body(Box::new(rdr) as Box<dyn Read>)?);\n\n Ok(ret)\n\n}\n\n\n\n// transport using jni http layer\n\npub struct JniTransport {\n\n jvm: JavaVM,\n\n}\n\n\n\nimpl JniTransport {\n\n pub fn new(timeout: u32) -> Result<Self> {\n\n let jvm = unsafe {\n\n let jvm = JVM.lock().unwrap();\n\n JavaVM::from_raw(jvm.get_java_vm_pointer())?\n\n };\n\n let ret = JniTransport { jvm };\n", "file_path": "src/volume/storage/zbox/transport/jni.rs", "rank": 51, "score": 156332.74582858558 }, { "content": "// permutation\n\n// item in permutation sequence:\n\n// (span in random data buffer, position in data buffer)\n\ntype Permu = Vec<(Span, usize)>;\n\n\n\n// repository handle\n\n#[derive(Debug)]\n\npub struct RepoHandle {\n\n pub repo: Repo,\n\n}\n\n\n\nimpl RepoHandle {\n\n fn new(repo: Repo) -> Self {\n\n RepoHandle { repo 
}\n\n }\n\n}\n\n\n", "file_path": "tests/common/fuzzer.rs", "rank": 52, "score": 154457.42832462175 }, { "content": "fn create_response(status: StatusCode, body: Vec<u8>) -> Result<Response> {\n\n let mut builder = HttpResponse::builder();\n\n builder.status(status);\n\n let body = Cursor::new(body);\n\n let ret = Response::new(builder.body(Box::new(body) as Box<dyn Read>)?);\n\n Ok(ret)\n\n}\n\n\n", "file_path": "src/volume/storage/zbox/transport/faulty.rs", "rank": 53, "score": 153713.38063229035 }, { "content": "// check result code returned by sqlite\n\nfn check_result(result: c_int) -> Result<()> {\n\n if result != ffi::SQLITE_OK {\n\n let err = ffi::Error::new(result);\n\n return Err(Error::from(err));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/volume/storage/sqlite/sqlite.rs", "rank": 54, "score": 153654.46337426465 }, { "content": "/// Wrap type into reference type Arc<RwLock<T>>\n\npub trait IntoRef: Sized {\n\n fn into_ref(self) -> Arc<RwLock<Self>> {\n\n Arc::new(RwLock::new(self))\n\n }\n\n}\n", "file_path": "src/base/mod.rs", "rank": 55, "score": 146809.93947198166 }, { "content": "#[derive(Default, Clone, Deserialize, Serialize)]\n\nstruct WalQueue {\n\n id: Eid,\n\n seq: u64,\n\n arm: Arm,\n\n\n\n // txid and block watermark\n\n txid_wmark: u64,\n\n blk_wmark: usize,\n\n\n\n // completed tx queue\n\n done: VecDeque<Txid>,\n\n\n\n // in-progress tx id list\n\n doing: HashSet<Txid>,\n\n\n\n #[serde(skip_serializing, skip_deserializing, default)]\n\n aborting: HashMap<Txid, Wal>,\n\n\n\n #[serde(skip_serializing, skip_deserializing, default)]\n\n wal_armor: VolumeWalArmor<Wal>,\n", "file_path": "src/trans/wal.rs", "rank": 56, "score": 139596.0584349816 }, { "content": "#[test]\n\nfn perf_test() {\n\n init_env();\n\n\n\n let mut dir = env::temp_dir();\n\n dir.push(\"zbox_perf_test\");\n\n if dir.exists() {\n\n fs::remove_dir_all(&dir).unwrap();\n\n }\n\n fs::create_dir(&dir).unwrap();\n\n\n\n let data = make_test_data();\n\n test_baseline(&data, 
&dir);\n\n test_mem_perf(&data);\n\n test_file_perf(&data, &dir);\n\n\n\n fs::remove_dir_all(&dir).unwrap();\n\n}\n", "file_path": "tests/perf.rs", "rank": 57, "score": 136341.75384477433 }, { "content": "#[test]\n\nfn fuzz_test() {\n\n // increase below numbers to perform intensive fuzz test\n\n let batches = 1; // number of fuzz test batches\n\n let init_rounds = 100; // initial rounds without random errors\n\n let rounds = 50; // number of rounds in one batch\n\n let worker_cnt = 2; // worker thread count\n\n\n\n for _ in 0..batches {\n\n let tester = Tester {};\n\n let fuzzer = Fuzzer::new(init_rounds).into_ref();\n\n Fuzzer::run(fuzzer, tester.into_ref(), rounds, worker_cnt);\n\n }\n\n}\n\n\n\n// enable this to reproduce the failed fuzz test case\n", "file_path": "tests/fuzz.rs", "rank": 58, "score": 136341.75384477433 }, { "content": "#[derive(Debug)]\n\nstruct Tester;\n\n\n\nimpl Tester {\n\n #[allow(dead_code)]\n\n fn into_ref(self) -> Arc<RwLock<Self>> {\n\n Arc::new(RwLock::new(self))\n\n }\n\n}\n\n\n\nimpl Testable for Tester {\n\n fn test_round(\n\n &self,\n\n fuzzer: &mut Fuzzer,\n\n step: &Step,\n\n ctlgrp: &mut ControlGroup,\n\n ) {\n\n let node = ctlgrp[step.node_idx].clone();\n\n //println!(\"===> node: {:?}, step: {:?}\", node, step);\n\n\n\n match step.action {\n", "file_path": "tests/fuzz.rs", "rank": 59, "score": 135322.03485425963 }, { "content": "#[test]\n\nfn repo_smoke_test() {\n\n let tmpdir = TempDir::new(\"zbox_test\").expect(\"Create temp dir failed\");\n\n let uri;\n\n\n\n #[cfg(feature = \"storage-file\")]\n\n {\n\n let base = \"file://\".to_string() + tmpdir.path().to_str().unwrap();\n\n uri = base + \"/repo\";\n\n }\n\n\n\n #[cfg(feature = \"storage-sqlite\")]\n\n {\n\n let file = tmpdir.path().join(\"zbox.db\");\n\n uri = \"sqlite://\".to_string() + file.to_str().unwrap();\n\n }\n\n\n\n #[cfg(feature = \"storage-redis\")]\n\n {\n\n let _ = tmpdir;\n\n uri = \"redis://localhost:6379\".to_string();\n\n }\n\n\n\n 
smoke_test(uri);\n\n}\n", "file_path": "tests/repo.rs", "rank": 60, "score": 133086.08048156375 }, { "content": "#[test]\n\n#[ignore]\n\nfn fuzz_test_rerun() {\n\n let tester = Tester {};\n\n // copy batch number from output and replace it below\n\n Fuzzer::rerun(\"1551267522\", Box::new(tester));\n\n}\n", "file_path": "tests/fuzz.rs", "rank": 61, "score": 133086.0246646944 }, { "content": "// open a regular file with options\n\nfn open_file_with_options<P: AsRef<Path>>(\n\n fs: &mut Fs,\n\n path: P,\n\n open_opts: &OpenOptions,\n\n) -> Result<File> {\n\n if fs.is_read_only()\n\n && (open_opts.write\n\n || open_opts.append\n\n || open_opts.truncate\n\n || open_opts.create\n\n || open_opts.create_new)\n\n {\n\n return Err(Error::ReadOnly);\n\n }\n\n\n\n let path = path.as_ref();\n\n\n\n match fs.resolve(path) {\n\n Ok(_) => {\n\n if open_opts.create_new {\n", "file_path": "src/repo.rs", "rank": 62, "score": 133043.21219642053 }, { "content": "#[test]\n\nfn file_truncate() {\n\n let mut env = common::TestEnv::new();\n\n let mut repo = &mut env.repo;\n\n\n\n let buf = [1u8, 2u8, 3u8];\n\n\n\n // write\n\n {\n\n let mut f = OpenOptions::new()\n\n .create(true)\n\n .open(&mut repo, \"/file\")\n\n .unwrap();\n\n f.write_once(&buf[..]).unwrap();\n\n }\n\n\n\n // open file in truncate mode\n\n {\n\n let mut f = OpenOptions::new()\n\n .truncate(true)\n\n .open(&mut repo, \"/file\")\n", "file_path": "tests/file.rs", "rank": 63, "score": 125784.94924170927 }, { "content": "#[test]\n\nfn file_rename() {\n\n let mut env = common::TestEnv::new();\n\n let repo = &mut env.repo;\n\n\n\n // #1, rename non-existing file\n\n {\n\n assert_eq!(\n\n repo.rename(\"/non-existing\", \"/foo\").unwrap_err(),\n\n Error::NotFound\n\n );\n\n }\n\n\n\n // #2, rename existing file to non-existing file\n\n {\n\n repo.create_file(\"/file2\").unwrap();\n\n repo.rename(\"/file2\", \"/file2a\").unwrap();\n\n\n\n assert!(!repo.path_exists(\"/file2\").unwrap());\n\n 
assert!(repo.path_exists(\"/file2a\").unwrap());\n\n let dirs = repo.read_dir(\"/\").unwrap();\n", "file_path": "tests/file.rs", "rank": 64, "score": 125784.94924170927 }, { "content": "#[test]\n\nfn file_seek() {\n\n let mut env = common::TestEnv::new();\n\n let mut repo = &mut env.repo;\n\n\n\n let buf = [1u8, 2u8, 3u8];\n\n\n\n // write\n\n {\n\n let mut f = OpenOptions::new()\n\n .create(true)\n\n .open(&mut repo, \"/file\")\n\n .unwrap();\n\n f.write_once(&buf[..]).unwrap();\n\n }\n\n\n\n // #1: seek and read\n\n {\n\n let mut f = repo.open_file(\"/file\").unwrap();\n\n\n\n // seek from start\n", "file_path": "tests/file.rs", "rank": 65, "score": 125784.94924170927 }, { "content": "#[test]\n\nfn dir_remove() {\n\n let mut env = common::TestEnv::new();\n\n let repo = &mut env.repo;\n\n\n\n repo.create_dir_all(\"/aaa/bbb/ccc\").unwrap();\n\n repo.create_dir_all(\"/aaa/bbb/ddd\").unwrap();\n\n assert!(repo.remove_dir(\"/aaa\").is_err());\n\n assert!(repo.remove_dir(\"/aaa/bbb\").is_err());\n\n repo.remove_dir(\"/aaa/bbb/ccc\").unwrap();\n\n assert!(repo.remove_dir(\"/not_exist\").is_err());\n\n repo.remove_dir_all(\"/aaa\").unwrap();\n\n assert!(repo.remove_dir(\"/aaa\").is_err());\n\n assert!(repo.remove_dir(\"/\").is_err());\n\n}\n\n\n", "file_path": "tests/dir.rs", "rank": 66, "score": 125784.94924170927 }, { "content": "#[test]\n\nfn dir_rename() {\n\n let mut env = common::TestEnv::new();\n\n let repo = &mut env.repo;\n\n\n\n assert!(repo.rename(\"/\", \"/xxx\").is_err());\n\n assert!(repo.rename(\"/not_exist\", \"/xxx\").is_err());\n\n\n\n repo.create_dir_all(\"/aaa/bbb/ccc\").unwrap();\n\n repo.rename(\"/aaa/bbb/ccc\", \"/aaa/ddd\").unwrap();\n\n let dirs = repo.read_dir(\"/aaa/ddd\").unwrap();\n\n assert_eq!(dirs.len(), 0);\n\n let dirs = repo.read_dir(\"/aaa\").unwrap();\n\n assert_eq!(dirs.len(), 2);\n\n\n\n repo.create_dir_all(\"/3/8\").unwrap();\n\n repo.rename(\"/3/8\", \"/3/14\").unwrap();\n\n let dirs = repo.read_dir(\"/3\").unwrap();\n\n 
assert_eq!(dirs.len(), 1);\n\n assert_eq!(dirs[0].path().to_str().unwrap(), \"/3/14\");\n\n\n", "file_path": "tests/dir.rs", "rank": 67, "score": 125784.94924170927 }, { "content": "#[test]\n\nfn file_delete() {\n\n let mut env = common::TestEnv::new();\n\n let mut repo = &mut env.repo;\n\n\n\n // create empty file then delete\n\n {\n\n OpenOptions::new()\n\n .create(true)\n\n .open(&mut repo, \"/file\")\n\n .unwrap();\n\n repo.remove_file(\"/file\").unwrap();\n\n }\n\n\n\n // write to file then delete\n\n {\n\n // write #1\n\n {\n\n let mut f = OpenOptions::new()\n\n .create(true)\n\n .open(&mut repo, \"/file\")\n", "file_path": "tests/file.rs", "rank": 68, "score": 125784.94924170927 }, { "content": "#[test]\n\nfn file_copy() {\n\n let mut env = common::TestEnv::new();\n\n let mut repo = &mut env.repo;\n\n\n\n let buf = [1u8, 2u8, 3u8];\n\n let buf2 = [4u8, 5u8, 6u8];\n\n\n\n {\n\n let mut f = OpenOptions::new()\n\n .create(true)\n\n .open(&mut repo, \"/file\")\n\n .unwrap();\n\n f.write_once(&buf[..]).unwrap();\n\n }\n\n\n\n // #1, copy to non-existing file\n\n repo.copy(\"/file\", \"/file2\").unwrap();\n\n\n\n // #2, copy to existing file\n\n repo.copy(\"/file\", \"/file2\").unwrap();\n", "file_path": "tests/file.rs", "rank": 69, "score": 125784.94924170927 }, { "content": "#[test]\n\nfn file_shrink() {\n\n let mut env = common::TestEnv::new();\n\n let mut repo = &mut env.repo;\n\n\n\n let mut rng = XorShiftRng::from_seed([42u8; 16]);\n\n let mut buf = vec![0; 16 * 1024 * 1024];\n\n rng.fill_bytes(&mut buf);\n\n\n\n let mut f = OpenOptions::new()\n\n .create(true)\n\n .version_limit(1)\n\n .open(&mut repo, \"/file\")\n\n .unwrap();\n\n f.write_once(&buf[..]).unwrap();\n\n\n\n // those operations will shrink the segment, turn on debug log\n\n // and watch the output\n\n f.set_len(3).unwrap();\n\n f.set_len(2).unwrap();\n\n f.set_len(1).unwrap();\n\n}\n\n\n", "file_path": "tests/file.rs", "rank": 70, "score": 125784.94924170927 }, { "content": "#[cfg(feature = 
\"storage-file\")]\n\n#[test]\n\nfn repo_oper() {\n\n init_env();\n\n\n\n let pwd = \"pwd\";\n\n let tmpdir = TempDir::new(\"zbox_test\").expect(\"Create temp dir failed\");\n\n let dir = tmpdir.path().to_path_buf();\n\n //let dir = std::path::PathBuf::from(\"./tt\");\n\n if dir.exists() {\n\n std::fs::remove_dir_all(&dir).unwrap();\n\n }\n\n let base = \"file://\".to_string() + dir.to_str().unwrap();\n\n\n\n // case #1: create a new repo with default options and then re-open it\n\n let path = base.clone() + \"/repo\";\n\n RepoOpener::new().create(true).open(&path, &pwd).unwrap();\n\n RepoOpener::new().open(&path, &pwd).unwrap();\n\n\n\n // case #2: create a new repo with custom options and then re-open it\n\n let path = base.clone() + \"/repo2\";\n\n RepoOpener::new()\n", "file_path": "tests/repo.rs", "rank": 71, "score": 125784.7327663294 }, { "content": "fn handle_rename(\n\n new_path: &Path,\n\n node: &Node,\n\n ctlgrp: &mut ControlGroup,\n\n repo: &mut Repo,\n\n) -> Result<()> {\n\n let mut new_path_exists = false;\n\n let mut new_path_is_dir = false;\n\n if let Some(nd) = ctlgrp.find_node(&new_path) {\n\n new_path_exists = true;\n\n new_path_is_dir = nd.is_dir();\n\n }\n\n let new_path_has_child = ctlgrp\n\n .0\n\n .iter()\n\n .filter(|n| n.path.starts_with(&new_path))\n\n .count()\n\n > 1;\n\n\n\n let result = repo.rename(&node.path, &new_path);\n", "file_path": "tests/fuzz.rs", "rank": 72, "score": 125779.61231384915 }, { "content": "/// Trait for entity can be wrapped in cow\n\npub trait Cowable: Debug + Default + Clone + Send + Sync {\n\n fn on_commit(&mut self, _vol: &VolumeRef) -> Result<()> {\n\n Ok(())\n\n }\n\n\n\n fn on_complete_commit(&mut self) {}\n\n}\n\n\n\n/// Copy-on-write wrapper\n\n#[derive(Default, Deserialize, Serialize)]\n\npub struct Cow<T: Cowable> {\n\n id: Eid,\n\n seq: u64,\n\n arm: Arm,\n\n left: Option<T>,\n\n right: Option<T>,\n\n\n\n #[serde(skip_serializing, skip_deserializing, default)]\n\n txid: Option<Txid>,\n\n 
#[serde(skip_serializing, skip_deserializing, default)]\n", "file_path": "src/trans/cow.rs", "rank": 73, "score": 123273.86632473019 }, { "content": "#[test]\n\nfn file_content_dedup() {\n\n let mut env = common::TestEnv::new();\n\n let mut repo = &mut env.repo;\n\n\n\n let buf = [42u8; 16];\n\n\n\n {\n\n let mut f = OpenOptions::new()\n\n .create(true)\n\n .version_limit(1)\n\n .open(&mut repo, \"/file\")\n\n .unwrap();\n\n let mut f2 = OpenOptions::new()\n\n .create(true)\n\n .version_limit(1)\n\n .dedup_chunk(false)\n\n .open(&mut repo, \"/file2\")\n\n .unwrap();\n\n let mut f3 = OpenOptions::new()\n\n .create(true)\n", "file_path": "tests/file.rs", "rank": 74, "score": 122218.69534192915 }, { "content": "#[test]\n\nfn dir_create_mt() {\n\n let env = Arc::new(RwLock::new(common::TestEnv::new()));\n\n let worker_cnt = 4;\n\n let task_cnt = 8;\n\n\n\n let mut workers = Vec::new();\n\n for i in 0..worker_cnt {\n\n let env = env.clone();\n\n workers.push(thread::spawn(move || {\n\n let base = i * task_cnt;\n\n for j in base..base + task_cnt {\n\n let path = format!(\"/mt/{}\", j);\n\n let mut env = env.write().unwrap();\n\n env.repo.create_dir_all(&path).unwrap();\n\n }\n\n }));\n\n }\n\n for w in workers {\n\n w.join().unwrap();\n\n }\n\n\n\n // verify\n\n let env = env.read().unwrap();\n\n let dirs = env.repo.read_dir(\"/mt\").unwrap();\n\n assert_eq!(dirs.len(), worker_cnt * task_cnt);\n\n}\n\n\n", "file_path": "tests/dir.rs", "rank": 75, "score": 122218.69534192915 }, { "content": "#[test]\n\nfn dir_create_st() {\n\n let mut env = common::TestEnv::new();\n\n let repo = &mut env.repo;\n\n\n\n // #1: basic test\n\n repo.create_dir(\"/dir\").unwrap();\n\n assert!(repo.create_dir(\"/dir\").is_err());\n\n assert!(repo.create_dir(\"/xxx/yyy\").is_err());\n\n repo.create_dir(\"/dir2\").unwrap();\n\n repo.create_dir(\"/dir3\").unwrap();\n\n assert!(repo.is_dir(\"/dir\").unwrap());\n\n assert!(repo.is_dir(\"/dir2\").unwrap());\n\n 
assert!(repo.is_dir(\"/dir3\").unwrap());\n\n\n\n // #2: test create_dir_all\n\n repo.create_dir_all(\"/xxx/yyy\").unwrap();\n\n repo.create_dir_all(\"/xxx/111/222\").unwrap();\n\n\n\n // #3: check dir modify time\n\n let m = repo.metadata(\"/xxx/111/222\").unwrap();\n\n thread::sleep(time::Duration::from_millis(1500));\n\n repo.create_dir_all(\"/xxx/111/222/333\").unwrap();\n\n let m2 = repo.metadata(\"/xxx/111/222\").unwrap();\n\n assert!(m2.modified_at() > m.modified_at());\n\n}\n\n\n", "file_path": "tests/dir.rs", "rank": 76, "score": 122218.69534192915 }, { "content": "#[test]\n\nfn file_open_close() {\n\n let mut env = common::TestEnv::new();\n\n let mut repo = &mut env.repo;\n\n\n\n let f = OpenOptions::new()\n\n .create(true)\n\n .open(&mut repo, \"/file\")\n\n .unwrap();\n\n assert!(f.metadata().unwrap().is_file());\n\n assert!(repo.path_exists(\"/file\").unwrap());\n\n assert!(repo.is_file(\"/file\").unwrap());\n\n}\n\n\n", "file_path": "tests/file.rs", "rank": 77, "score": 122218.69534192915 }, { "content": "#[derive(Clone)]\n\nstruct Headers {\n\n map: HeaderMap,\n\n}\n\n\n\nimpl Headers {\n\n fn new() -> Self {\n\n let mut map = HeaderMap::new();\n\n let ver = Version::lib_version().to_string();\n\n\n\n // add zbox version header\n\n let version_header = HeaderName::from_static(\"zbox-version\");\n\n let version_value = HeaderValue::from_str(&ver).unwrap();\n\n map.insert(version_header, version_value);\n\n\n\n // set headers for non-browser request only, because some browsers will\n\n // not allow us to do that\n\n #[cfg(not(target_arch = \"wasm32\"))]\n\n {\n\n map.insert(\n\n header::USER_AGENT,\n", "file_path": "src/volume/storage/zbox/http_client.rs", "rank": 78, "score": 122060.8094587991 }, { "content": "/// Finish trait, used with writer which implements std::io::Write trait\n\npub trait Finish: Write {\n\n fn finish(self) -> Result<()>;\n\n}\n", "file_path": "src/trans/mod.rs", "rank": 79, "score": 119952.81723113489 }, { "content": "struct 
SafeBoxVisitor<T> {\n\n _marker: PhantomData<T>,\n\n}\n\n\n\nimpl<T> SafeBoxVisitor<T> {\n\n fn new() -> Self {\n\n SafeBoxVisitor {\n\n _marker: PhantomData::<T>,\n\n }\n\n }\n\n}\n\n\n\nimpl<'de, T> de::Visitor<'de> for SafeBoxVisitor<T> {\n\n type Value = SafeBox<T>;\n\n\n\n fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {\n\n write!(formatter, \"bytes array with length {}\", mem::size_of::<T>())\n\n }\n\n\n\n fn visit_bytes<E>(self, value: &[u8]) -> StdResult<Self::Value, E>\n", "file_path": "src/base/crypto.rs", "rank": 80, "score": 119353.08678426905 }, { "content": "// convert reqwest response to response\n\nfn create_response(resp: NativeResponse) -> Result<Response> {\n\n let mut builder = HttpResponse::builder();\n\n builder.status(resp.status()).version(resp.version());\n\n for (name, value) in resp.headers() {\n\n builder.header(name, value);\n\n }\n\n let ret = Response::new(builder.body(Box::new(resp) as Box<dyn Read>)?);\n\n Ok(ret)\n\n}\n\n\n\n// transport using native http layer\n\npub struct NativeTransport {\n\n client: Client,\n\n}\n\n\n\nimpl NativeTransport {\n\n pub fn new(timeout: u32) -> Result<Self> {\n\n let client = Client::builder()\n\n .timeout(Duration::from_secs(u64::from(timeout)))\n\n .build()?;\n", "file_path": "src/volume/storage/zbox/transport/native.rs", "rank": 81, "score": 117555.23610026037 }, { "content": "#[inline]\n\nfn align_piece_offset(n: usize) -> usize {\n\n utils::align_offset(n, PIECE_SIZE)\n\n}\n\n\n", "file_path": "src/content/merkle_tree.rs", "rank": 82, "score": 117295.42277307744 }, { "content": "#[inline]\n\nfn align_piece_floor(n: usize) -> usize {\n\n utils::align_floor(n, PIECE_SIZE)\n\n}\n\n\n", "file_path": "src/content/merkle_tree.rs", "rank": 83, "score": 117295.42277307744 }, { "content": "// bind EID parameter\n\nfn bind_id(\n\n stmt: *mut ffi::sqlite3_stmt,\n\n col_idx: c_int,\n\n id_str: &CStr,\n\n) -> Result<()> {\n\n let result = unsafe {\n\n ffi::sqlite3_bind_text(\n\n stmt,\n\n 
col_idx,\n\n id_str.as_ptr(),\n\n -1,\n\n ffi::SQLITE_STATIC(),\n\n )\n\n };\n\n check_result(result)\n\n}\n\n\n", "file_path": "src/volume/storage/sqlite/sqlite.rs", "rank": 84, "score": 116521.61617692967 }, { "content": "#[derive(Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct RepoExistsResp {\n\n result: bool,\n\n}\n\n\n\n// remote session open response\n", "file_path": "src/volume/storage/zbox/http_client.rs", "rank": 85, "score": 116377.54299224477 }, { "content": "#[derive(Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\nstruct SessionOpenResp {\n\n _status: String,\n\n session_token: String,\n\n update_seq: u64,\n\n ttl: u64,\n\n}\n\n\n\n// http headers\n", "file_path": "src/volume/storage/zbox/http_client.rs", "rank": 86, "score": 116377.54299224477 }, { "content": "/// Arm access trait\n\npub trait ArmAccess<'de>: Id + Seq + Deserialize<'de> + Serialize {\n\n fn arm(&self) -> Arm;\n\n fn arm_mut(&mut self) -> &mut Arm;\n\n}\n\n\n", "file_path": "src/volume/armor.rs", "rank": 87, "score": 116300.80543688757 }, { "content": "// encrypt/decrypt function type\n\ntype EncryptFn = unsafe extern \"C\" fn(\n\n c: *mut u8,\n\n clen_p: *const u64,\n\n m: *const u8,\n\n mlen: u64,\n\n ad: *const u8,\n\n adlen: u64,\n\n nsec: *const u8,\n\n npub: *const u8,\n\n k: *const u8,\n\n) -> i32;\n", "file_path": "src/base/crypto.rs", "rank": 88, "score": 115663.74733808245 }, { "content": "type DecryptFn = unsafe extern \"C\" fn(\n\n m: *mut u8,\n\n mlen_p: *const u64,\n\n nsec: *const u8,\n\n c: *const u8,\n\n clen: u64,\n\n ad: *const u8,\n\n adlen: u64,\n\n npub: *const u8,\n\n k: *const u8,\n\n) -> i32;\n\n\n\n/// Crypto\n\n#[derive(Debug, Clone)]\n\npub struct Crypto {\n\n pub cost: Cost,\n\n pub cipher: Cipher,\n\n enc_fn: EncryptFn, // encrypt function pointer\n\n dec_fn: DecryptFn, // decrypt function pointer\n\n}\n", "file_path": "src/base/crypto.rs", "rank": 89, "score": 115663.74733808245 }, { "content": "// get response from XHR\n\nfn 
create_response(xhr: XmlHttpRequest) -> Result<Response> {\n\n // check response status\n\n let ready_state = xhr.ready_state();\n\n let status = map_req_err!(xhr.status())?;\n\n if ready_state != READY_STATE_DONE {\n\n return Err(Error::RequestError);\n\n }\n\n\n\n let mut builder = HttpResponse::builder();\n\n\n\n // extract response status\n\n let status_code = map_req_err!(StatusCode::from_u16(status))?;\n\n builder.status(status_code);\n\n\n\n // extract response headers\n\n let headers_str = map_req_err!(xhr.get_all_response_headers())?;\n\n if !headers_str.is_empty() {\n\n headers_str.trim_end().split(\"\\r\\n\").for_each(|ent| {\n\n let ent: Vec<&str> = ent.split(\": \").collect();\n\n let name = HeaderName::from_lowercase(ent[0].as_bytes()).unwrap();\n", "file_path": "src/volume/storage/zbox/transport/wasm.rs", "rank": 90, "score": 115509.49168723298 }, { "content": "#[inline]\n\nfn align_piece_floor_chunk(n: usize) -> usize {\n\n utils::align_floor_chunk(n, PIECE_SIZE)\n\n}\n\n\n", "file_path": "src/content/merkle_tree.rs", "rank": 91, "score": 114838.14322701024 }, { "content": "#[inline]\n\nfn align_piece_ceil_chunk(n: usize) -> usize {\n\n utils::align_ceil_chunk(n, PIECE_SIZE)\n\n}\n\n\n", "file_path": "src/content/merkle_tree.rs", "rank": 92, "score": 114838.14322701024 }, { "content": "type Nonce = [u8; AES_NONCE_SIZE];\n\n\n", "file_path": "src/base/crypto.rs", "rank": 93, "score": 114688.99833101474 }, { "content": "// calculate total number of tree nodes, including leaf nodes\n\nfn tree_node_cnt(leaf_cnt: usize) -> usize {\n\n let mut s = 1;\n\n let mut n = leaf_cnt;\n\n while n > 1 {\n\n s += n;\n\n n = (n + 1) / 2;\n\n }\n\n s\n\n}\n\n\n\n#[derive(Debug, Default, Clone)]\n\npub struct Leaves {\n\n offset: usize,\n\n len: usize,\n\n nodes: Vec<Hash>,\n\n}\n\n\n\nimpl Leaves {\n\n #[inline]\n\n pub fn new() -> Self {\n", "file_path": "src/content/merkle_tree.rs", "rank": 94, "score": 112501.05097868119 }, { "content": "// get parent node 
index\n\nfn parent(n: usize, lvl_begin: usize, lvl_node_cnt: usize) -> usize {\n\n if lvl_node_cnt == 0 {\n\n return 0;\n\n }\n\n let upper_lvl_node_cnt = (lvl_node_cnt + 1) / 2;\n\n let upper_lvl_begin = lvl_begin - upper_lvl_node_cnt;\n\n upper_lvl_begin + (n - lvl_begin) / 2\n\n}\n\n\n", "file_path": "src/content/merkle_tree.rs", "rank": 95, "score": 112354.43215231835 }, { "content": "/// Transport trait\n\npub trait Transport: Send + Sync {\n\n // HTTP GET request\n\n fn get(&self, uri: &Uri, headers: &HeaderMap) -> Result<Response>;\n\n\n\n // HTTP PUT request\n\n fn put(\n\n &mut self,\n\n uri: &Uri,\n\n headers: &HeaderMap,\n\n body: &[u8],\n\n ) -> Result<Response>;\n\n\n\n // HTTP DELETE request\n\n fn delete(&mut self, uri: &Uri, headers: &HeaderMap) -> Result<Response>;\n\n\n\n // HTTP bulk DELETE request\n\n fn delete_bulk(\n\n &mut self,\n\n uri: &Uri,\n\n headers: &HeaderMap,\n", "file_path": "src/volume/storage/zbox/transport/mod.rs", "rank": 96, "score": 106757.18824417153 }, { "content": "#[derive(Debug)]\n\npub struct TestEnv {\n\n pub repo: Repo,\n\n pub tmpdir: Option<TempDir>,\n\n}\n\n\n\ncfg_if! 
{\n\n if #[cfg(feature = \"storage-file\")] {\n\n impl TestEnv {\n\n pub fn new() -> Self {\n\n init_env();\n\n let tmpdir = TempDir::new(\"zbox_test\").expect(\"Create temp dir failed\");\n\n let dir = tmpdir.path().join(\"repo\");\n\n if dir.exists() {\n\n std::fs::remove_dir_all(&dir).unwrap();\n\n }\n\n let uri = \"file://\".to_string() + dir.to_str().unwrap();\n\n let repo = RepoOpener::new()\n\n .create_new(true)\n\n .open(&uri, \"pwd\")\n", "file_path": "tests/common/mod.rs", "rank": 97, "score": 104621.02305650251 }, { "content": " let uri = \"redis://localhost:6379\".to_string();\n\n let repo = RepoOpener::new()\n\n .create_new(true)\n\n .open(&uri, \"pwd\")\n\n .unwrap();\n\n TestEnv { repo, tmpdir: None }\n\n }\n\n }\n\n } else {\n\n impl TestEnv {\n\n pub fn new() -> Self {\n\n init_env();\n\n let uri = \"mem://foo\";\n\n let repo = RepoOpener::new()\n\n .create_new(true)\n\n .open(&uri, \"pwd\")\n\n .unwrap();\n\n TestEnv { repo, tmpdir: None }\n\n }\n\n }\n\n }\n\n}\n", "file_path": "tests/common/mod.rs", "rank": 98, "score": 104616.25616967859 }, { "content": " init_env();\n\n let tmpdir = TempDir::new(\"zbox_test\").expect(\"Create temp dir failed\");\n\n let file = tmpdir.path().join(\"zbox.db\");\n\n let uri = \"sqlite://\".to_string() + file.to_str().unwrap();\n\n let repo = RepoOpener::new()\n\n .create_new(true)\n\n .open(&uri, \"pwd\")\n\n .unwrap();\n\n TestEnv { repo, tmpdir: Some(tmpdir) }\n\n }\n\n }\n\n } else if #[cfg(feature = \"storage-redis\")] {\n\n // to test redis storage, start a local redis server first:\n\n // docker run --rm -p 6379:6379 redis:latest\n\n //\n\n // Note: test cases should run one by one and clear redis db before\n\n // start the next test case\n\n impl TestEnv {\n\n pub fn new() -> Self {\n\n init_env();\n", "file_path": "tests/common/mod.rs", "rank": 99, "score": 104615.03630944218 } ]
Rust
crates/holochain_zome_types/src/dna_def.rs
guillemcordoba/holochain
fa4acd2067176757327328446368b1e09bfa2a34
use super::zome; use crate::prelude::*; #[cfg(feature = "full-dna-def")] use crate::zome::error::ZomeError; #[cfg(feature = "full-dna-def")] use holo_hash::*; pub type IntegrityZomes = Vec<(ZomeName, zome::IntegrityZomeDef)>; pub type CoordinatorZomes = Vec<(ZomeName, zome::CoordinatorZomeDef)>; pub type Uid = String; #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, SerializedBytes)] #[cfg_attr(feature = "full-dna-def", derive(derive_builder::Builder))] #[cfg_attr(feature = "full-dna-def", builder(public))] pub struct DnaDef { #[cfg_attr( feature = "full-dna-def", builder(default = "\"Generated DnaDef\".to_string()") )] pub name: String, pub uid: String, #[cfg_attr(feature = "full-dna-def", builder(default = "().try_into().unwrap()"))] pub properties: SerializedBytes, #[cfg_attr(feature = "full-dna-def", builder(default = "Timestamp::now()"))] pub origin_time: Timestamp, pub integrity_zomes: IntegrityZomes, pub coordinator_zomes: CoordinatorZomes, } #[derive(Serialize, Debug, PartialEq, Eq)] struct DnaDefHash<'a> { name: &'a String, uid: &'a String, properties: &'a SerializedBytes, integrity_zomes: &'a IntegrityZomes, } #[cfg(feature = "test_utils")] impl DnaDef { pub fn unique_from_zomes( integrity: Vec<IntegrityZome>, coordinator: Vec<CoordinatorZome>, ) -> DnaDef { let integrity = integrity.into_iter().map(|z| z.into_inner()).collect(); let coordinator = coordinator.into_iter().map(|z| z.into_inner()).collect(); DnaDefBuilder::default() .integrity_zomes(integrity) .coordinator_zomes(coordinator) .random_uid() .build() .unwrap() } } impl DnaDef { pub fn all_zomes(&self) -> impl Iterator<Item = (&ZomeName, &zome::ZomeDef)> { self.integrity_zomes .iter() .map(|(n, def)| (n, def.as_any_zome_def())) .chain( self.coordinator_zomes .iter() .map(|(n, def)| (n, def.as_any_zome_def())), ) } } #[cfg(feature = "full-dna-def")] impl DnaDef { pub fn get_integrity_zome( &self, zome_name: &ZomeName, ) -> Result<zome::IntegrityZome, ZomeError> { self.integrity_zomes 
.iter() .find(|(name, _)| name == zome_name) .cloned() .map(|(name, def)| IntegrityZome::new(name, def)) .ok_or_else(|| ZomeError::ZomeNotFound(format!("Zome '{}' not found", &zome_name,))) } pub fn is_integrity_zome(&self, zome_name: &ZomeName) -> bool { self.integrity_zomes .iter() .any(|(name, _)| name == zome_name) } pub fn get_coordinator_zome( &self, zome_name: &ZomeName, ) -> Result<zome::CoordinatorZome, ZomeError> { self.coordinator_zomes .iter() .find(|(name, _)| name == zome_name) .cloned() .map(|(name, def)| CoordinatorZome::new(name, def)) .ok_or_else(|| ZomeError::ZomeNotFound(format!("Zome '{}' not found", &zome_name,))) } pub fn get_zome(&self, zome_name: &ZomeName) -> Result<zome::Zome, ZomeError> { self.integrity_zomes .iter() .find(|(name, _)| name == zome_name) .cloned() .map(|(name, def)| Zome::new(name, def.erase_type())) .or_else(|| { self.coordinator_zomes .iter() .find(|(name, _)| name == zome_name) .cloned() .map(|(name, def)| Zome::new(name, def.erase_type())) }) .ok_or_else(|| ZomeError::ZomeNotFound(format!("Zome '{}' not found", &zome_name,))) } pub fn get_all_coordinators(&self) -> Vec<zome::CoordinatorZome> { self.coordinator_zomes .iter() .cloned() .map(|(name, def)| CoordinatorZome::new(name, def)) .collect() } pub fn get_wasm_zome(&self, zome_name: &ZomeName) -> Result<&zome::WasmZome, ZomeError> { self.all_zomes() .find(|(name, _)| *name == zome_name) .map(|(_, def)| def) .ok_or_else(|| ZomeError::ZomeNotFound(format!("Zome '{}' not found", &zome_name,))) .and_then(|def| { if let ZomeDef::Wasm(wasm_zome) = def { Ok(wasm_zome) } else { Err(ZomeError::NonWasmZome(zome_name.clone())) } }) } pub fn modify_phenotype(&self, uid: Uid, properties: SerializedBytes) -> Self { let mut clone = self.clone(); clone.properties = properties; clone.uid = uid; clone } } #[cfg(feature = "full-dna-def")] pub fn random_uid() -> String { nanoid::nanoid!() } #[cfg(feature = "full-dna-def")] impl DnaDefBuilder { pub fn random_uid(&mut self) -> &mut Self 
{ self.uid = Some(random_uid()); self } } #[cfg(feature = "full-dna-def")] pub type DnaDefHashed = HoloHashed<DnaDef>; #[cfg(feature = "full-dna-def")] impl HashableContent for DnaDef { type HashType = holo_hash::hash_type::Dna; fn hash_type(&self) -> Self::HashType { holo_hash::hash_type::Dna::new() } fn hashable_content(&self) -> HashableContentBytes { let hash = DnaDefHash { name: &self.name, uid: &self.uid, properties: &self.properties, integrity_zomes: &self.integrity_zomes, }; HashableContentBytes::Content( holochain_serialized_bytes::UnsafeBytes::from( holochain_serialized_bytes::encode(&hash) .expect("Could not serialize HashableContent"), ) .into(), ) } }
use super::zome; use crate::prelude::*; #[cfg(feature = "full-dna-def")] use crate::zome::error::ZomeError; #[cfg(feature = "full-dna-def")] use holo_hash::*; pub type IntegrityZomes = Vec<(ZomeName, zome::IntegrityZomeDef)>; pub type CoordinatorZomes = Vec<(ZomeName, zome::CoordinatorZomeDef)>; pub type Uid = String; #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, SerializedBytes)] #[cfg_attr(feature = "full-dna-def", derive(derive_builder::Builder))] #[cfg_attr(feature = "full-dna-def", builder(public))] pub struct DnaDef { #[cfg_attr( feature = "full-dna-def", builder(default = "\"Generated DnaDef\".to_string()") )] pub name: String, pub uid: String, #[cfg_attr(feature = "full-dna-def", builder(default = "().try_into().unwrap()"))] pub properties: SerializedBytes, #[cfg_attr(feature = "full-dna-def", builder(default = "Timestamp::now()"))] pub origin_time: Timestamp, pub integrity_zomes: IntegrityZomes, pub coordinator_zomes: CoordinatorZomes, } #[derive(Serialize, Debug, PartialEq, Eq)] struct DnaDefHash<'a> { name: &'a String, uid: &'a String, properties: &'a SerializedBytes, integrity_zomes: &'a IntegrityZomes, } #[cfg(feature = "test_utils")] impl DnaDef { pub fn unique_from_zomes( integrity: Vec<IntegrityZome>, coordinator: Vec<CoordinatorZome>, ) -> DnaDef { let integrity = integrity.into_iter().map(|z| z.into_inner()).collect(); let coordinator = coordinator.into_iter().map(|z| z.into_inner()).collect(); DnaDefBuilder::default() .integrity_zomes(integrity) .coordinator_zomes(coordinator) .random_uid() .build() .unwrap() } } impl DnaDef { pub fn all_zomes(&self) -> impl Iterator<Item = (&ZomeName, &zome::ZomeDef)> { self.integrity_zomes .iter() .map(|(n, def)| (n, def.as_any_zome_def())) .chain( self.coordinator_zomes .iter() .map(|(n, def)| (n, def.as_any_zome_def())), ) } } #[cfg(feature = "full-dna-def")] impl DnaDef { pub fn get_integrity_zome( &self, zome_name: &ZomeName, ) -> Result<zome::IntegrityZome, ZomeError> { self.integrity_zomes 
.iter() .find(|(name, _)| name == zome_name) .cloned() .map(|(name, def)| IntegrityZome::new(name, def)) .ok_or_else(|| ZomeError::ZomeNotFound(format!("Zome '{}' not found", &zome_name,))) } pub fn is_integrity_zome(&self, zome_name: &ZomeName) -> bool { self.integrity_zomes .iter() .any(|(name, _)| name == zome_name) } pub fn get_coordinator_zome( &self, zome_name: &ZomeName, ) -> Result<zome::CoordinatorZome, ZomeError> { self.coordinator_zomes .iter() .find(|(name, _)| name == zome_name) .cloned() .map(|(name, def)| CoordinatorZome::new(name, def)) .ok_or_else(|| ZomeError::ZomeNotFound(format!("Zome '{}' not found", &zome_name,))) } pub fn get_zome(&self, zome_name: &ZomeName) -> Result<zome::Zome, ZomeError> { self.integrity_zomes .iter() .find(|(name, _)| name == zome_name) .cloned() .map(|(name, def)| Zome::new(name, def.erase_type())) .or_else(|| { self.coordinator_zomes .iter() .find(|(name, _)| name == zome_name) .cloned() .map(|(name, def)| Zome::new(name, def.erase_type())) }) .ok_or_else(|| ZomeError::ZomeNotFound(format!("Zome '{}' not found", &zome_name,))) } pub fn get_all_coordinators(&self) -> Vec<zome::CoordinatorZome> { self.coordinator_zomes .iter() .cloned() .map(|(name, def)| CoordinatorZome::new(name, def)) .collect() } pub fn get_wasm_zome(&self, zome_name: &ZomeName) -> Result<&zome::WasmZome, ZomeError> { sel
pub fn modify_phenotype(&self, uid: Uid, properties: SerializedBytes) -> Self { let mut clone = self.clone(); clone.properties = properties; clone.uid = uid; clone } } #[cfg(feature = "full-dna-def")] pub fn random_uid() -> String { nanoid::nanoid!() } #[cfg(feature = "full-dna-def")] impl DnaDefBuilder { pub fn random_uid(&mut self) -> &mut Self { self.uid = Some(random_uid()); self } } #[cfg(feature = "full-dna-def")] pub type DnaDefHashed = HoloHashed<DnaDef>; #[cfg(feature = "full-dna-def")] impl HashableContent for DnaDef { type HashType = holo_hash::hash_type::Dna; fn hash_type(&self) -> Self::HashType { holo_hash::hash_type::Dna::new() } fn hashable_content(&self) -> HashableContentBytes { let hash = DnaDefHash { name: &self.name, uid: &self.uid, properties: &self.properties, integrity_zomes: &self.integrity_zomes, }; HashableContentBytes::Content( holochain_serialized_bytes::UnsafeBytes::from( holochain_serialized_bytes::encode(&hash) .expect("Could not serialize HashableContent"), ) .into(), ) } }
f.all_zomes() .find(|(name, _)| *name == zome_name) .map(|(_, def)| def) .ok_or_else(|| ZomeError::ZomeNotFound(format!("Zome '{}' not found", &zome_name,))) .and_then(|def| { if let ZomeDef::Wasm(wasm_zome) = def { Ok(wasm_zome) } else { Err(ZomeError::NonWasmZome(zome_name.clone())) } }) }
function_block-function_prefixed
[]
Rust
benches/hashmap.rs
junghan0611/node-replication
c06f653051987a8bbc097486f1f815bd9787cb81
#![allow(dead_code)] #![feature(test)] use std::collections::HashMap; use std::fmt::Debug; use std::marker::Sync; use rand::seq::SliceRandom; use rand::{distributions::Distribution, Rng, RngCore}; use zipf::ZipfDistribution; use node_replication::Dispatch; use node_replication::Replica; mod hashmap_comparisons; mod mkbench; mod utils; use hashmap_comparisons::*; use mkbench::ReplicaTrait; use utils::benchmark::*; use utils::topology::ThreadMapping; use utils::Operation; #[cfg(feature = "smokebench")] pub const INITIAL_CAPACITY: usize = 1 << 22; #[cfg(not(feature = "smokebench"))] pub const INITIAL_CAPACITY: usize = 1 << 26; #[cfg(feature = "smokebench")] pub const KEY_SPACE: usize = 5_000_000; #[cfg(not(feature = "smokebench"))] pub const KEY_SPACE: usize = 50_000_000; pub const UNIFORM: &'static str = "uniform"; #[cfg(feature = "smokebench")] pub const NOP: usize = 2_500_000; #[cfg(not(feature = "smokebench"))] pub const NOP: usize = 25_000_000; #[derive(Debug, Eq, PartialEq, Clone, Copy)] pub enum OpWr { Put(u64, u64), } #[derive(Debug, Eq, PartialEq, Clone, Copy)] pub enum OpRd { Get(u64), } #[derive(Debug, Eq, PartialEq, Clone, Copy)] pub enum OpConcurrent { Get(u64), Put(u64, u64), } #[derive(Debug, Clone)] pub struct NrHashMap { storage: HashMap<u64, u64>, } impl NrHashMap { pub fn put(&mut self, key: u64, val: u64) { self.storage.insert(key, val); } pub fn get(&self, key: u64) -> Option<u64> { self.storage.get(&key).map(|v| *v) } } impl Default for NrHashMap { fn default() -> NrHashMap { let mut storage = HashMap::with_capacity(INITIAL_CAPACITY); for i in 0..INITIAL_CAPACITY { storage.insert(i as u64, (i + 1) as u64); } NrHashMap { storage } } } impl Dispatch for NrHashMap { type ReadOperation = OpRd; type WriteOperation = OpWr; type Response = Result<Option<u64>, ()>; fn dispatch(&self, op: Self::ReadOperation) -> Self::Response { match op { OpRd::Get(key) => return Ok(self.get(key)), } } fn dispatch_mut(&mut self, op: Self::WriteOperation) -> 
Self::Response { match op { OpWr::Put(key, val) => { self.put(key, val); Ok(None) } } } } pub fn generate_operations( nop: usize, write_ratio: usize, span: usize, distribution: &'static str, ) -> Vec<Operation<OpRd, OpWr>> { assert!(distribution == "skewed" || distribution == "uniform"); let mut ops = Vec::with_capacity(nop); let skewed = distribution == "skewed"; let mut t_rng = rand::thread_rng(); let zipf = ZipfDistribution::new(span, 1.03).unwrap(); for idx in 0..nop { let id = if skewed { zipf.sample(&mut t_rng) as u64 } else { t_rng.gen_range(0, span as u64) }; if idx % 100 < write_ratio { ops.push(Operation::WriteOperation(OpWr::Put(id, t_rng.next_u64()))); } else { ops.push(Operation::ReadOperation(OpRd::Get(id))); } } ops.shuffle(&mut t_rng); ops } pub fn generate_operations_concurrent( nop: usize, write_ratio: usize, span: usize, distribution: &'static str, ) -> Vec<Operation<OpConcurrent, ()>> { assert!(distribution == "skewed" || distribution == "uniform"); let mut ops = Vec::with_capacity(nop); let skewed = distribution == "skewed"; let mut t_rng = rand::thread_rng(); let zipf = ZipfDistribution::new(span, 1.03).unwrap(); for idx in 0..nop { let id = if skewed { zipf.sample(&mut t_rng) as u64 } else { t_rng.gen_range(0, span as u64) }; if idx % 100 < write_ratio { ops.push(Operation::ReadOperation(OpConcurrent::Put( id, t_rng.next_u64(), ))); } else { ops.push(Operation::ReadOperation(OpConcurrent::Get(id))); } } ops.shuffle(&mut t_rng); ops } fn hashmap_single_threaded(c: &mut TestHarness) { const LOG_SIZE_BYTES: usize = 2 * 1024 * 1024; const NOP: usize = 1000; const KEY_SPACE: usize = 10_000; const UNIFORM: &'static str = "uniform"; const WRITE_RATIO: usize = 10; let ops = generate_operations(NOP, WRITE_RATIO, KEY_SPACE, UNIFORM); mkbench::baseline_comparison::<Replica<NrHashMap>>(c, "hashmap", ops, LOG_SIZE_BYTES); } fn hashmap_scale_out<R>(c: &mut TestHarness, name: &str, write_ratio: usize) where R: ReplicaTrait + Send + Sync + 'static, R::D: 
Send, R::D: Dispatch<ReadOperation = OpRd>, R::D: Dispatch<WriteOperation = OpWr>, <R::D as Dispatch>::WriteOperation: Send + Sync, <R::D as Dispatch>::ReadOperation: Send + Sync, <R::D as Dispatch>::Response: Sync + Send + Debug, { let ops = generate_operations(NOP, write_ratio, KEY_SPACE, UNIFORM); let bench_name = format!("{}-scaleout-wr{}", name, write_ratio); mkbench::ScaleBenchBuilder::<R>::new(ops) .thread_defaults() .update_batch(128) .log_size(32 * 1024 * 1024) .replica_strategy(mkbench::ReplicaStrategy::One) .replica_strategy(mkbench::ReplicaStrategy::Socket) .thread_mapping(ThreadMapping::Interleave) .log_strategy(mkbench::LogStrategy::One) .configure( c, &bench_name, |_cid, rid, _log, replica, op, _batch_size| match op { Operation::ReadOperation(op) => { replica.exec_ro(*op, rid); } Operation::WriteOperation(op) => { replica.exec(*op, rid); } }, ); } fn partitioned_hashmap_scale_out(c: &mut TestHarness, name: &str, write_ratio: usize) { let ops = generate_operations(NOP, write_ratio, KEY_SPACE, UNIFORM); let bench_name = format!("{}-scaleout-wr{}", name, write_ratio); mkbench::ScaleBenchBuilder::<Partitioner<NrHashMap>>::new(ops) .thread_defaults() .replica_strategy(mkbench::ReplicaStrategy::PerThread) .thread_mapping(ThreadMapping::Interleave) .log_strategy(mkbench::LogStrategy::One) .update_batch(128) .configure( c, &bench_name, |_cid, rid, _log, replica, op, _batch_size| match op { Operation::ReadOperation(op) => { replica.exec_ro(*op, rid).unwrap(); } Operation::WriteOperation(op) => { replica.exec(*op, rid).unwrap(); } }, ); } fn concurrent_ds_scale_out<T>(c: &mut TestHarness, name: &str, write_ratio: usize) where T: Dispatch<ReadOperation = OpConcurrent>, T: Dispatch<WriteOperation = ()>, T: 'static, T: Dispatch + Sync + Default + Send, <T as Dispatch>::Response: Send + Sync + Debug, { let ops = generate_operations_concurrent(NOP, write_ratio, KEY_SPACE, UNIFORM); let bench_name = format!("{}-scaleout-wr{}", name, write_ratio); 
mkbench::ScaleBenchBuilder::<ConcurrentDs<T>>::new(ops) .thread_defaults() .replica_strategy(mkbench::ReplicaStrategy::One) .update_batch(128) .thread_mapping(ThreadMapping::Interleave) .log_strategy(mkbench::LogStrategy::One) .configure( c, &bench_name, |_cid, rid, _log, replica, op, _batch_size| match op { Operation::ReadOperation(op) => { replica.exec_ro(*op, rid); } Operation::WriteOperation(op) => { replica.exec(*op, rid); } }, ); } fn main() { let _r = env_logger::try_init(); if cfg!(feature = "smokebench") { log::warn!("Running with feature 'smokebench' may not get the desired results"); } utils::disable_dvfs(); let mut harness = Default::default(); let write_ratios = vec![0, 10, 20, 40, 60, 80, 100]; unsafe { urcu_sys::rcu_init(); } hashmap_single_threaded(&mut harness); for write_ratio in write_ratios.into_iter() { hashmap_scale_out::<Replica<NrHashMap>>(&mut harness, "hashmap", write_ratio); #[cfg(feature = "cmp")] { partitioned_hashmap_scale_out(&mut harness, "partitioned-hashmap", write_ratio); concurrent_ds_scale_out::<CHashMapWrapper>(&mut harness, "chashmap", write_ratio); concurrent_ds_scale_out::<StdWrapper>(&mut harness, "std", write_ratio); concurrent_ds_scale_out::<FlurryWrapper>(&mut harness, "flurry", write_ratio); concurrent_ds_scale_out::<RcuHashMap>(&mut harness, "urcu", write_ratio); concurrent_ds_scale_out::<DashWrapper>(&mut harness, "dashmap", write_ratio); } } }
#![allow(dead_code)] #![feature(test)] use std::collections::HashMap; use std::fmt::Debug; use std::marker::Sync; use rand::seq::SliceRandom; use rand::{distributions::Distribution, Rng, RngCore}; use zipf::ZipfDistribution; use node_replication::Dispatch; use node_replication::Replica; mod hashmap_comparisons; mod mkbench; mod utils; use hashmap_comparisons::*; use mkbench::ReplicaTrait; use utils::benchmark::*; use utils::topology::ThreadMapping; use utils::Operation; #[cfg(feature = "smokebench")] pub const INITIAL_CAPACITY: usize = 1 << 22; #[cfg(not(feature = "smokebench"))] pub const INITIAL_CAPACITY: usize = 1 << 26; #[cfg(feature = "smokebench")] pub const KEY_SPACE: usize = 5_000_000; #[cfg(not(feature = "smokebench"))] pub const KEY_SPACE: usize = 50_000_000; pub const UNIFORM: &'static str = "uniform"; #[cfg(feature = "smokebench")] pub const NOP: usize = 2_500_000; #[cfg(not(feature = "smokebench"))] pub const NOP: usize = 25_000_000; #[derive(Debug, Eq, PartialEq, Clone, Copy)] pub enum OpWr { Put(u64, u64), } #[derive(Debug, Eq, PartialEq, Clone, Copy)] pub enum OpRd { Get(u64), } #[derive(Debug, Eq, PartialEq, Clone, Copy)] pub enum OpConcurrent { Get(u64), Put(u64, u64), } #[derive(Debug, Clone)] pub struct NrHashMap { storage: HashMap<u64, u64>, } impl NrHashMap { pub fn put(&mut self, key: u64, val: u64) { self.storage.insert(key, val); } pub fn get(&self, key: u64) -> Option<u64> { self.storage.get(&key).map(|v| *v) } } impl Default for NrHashMap { fn default() -> NrHashMap { let mut storage = HashMap::with_capacity(INITIAL_CAPACITY); for i in 0..INITIAL_CAPACITY { storage.insert(i as u64, (i + 1) as u64); } NrHashMap { storage } } } impl Dispatch for NrHashMap { type ReadOperation = OpRd; type WriteOperation = OpWr; type Response = Result<Option<u64>, ()>; fn dispatch(&self, op: Self::ReadOperation) -> Self::Response { match op { OpRd::Get(key) => return Ok(self.get(key)), } } fn dispatch_mut(&mut self, op: Self::WriteOperation) -> 
Self::Response { match op { OpWr::Put(key, val) => { self.put(key, val); Ok(None) } } } } pub fn generate_operations( nop: usize, write_ratio: usize, span: usize, distribution: &'static str, ) -> Vec<Operation<OpRd, OpWr>> { assert!(distribution == "skewed" || distribution == "uniform"); let mut ops = Vec::with_capacity(nop); let skewed = distribution == "skewed"; let mut t_rng = rand::thread_rng(); let zipf = ZipfDistribution::new(span, 1.03).unwrap(); for idx in 0..nop { let id = if skewed { zipf.sample(&mut t_rng) as u64 } else { t_rng.gen_range(0, span as u64) }; if idx % 100 < write_ratio { ops.push(Operation::WriteOperation(OpWr::Put(id, t_rng.next_u64()))); } else { ops.push(Operation::ReadOperation(OpRd::Get(id))); } } ops.shuffle(&mut t_rng); ops } pub fn generate_operations_concurrent( nop: usize, write_ratio: usize, span: usize, distribution: &'static str, ) -> Vec<Operation<OpConcurrent, ()>> { assert!(distribution == "skewed" || distribution == "uniform"); let mut ops = Vec::with_capacity(nop); let skewed = distribution == "skewed"; let mut t_rng = rand::thread_rng(); let zipf = ZipfDistribution::new(span, 1.03).unwrap(); for idx in 0..nop { let id = if skewed { zipf.sample(&mut t_rng) as u64 } else { t_rng.gen_range(0, span as u64) }; if idx % 100 < write_ratio { ops.push(Operation::ReadOperation(OpConcurrent::Put( id, t_rng.next_u64(), ))); } else { ops.push(Operation::ReadOperation(OpConcurrent::Get(id))); } } ops.shuffle(&mut t_rng); ops } fn hashmap_single_threaded(c: &mut TestHarness) { const LOG_SIZE_BYTES: usize = 2 * 1024 * 1024; const NOP: usize = 1000; const KEY_SPACE: usize = 10_000; const UNIFORM: &'static str = "uniform"; const WRITE_RATIO: usize = 10; let ops = generate_operations(NOP, WRITE_RATIO, KEY_SPACE, UNIFORM); mkbench::baseline_comparison::<Replica<NrHashMap>>(c, "hashmap", ops, LOG_SIZE_BYTES); } fn hashmap_scale_out<R>(c: &mut TestHarness, name: &str, write_ratio: usize) where R: ReplicaTrait + Send + Sync + 'static, R::D: 
Send, R::D: Dispatch<ReadOperation = OpRd>, R::D: Dispatch<WriteOperation = OpWr>, <R::D as Dispatch>::WriteOperation: Send + Sync, <R::D as Dispatch>::ReadOperation: Send + Sync, <R::D as Dispatch>::Response: Sync + Send + Debug, { let ops = generate_operations(NOP, write_ratio, KEY_SPACE, UNIFORM); let bench_name = format!("{}-scaleout-wr{}", name, write_ratio); mkbench::ScaleBenchBuilder::<R>::new(ops) .thread_defaults() .update_batch(128) .log_size(32 * 1024 * 1024) .replica_strategy(mkbench::ReplicaStrategy::One) .replica_strategy(mkbench::ReplicaStrategy::Socket) .thread_mapping(ThreadMapping::Interleave) .log_strategy(mkbench::LogStrategy::One) .configure( c, &bench_name, |_cid, rid, _log, replica, op, _batch_size|
, ); } fn partitioned_hashmap_scale_out(c: &mut TestHarness, name: &str, write_ratio: usize) { let ops = generate_operations(NOP, write_ratio, KEY_SPACE, UNIFORM); let bench_name = format!("{}-scaleout-wr{}", name, write_ratio); mkbench::ScaleBenchBuilder::<Partitioner<NrHashMap>>::new(ops) .thread_defaults() .replica_strategy(mkbench::ReplicaStrategy::PerThread) .thread_mapping(ThreadMapping::Interleave) .log_strategy(mkbench::LogStrategy::One) .update_batch(128) .configure( c, &bench_name, |_cid, rid, _log, replica, op, _batch_size| match op { Operation::ReadOperation(op) => { replica.exec_ro(*op, rid).unwrap(); } Operation::WriteOperation(op) => { replica.exec(*op, rid).unwrap(); } }, ); } fn concurrent_ds_scale_out<T>(c: &mut TestHarness, name: &str, write_ratio: usize) where T: Dispatch<ReadOperation = OpConcurrent>, T: Dispatch<WriteOperation = ()>, T: 'static, T: Dispatch + Sync + Default + Send, <T as Dispatch>::Response: Send + Sync + Debug, { let ops = generate_operations_concurrent(NOP, write_ratio, KEY_SPACE, UNIFORM); let bench_name = format!("{}-scaleout-wr{}", name, write_ratio); mkbench::ScaleBenchBuilder::<ConcurrentDs<T>>::new(ops) .thread_defaults() .replica_strategy(mkbench::ReplicaStrategy::One) .update_batch(128) .thread_mapping(ThreadMapping::Interleave) .log_strategy(mkbench::LogStrategy::One) .configure( c, &bench_name, |_cid, rid, _log, replica, op, _batch_size| match op { Operation::ReadOperation(op) => { replica.exec_ro(*op, rid); } Operation::WriteOperation(op) => { replica.exec(*op, rid); } }, ); } fn main() { let _r = env_logger::try_init(); if cfg!(feature = "smokebench") { log::warn!("Running with feature 'smokebench' may not get the desired results"); } utils::disable_dvfs(); let mut harness = Default::default(); let write_ratios = vec![0, 10, 20, 40, 60, 80, 100]; unsafe { urcu_sys::rcu_init(); } hashmap_single_threaded(&mut harness); for write_ratio in write_ratios.into_iter() { hashmap_scale_out::<Replica<NrHashMap>>(&mut 
harness, "hashmap", write_ratio); #[cfg(feature = "cmp")] { partitioned_hashmap_scale_out(&mut harness, "partitioned-hashmap", write_ratio); concurrent_ds_scale_out::<CHashMapWrapper>(&mut harness, "chashmap", write_ratio); concurrent_ds_scale_out::<StdWrapper>(&mut harness, "std", write_ratio); concurrent_ds_scale_out::<FlurryWrapper>(&mut harness, "flurry", write_ratio); concurrent_ds_scale_out::<RcuHashMap>(&mut harness, "urcu", write_ratio); concurrent_ds_scale_out::<DashWrapper>(&mut harness, "dashmap", write_ratio); } } }
match op { Operation::ReadOperation(op) => { replica.exec_ro(*op, rid); } Operation::WriteOperation(op) => { replica.exec(*op, rid); } }
if_condition
[ { "content": "fn concurrent_ds_nr_scale_out<R>(c: &mut TestHarness, name: &str, write_ratio: usize)\n\nwhere\n\n R: ReplicaTrait + Send + Sync + 'static,\n\n R::D: Send,\n\n R::D: Dispatch<ReadOperation = SkipListConcurrent>,\n\n R::D: Dispatch<WriteOperation = OpWr>,\n\n <R::D as Dispatch>::WriteOperation: Send + Sync,\n\n <R::D as Dispatch>::ReadOperation: Send + Sync,\n\n <R::D as Dispatch>::Response: Sync + Send + Debug,\n\n{\n\n let ops = generate_sops_concurrent(NOP, write_ratio, KEY_SPACE);\n\n let topology = MachineTopology::new();\n\n let sockets = topology.sockets();\n\n let cores_on_socket = topology.cpus_on_socket(sockets[0]).len();\n\n\n\n let increment = if topology.cores() > 120 { 8 } else { 4 };\n\n\n\n let mut nlog = 0;\n\n while nlog <= cores_on_socket {\n\n let logs = if nlog == 0 { 1 } else { nlog };\n", "file_path": "benches/lockfree.rs", "rank": 2, "score": 234878.03932615384 }, { "content": "/// Generate a random sequence of operations that we'll perform:\n\npub fn generate_operations(nop: usize) -> Vec<Operation<OpRd, OpWr>> {\n\n let mut orng = thread_rng();\n\n let mut arng = thread_rng();\n\n\n\n let mut ops = Vec::with_capacity(nop);\n\n for _i in 0..nop {\n\n let op: usize = orng.gen();\n\n match op % 2usize {\n\n 0usize => ops.push(Operation::WriteOperation(OpWr::Pop)),\n\n 1usize => ops.push(Operation::WriteOperation(OpWr::Push(arng.gen()))),\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n ops\n\n}\n\n\n", "file_path": "benches/stack.rs", "rank": 3, "score": 228078.65569364646 }, { "content": "#[cfg(feature = \"c_nr\")]\n\ntype BenchFn<R> = fn(\n\n crate::utils::ThreadId,\n\n ReplicaToken,\n\n &Vec<Arc<Log<'static, <<R as ReplicaTrait>::D as Dispatch>::WriteOperation>>>,\n\n &Arc<R>,\n\n &Operation<\n\n <<R as ReplicaTrait>::D as Dispatch>::ReadOperation,\n\n <<R as ReplicaTrait>::D as Dispatch>::WriteOperation,\n\n >,\n\n usize,\n\n);\n\n\n", "file_path": "benches/mkbench.rs", "rank": 5, "score": 199352.21137855024 }, { "content": "fn 
bench(r: Arc<Replica<Stack>>, nop: usize, barrier: Arc<Barrier>) -> (u64, u64) {\n\n let idx = r.register().expect(\"Failed to register with Replica.\");\n\n\n\n let mut orng = thread_rng();\n\n let mut arng = thread_rng();\n\n\n\n let mut ops = Vec::with_capacity(nop);\n\n for _i in 0..nop {\n\n let op: usize = orng.gen();\n\n match op % 2usize {\n\n 0usize => ops.push(OpWr::Pop),\n\n 1usize => ops.push(OpWr::Push(arng.gen())),\n\n _ => unreachable!(),\n\n }\n\n }\n\n barrier.wait();\n\n\n\n for i in 0..nop {\n\n r.execute_mut(ops[i], idx);\n\n }\n\n\n\n barrier.wait();\n\n\n\n (0, 0)\n\n}\n\n\n\n/// Verify that 2 replicas are equal after a set of random\n\n/// operations have been executed against the log.\n", "file_path": "nr/tests/stack.rs", "rank": 6, "score": 193321.96418760982 }, { "content": "fn bench(r: Arc<Replica<Stack>>, nop: usize, barrier: Arc<Barrier>) -> (u64, u64) {\n\n let idx = r.register().expect(\"Failed to register with Replica.\");\n\n\n\n let mut orng = thread_rng();\n\n let mut arng = thread_rng();\n\n\n\n let mut ops = Vec::with_capacity(nop);\n\n for _i in 0..nop {\n\n let op: usize = orng.gen();\n\n match op % 2usize {\n\n 0usize => ops.push(OpWr::Pop),\n\n 1usize => ops.push(OpWr::Push(arng.gen())),\n\n _ => unreachable!(),\n\n }\n\n }\n\n barrier.wait();\n\n\n\n for i in 0..nop {\n\n r.execute_mut(ops[i], idx);\n\n }\n\n\n\n barrier.wait();\n\n\n\n (0, 0)\n\n}\n\n\n\n/// Verify that 2 replicas are equal after a set of random\n\n/// operations have been executed against the log.\n", "file_path": "cnr/tests/stack.rs", "rank": 7, "score": 193321.96418760982 }, { "content": "fn generate_nrfs_ops(write_ratio: usize) -> Vec<Operation<OpRd, OpWr>> {\n\n let nop = 10000;\n\n let mut ops = Vec::with_capacity(nop);\n\n\n\n for idx in 0..nop {\n\n if idx % 100 < write_ratio {\n\n ops.push(Operation::WriteOperation(OpWr::FileWrite(0)));\n\n } else {\n\n ops.push(Operation::ReadOperation(OpRd::FileRead(0)));\n\n }\n\n }\n\n ops\n\n}\n\n\n", 
"file_path": "benches/nrfs.rs", "rank": 8, "score": 173317.95208518492 }, { "content": "#[cfg(not(target_os = \"linux\"))]\n\npub fn disable_dvfs() {\n\n log::warn!(\"Can't disable DVFS, expect non-optimal test results!\");\n\n}\n", "file_path": "benches/utils/mod.rs", "rank": 9, "score": 168129.03727374348 }, { "content": "#[cfg(not(target_os = \"linux\"))]\n\npub fn pin_thread(_core_id: topology::Cpu) {\n\n log::warn!(\"Can't pin threads explicitly for benchmarking.\");\n\n}\n\n\n", "file_path": "benches/utils/mod.rs", "rank": 10, "score": 163794.67749853627 }, { "content": "#[cfg(target_os = \"linux\")]\n\npub fn pin_thread(core_id: topology::Cpu) {\n\n core_affinity::set_for_current(core_affinity::CoreId {\n\n id: core_id as usize,\n\n });\n\n}\n\n\n", "file_path": "benches/utils/mod.rs", "rank": 11, "score": 163794.67749853627 }, { "content": "#[derive(Eq, PartialEq, Clone, Copy, Debug)]\n\nenum OpRd {\n\n Peek,\n\n}\n\n\n", "file_path": "cnr/tests/stack.rs", "rank": 13, "score": 152989.27914423915 }, { "content": "#[derive(Eq, PartialEq, Clone, Copy, Debug)]\n\nenum OpRd {\n\n Peek,\n\n}\n\n\n", "file_path": "nr/tests/stack.rs", "rank": 14, "score": 152989.27914423912 }, { "content": "pub trait ReplicaTrait {\n\n type D: Dispatch + Default + Sync;\n\n\n\n fn new_arc(log: Vec<Arc<Log<'static, <Self::D as Dispatch>::WriteOperation>>>) -> Arc<Self>;\n\n\n\n fn register_me(&self) -> Option<ReplicaToken>;\n\n\n\n fn sync_me(&self, idx: ReplicaToken);\n\n\n\n fn log_sync(&self, idx: ReplicaToken, logid: usize);\n\n\n\n fn exec(\n\n &self,\n\n op: <Self::D as Dispatch>::WriteOperation,\n\n idx: ReplicaToken,\n\n ) -> <Self::D as Dispatch>::Response;\n\n\n\n fn exec_ro(\n\n &self,\n\n op: <Self::D as Dispatch>::ReadOperation,\n", "file_path": "benches/mkbench.rs", "rank": 15, "score": 146879.70969959415 }, { "content": "fn nrfs_scale_out(c: &mut TestHarness, num_cpus: usize, write_ratio: usize) {\n\n let ops = generate_nrfs_ops(write_ratio);\n\n let logs = 
num_cpus;\n\n let bench_name = format!(\"nrfs-mlnr{}-scaleout-wr{}\", logs, write_ratio);\n\n\n\n mkbench::ScaleBenchBuilder::<Replica<NrFilesystem>>::new(ops)\n\n .thread_defaults()\n\n .replica_strategy(mkbench::ReplicaStrategy::Socket)\n\n .update_batch(128)\n\n .thread_mapping(ThreadMapping::Sequential)\n\n .log_strategy(mkbench::LogStrategy::Custom(logs))\n\n .configure(\n\n c,\n\n &bench_name,\n\n |_cid, rid, _log, replica, op, _batch_size| match op {\n\n Operation::ReadOperation(op) => {\n\n let op = match op {\n\n OpRd::FileRead(_mnode) => OpRd::FileRead(rid.id() as u64 + 1),\n\n };\n\n let _ignore = replica.exec_ro(op, rid);\n", "file_path": "benches/nrfs.rs", "rank": 16, "score": 142492.96842099936 }, { "content": "fn generate_operations(nop: usize) -> Vec<Operation<OpcodeRd, OpcodeWr>> {\n\n let mut ops = Vec::with_capacity(nop);\n\n let mut rng = thread_rng();\n\n\n\n const PAGE_RANGE_MASK: u64 = !0xffff_0000_0000_0fff;\n\n const MAP_SIZE_MASK: u64 = !0xffff_ffff_f000_0fff;\n\n for _i in 0..nop {\n\n match rng.gen::<usize>() % 3 {\n\n 0 => ops.push(Operation::ReadOperation(OpcodeRd::Identify(\n\n rng.gen::<u64>(),\n\n ))),\n\n 1 => ops.push(Operation::WriteOperation(OpcodeWr::Map(\n\n VAddr::from(rng.gen::<u64>() & PAGE_RANGE_MASK),\n\n rng.gen::<usize>() & MAP_SIZE_MASK as usize,\n\n MapAction::ReadWriteUser,\n\n PAddr::from(rng.gen::<u64>() & PAGE_RANGE_MASK),\n\n ))),\n\n 2 => ops.push(Operation::WriteOperation(OpcodeWr::MapDevice(\n\n VAddr::from(rng.gen::<u64>() & PAGE_RANGE_MASK),\n\n PAddr::from(rng.gen::<u64>() & PAGE_RANGE_MASK),\n\n rng.gen::<usize>() & MAP_SIZE_MASK as usize,\n\n ))),\n\n _ => unreachable!(),\n\n }\n\n }\n\n ops\n\n}\n\n\n", "file_path": "benches/vspace.rs", "rank": 19, "score": 126513.07030227281 }, { "content": "#[derive(Default)]\n\nstruct VSpaceDispatcher {\n\n vspace: VSpace,\n\n}\n\n\n\nimpl Dispatch for VSpaceDispatcher {\n\n type ReadOperation = OpcodeRd;\n\n type WriteOperation = OpcodeWr;\n\n type Response = 
Result<(u64, u64), VSpaceError>;\n\n\n\n fn dispatch(&self, op: Self::ReadOperation) -> Self::Response {\n\n match op {\n\n OpcodeRd::Identify(base) => {\n\n let paddr = self.vspace.resolve_addr(VAddr::from(base));\n\n Ok((paddr.map(|pnum| pnum.as_u64()).unwrap_or(0x0), 0x0))\n\n }\n\n }\n\n }\n\n\n\n fn dispatch_mut(&mut self, op: Self::WriteOperation) -> Self::Response {\n\n match op {\n", "file_path": "benches/vspace.rs", "rank": 20, "score": 123943.97343905896 }, { "content": "fn generate_fs_operations(nop: usize, write_ratio: usize) -> Vec<Operation<(), OperationWr>> {\n\n let mut ops = Vec::with_capacity(nop);\n\n let mut rng = rand::thread_rng();\n\n\n\n for idx in 0..nop {\n\n if idx % 100 < write_ratio {\n\n ops.push(Operation::WriteOperation(OperationWr::Write {\n\n ino: 5, // XXX: hard-coded ino of file `00000001`\n\n fh: 0,\n\n offset: rng.gen_range(0, 4096 - 256),\n\n data: &[3; 128],\n\n flags: 0,\n\n }))\n\n } else {\n\n let offset = rng.gen_range(0, 4096 - 256);\n\n let size = rng.gen_range(0, 128);\n\n\n\n ops.push(Operation::WriteOperation(OperationWr::Read {\n\n ino: 5, // XXX: hard-coded ino of file `00000001`\n\n fh: 0,\n\n offset: offset,\n\n size: size,\n\n }))\n\n }\n\n }\n\n\n\n ops.shuffle(&mut thread_rng());\n\n ops\n\n}\n\n\n", "file_path": "benches/memfs.rs", "rank": 21, "score": 123847.66176873542 }, { "content": "#[derive(Eq, PartialEq, Clone, Copy, Debug)]\n\nenum OpWr {\n\n Push(u32),\n\n Pop,\n\n}\n\n\n", "file_path": "nr/tests/stack.rs", "rank": 22, "score": 120932.44846473011 }, { "content": "#[derive(Eq, PartialEq, Clone, Copy, Debug)]\n\nenum OpWr {\n\n Push(u32),\n\n Pop,\n\n}\n\n\n", "file_path": "cnr/tests/stack.rs", "rank": 23, "score": 120932.44846473011 }, { "content": "/// Compare scalability of a node-replicated stack.\n\nfn stack_scale_out(c: &mut TestHarness) {\n\n // How many operations per iteration\n\n const NOP: usize = 10_000;\n\n let ops = generate_operations(NOP);\n\n\n\n 
mkbench::ScaleBenchBuilder::<Replica<Stack>>::new(ops)\n\n .machine_defaults()\n\n .log_strategy(mkbench::LogStrategy::One)\n\n .configure(\n\n c,\n\n \"stack-scaleout\",\n\n |_cid, rid, _log, replica, op, _batch_size| {\n\n match op {\n\n Operation::WriteOperation(op) => replica.execute_mut(*op, rid),\n\n Operation::ReadOperation(op) => unreachable!(),\n\n _ => unreachable!(),\n\n };\n\n },\n\n );\n\n}\n\n\n", "file_path": "benches/stack.rs", "rank": 24, "score": 118962.39623127412 }, { "content": "/// Compare scale-out behaviour of memfs.\n\nfn memfs_scale_out(c: &mut TestHarness) {\n\n const NOP: usize = 50;\n\n const WRITE_RATIO: usize = 10; //% out of 100\n\n\n\n let ops = generate_fs_operations(NOP, WRITE_RATIO);\n\n\n\n mkbench::ScaleBenchBuilder::<Replica<NrMemFilesystem>>::new(ops)\n\n .machine_defaults()\n\n // The only benchmark that actually seems to slightly\n\n // regress with 2 MiB logsize, set to 16 MiB\n\n .log_size(16 * 1024 * 1024)\n\n .configure(\n\n c,\n\n \"memfs-scaleout\",\n\n |_cid, rid, _log, replica, op, _batch_size| match op {\n\n Operation::ReadOperation(o) => {\n\n replica.execute(*o, rid).unwrap();\n\n }\n\n Operation::WriteOperation(o) => {\n\n replica.execute_mut(*o, rid).unwrap();\n\n }\n\n },\n\n );\n\n}\n\n\n", "file_path": "benches/memfs.rs", "rank": 25, "score": 118962.39623127412 }, { "content": "fn vspace_scale_out(c: &mut TestHarness) {\n\n const NOP: usize = 3000;\n\n let ops = generate_operations(NOP);\n\n\n\n mkbench::ScaleBenchBuilder::<Replica<VSpaceDispatcher>>::new(ops)\n\n .machine_defaults()\n\n .log_strategy(mkbench::LogStrategy::One)\n\n .configure(\n\n c,\n\n \"vspace-scaleout\",\n\n |_cid, rid, _log, replica, op, _batch_size| match op {\n\n Operation::ReadOperation(o) => {\n\n let _r = replica.execute(*o, rid);\n\n }\n\n Operation::WriteOperation(o) => {\n\n let _r = replica.execute_mut(*o, rid);\n\n }\n\n },\n\n );\n\n}\n\n\n", "file_path": "benches/vspace.rs", "rank": 26, "score": 118962.39623127412 }, { 
"content": "/// Compare scale-out behaviour of synthetic data-structure.\n\nfn synthetic_scale_out(c: &mut TestHarness) {\n\n // How many operations per iteration\n\n const NOP: usize = 10_000;\n\n // Operations to perform\n\n let ops = generate_operations(NOP, 0, false, false, true);\n\n\n\n mkbench::ScaleBenchBuilder::<Replica<AbstractDataStructure>>::new(ops)\n\n .machine_defaults()\n\n .log_strategy(mkbench::LogStrategy::One)\n\n .configure(\n\n c,\n\n \"synthetic-scaleout\",\n\n |cid, rid, _log, replica, op, _batch_size| match op {\n\n Operation::ReadOperation(mut o) => {\n\n o.set_tid(cid as usize);\n\n replica.execute(o, rid).unwrap();\n\n }\n\n Operation::WriteOperation(mut o) => {\n\n o.set_tid(cid as usize);\n\n replica.execute_mut(o, rid).unwrap();\n\n }\n\n },\n\n );\n\n}\n\n\n", "file_path": "benches/synthetic.rs", "rank": 27, "score": 118962.39623127412 }, { "content": "#[derive(Default)]\n\nstruct NrHashMap {\n\n storage: HashMap<u64, u64>,\n\n}\n\n\n\n/// We support mutable put operation on the hashmap.\n", "file_path": "cnr/examples/hashmap.rs", "rank": 28, "score": 117247.56050305377 }, { "content": "#[derive(Default)]\n\nstruct NrHashMap {\n\n storage: HashMap<u64, u64>,\n\n}\n\n\n\n/// We support mutable put operation on the hashmap.\n", "file_path": "nr/examples/hashmap.rs", "rank": 29, "score": 117247.56050305377 }, { "content": "fn memfs_single_threaded(c: &mut TestHarness) {\n\n const LOG_SIZE_BYTES: usize = 16 * 1024 * 1024;\n\n const NOP: usize = 50;\n\n const WRITE_RATIO: usize = 10; //% out of 100\n\n\n\n let ops = generate_fs_operations(NOP, WRITE_RATIO);\n\n mkbench::baseline_comparison::<Replica<NrMemFilesystem>>(c, \"memfs\", ops, LOG_SIZE_BYTES);\n\n}\n\n\n", "file_path": "benches/memfs.rs", "rank": 30, "score": 116389.17820098714 }, { "content": "/// Compare scale-out behaviour of log.\n\nfn log_scale_bench(c: &mut TestHarness) {\n\n env_logger::try_init();\n\n\n\n /// Log size (needs to be big as we don't have GC in this case but 
high tput)\n\n const LOG_SIZE_BYTES: usize = 12 * 1024 * 1024 * 1024;\n\n\n\n /// Benchmark #operations per iteration\n\n const NOP: usize = 50_000;\n\n\n\n let mut operations = Vec::new();\n\n for e in 0..NOP {\n\n operations.push(Operation::WriteOperation(e));\n\n }\n\n\n\n mkbench::ScaleBenchBuilder::<Replica<Nop>>::new(operations)\n\n .machine_defaults()\n\n .log_size(LOG_SIZE_BYTES)\n\n .log_strategy(mkbench::LogStrategy::One)\n\n .update_batch(8)\n\n .reset_log()\n", "file_path": "benches/log.rs", "rank": 31, "score": 116389.17820098714 }, { "content": "/// Compare against a stack with and without a log in-front.\n\nfn stack_single_threaded(c: &mut TestHarness) {\n\n // Number of operations\n\n const NOP: usize = 1_000;\n\n // Log size\n\n const LOG_SIZE_BYTES: usize = 2 * 1024 * 1024;\n\n let ops = generate_operations(NOP);\n\n mkbench::baseline_comparison::<Replica<Stack>>(c, \"stack\", ops, LOG_SIZE_BYTES);\n\n}\n\n\n", "file_path": "benches/stack.rs", "rank": 32, "score": 116389.17820098714 }, { "content": "fn vspace_single_threaded(c: &mut TestHarness) {\n\n const NOP: usize = 3000;\n\n const LOG_SIZE_BYTES: usize = 16 * 1024 * 1024;\n\n mkbench::baseline_comparison::<Replica<VSpaceDispatcher>>(\n\n c,\n\n \"vspace\",\n\n generate_operations(NOP),\n\n LOG_SIZE_BYTES,\n\n );\n\n}\n\n\n", "file_path": "benches/vspace.rs", "rank": 33, "score": 116389.17820098714 }, { "content": "/// Compare a synthetic benchmark against a single-threaded implementation.\n\nfn synthetic_single_threaded(c: &mut TestHarness) {\n\n // How many operations per iteration\n\n const NOP: usize = 1_000;\n\n // Size of the log.\n\n const LOG_SIZE_BYTES: usize = 2 * 1024 * 1024;\n\n\n\n let ops = generate_operations(NOP, 0, false, false, true);\n\n mkbench::baseline_comparison::<Replica<AbstractDataStructure>>(\n\n c,\n\n \"synthetic\",\n\n ops,\n\n LOG_SIZE_BYTES,\n\n );\n\n}\n\n\n", "file_path": "benches/synthetic.rs", "rank": 34, "score": 116389.17820098714 }, { "content": "/// 
Trait that a data structure must implement to be usable with this library.\n\n///\n\n/// When this library executes a read-only operation against the data structure,\n\n/// it invokes the `dispatch()` method with the operation as an argument.\n\n///\n\n/// When this library executes a write operation against the data structure, it\n\n/// invokes the `dispatch_mut()` method with the operation as an argument.\n\npub trait Dispatch {\n\n /// A read-only operation. When executed against the data structure, an operation\n\n /// of this type must not mutate the data structure in anyway. Otherwise, the\n\n /// assumptions made by this library no longer hold.\n\n type ReadOperation: Sized + Clone + PartialEq + Debug;\n\n\n\n /// A write operation. When executed against the data structure, an operation of\n\n /// this type is allowed to mutate state. The library ensures that this is done so\n\n /// in a thread-safe manner.\n\n type WriteOperation: Sized + Clone + PartialEq + Debug + Send;\n\n\n\n /// The type on the value returned by the data structure when a `ReadOperation` or a\n\n /// `WriteOperation` successfully executes against it.\n\n type Response: Sized + Clone;\n\n\n\n /// Method on the data structure that allows a read-only operation to be\n\n /// executed against it.\n\n fn dispatch(&self, op: Self::ReadOperation) -> Self::Response;\n\n\n\n /// Method on the data structure that allows a write operation to be\n\n /// executed against it.\n\n fn dispatch_mut(&mut self, op: Self::WriteOperation) -> Self::Response;\n\n}\n", "file_path": "nr/src/lib.rs", "rank": 35, "score": 115928.66157430987 }, { "content": "/// Trait that a data structure must implement to be usable with this library.\n\n///\n\n/// When this library executes a read-only operation against the data structure,\n\n/// it invokes the `dispatch()` method with the operation as an argument.\n\n///\n\n/// When this library executes a write operation against the data structure, it\n\n/// invokes the 
`dispatch_mut()` method with the operation as an argument.\n\npub trait Dispatch {\n\n /// A read-only operation. When executed against the data structure, an operation\n\n /// of this type must not mutate the data structure in anyway. Otherwise, the\n\n /// assumptions made by this library no longer hold.\n\n type ReadOperation: Sized + Clone + PartialEq + Debug + LogMapper;\n\n\n\n /// A write operation. When executed against the data structure, an operation of\n\n /// this type is allowed to mutate state. The library ensures that this is done so\n\n /// in a thread-safe manner.\n\n type WriteOperation: Sized + Clone + PartialEq + Debug + Send + LogMapper;\n\n\n\n /// The type on the value returned by the data structure when a `ReadOperation` or a\n\n /// `WriteOperation` successfully executes against it.\n\n type Response: Sized + Clone;\n\n\n\n /// Method on the data structure that allows a read-only operation to be\n\n /// executed against it.\n\n fn dispatch(&self, op: Self::ReadOperation) -> Self::Response;\n\n\n\n /// Method on the data structure that allows a write operation to be\n\n /// executed against it.\n\n fn dispatch_mut(&self, op: Self::WriteOperation) -> Self::Response;\n\n}\n", "file_path": "cnr/src/lib.rs", "rank": 36, "score": 115928.66157430987 }, { "content": "/// A pending operation is a combination of the its op-code (T),\n\n/// and the corresponding result (R).\n\ntype PendingOperation<T, R> = Cell<(Option<T>, Option<usize>, Option<R>)>;\n\n\n\n/// Contains all state local to a particular thread.\n\n///\n\n/// The primary purpose of this type is to batch operations issued on a thread before\n\n/// appending them to the shared log. This is achieved using a fixed sized array. Once\n\n/// executed against the replica, the results of these operations are stored back into\n\n/// the same array.\n\n///\n\n/// `T` is a type parameter required by the struct. 
`T` should identify operations\n\n/// issued by the thread (an opcode of sorts) and should also contain arguments/parameters\n\n/// required to execute these operations on the replicas.\n\n///\n\n/// `R` is a type parameter required by the struct. It is the type on the result obtained\n\n/// when an operation is executed against the replica.\n\n#[repr(align(64))]\n\npub(crate) struct Context<T, R>\n\nwhere\n\n T: Sized + Clone,\n\n R: Sized + Clone,\n", "file_path": "cnr/src/context.rs", "rank": 37, "score": 114527.05483435182 }, { "content": "/// Log the baseline comparision results to a CSV file\n\n///\n\n/// # TODO\n\n/// Ideally this can go into the runner that was previously\n\n/// not possible since we used criterion for the runner.\n\nfn write_results(name: String, duration: Duration, results: Vec<usize>) -> std::io::Result<()> {\n\n let file_name = \"baseline_comparison.csv\";\n\n let write_headers = !Path::new(file_name).exists(); // write headers only to new file\n\n let mut csv_file = OpenOptions::new()\n\n .append(true)\n\n .create(true)\n\n .open(file_name)?;\n\n\n\n let mut wtr = WriterBuilder::new()\n\n .has_headers(write_headers)\n\n .from_writer(csv_file);\n\n\n\n #[derive(Serialize)]\n\n struct Record {\n\n name: String,\n\n batch_size: usize,\n\n duration: f64,\n\n exp_time_in_sec: usize,\n\n iterations: usize,\n\n };\n", "file_path": "benches/mkbench.rs", "rank": 38, "score": 113129.51534912846 }, { "content": "/// Generate a random sequence of operations that we'll perform.\n\n///\n\n/// Flag determines which types of operation we allow on the data-structure.\n\n/// The split is approximately equal among the operations we allow.\n\npub fn generate_operations(\n\n nop: usize,\n\n tid: usize,\n\n readonly: bool,\n\n writeonly: bool,\n\n readwrite: bool,\n\n) -> Vec<Operation<OpRd, OpWr>> {\n\n let mut orng = thread_rng();\n\n let mut arng = thread_rng();\n\n\n\n let mut ops = Vec::with_capacity(nop);\n\n for _i in 0..nop {\n\n let op: usize = 
orng.gen();\n\n\n\n match (readonly, writeonly, readwrite) {\n\n (true, true, true) => match op % 3 {\n\n 0 => ops.push(Operation::ReadOperation(OpRd::ReadOnly(\n\n tid,\n\n arng.gen(),\n\n arng.gen(),\n", "file_path": "benches/synthetic.rs", "rank": 39, "score": 110397.44814441384 }, { "content": "pub fn generate_qops_concurrent(\n\n nop: usize,\n\n write_ratio: usize,\n\n span: usize,\n\n) -> Vec<Operation<QueueConcurrent, ()>> {\n\n let mut ops = Vec::with_capacity(nop);\n\n\n\n let mut t_rng = rand::thread_rng();\n\n\n\n for idx in 0..nop {\n\n let _id = t_rng.gen_range(0, span as u64);\n\n\n\n if idx % 100 < write_ratio {\n\n if idx % 2 == 0 {\n\n ops.push(Operation::ReadOperation(QueueConcurrent::Push(\n\n t_rng.next_u64(),\n\n )));\n\n } else {\n\n ops.push(Operation::ReadOperation(QueueConcurrent::Pop));\n\n }\n\n } else {\n\n ops.push(Operation::ReadOperation(QueueConcurrent::Len));\n\n }\n\n }\n\n\n\n ops.shuffle(&mut t_rng);\n\n ops\n\n}\n\n\n", "file_path": "benches/lockfree.rs", "rank": 40, "score": 107276.87049927768 }, { "content": "pub fn generate_sops_concurrent(\n\n nop: usize,\n\n write_ratio: usize,\n\n span: usize,\n\n) -> Vec<Operation<SkipListConcurrent, OpWr>> {\n\n let mut ops = Vec::with_capacity(nop);\n\n\n\n let mut t_rng = rand::thread_rng();\n\n\n\n for idx in 0..nop {\n\n let _id = t_rng.gen_range(0, span as u64);\n\n\n\n if idx % 100 < write_ratio {\n\n ops.push(Operation::WriteOperation(OpWr::Push(\n\n t_rng.next_u64(),\n\n t_rng.next_u64(),\n\n )));\n\n } else {\n\n ops.push(Operation::ReadOperation(SkipListConcurrent::Get(\n\n t_rng.next_u64(),\n\n )));\n\n }\n\n }\n\n\n\n ops.shuffle(&mut t_rng);\n\n ops\n\n}\n\n\n", "file_path": "benches/lockfree.rs", "rank": 41, "score": 107276.87049927768 }, { "content": "pub fn generate_sops_partitioned_concurrent(\n\n nop: usize,\n\n write_ratio: usize,\n\n span: usize,\n\n) -> Vec<Operation<SkipListConcurrent, OpWr>> {\n\n let mut ops = Vec::with_capacity(nop);\n\n\n\n let mut t_rng = 
rand::thread_rng();\n\n\n\n for idx in 0..nop {\n\n let _id = t_rng.gen_range(0, span as u64);\n\n\n\n if idx % 100 < write_ratio {\n\n ops.push(Operation::WriteOperation(OpWr::Push(\n\n t_rng.next_u64() % 25_000_000,\n\n t_rng.next_u64(),\n\n )));\n\n } else {\n\n ops.push(Operation::ReadOperation(SkipListConcurrent::Get(\n\n t_rng.next_u64() % 25_000_000,\n", "file_path": "benches/lockfree.rs", "rank": 42, "score": 104451.42084538426 }, { "content": "/// A pending operation is a combination of the its op-code (T),\n\n/// and the corresponding result (R).\n\ntype PendingOperation<T, R> = Cell<(Option<T>, Option<R>)>;\n\n\n\n/// Contains all state local to a particular thread.\n\n///\n\n/// The primary purpose of this type is to batch operations issued on a thread before\n\n/// appending them to the shared log. This is achieved using a fixed sized array. Once\n\n/// executed against the replica, the results of these operations are stored back into\n\n/// the same array.\n\n///\n\n/// `T` is a type parameter required by the struct. `T` should identify operations\n\n/// issued by the thread (an opcode of sorts) and should also contain arguments/parameters\n\n/// required to execute these operations on the replicas.\n\n///\n\n/// `R` is a type parameter required by the struct. 
It is the type on the result obtained\n\n/// when an operation is executed against the replica.\n\n#[repr(align(64))]\n\npub(crate) struct Context<T, R>\n\nwhere\n\n T: Sized + Clone,\n\n R: Sized + Clone,\n", "file_path": "nr/src/context.rs", "rank": 43, "score": 99882.09241017862 }, { "content": "#[derive(Copy, Clone, Debug, PartialEq)]\n\nenum OpcodeRd {\n\n Identify(u64),\n\n}\n\n\n", "file_path": "benches/vspace.rs", "rank": 44, "score": 95638.72086559949 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nenum Modify {\n\n Put(u64, u64),\n\n}\n\n\n\nimpl LogMapper for Modify {\n\n fn hash(&self) -> usize {\n\n 0\n\n }\n\n}\n\n\n\n/// We support an immutable read operation to lookup a key from the hashmap.\n", "file_path": "cnr/examples/hashmap.rs", "rank": 45, "score": 95236.68581291466 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nenum Access {\n\n Get(u64),\n\n}\n\n\n\n/// The Dispatch traits executes `ReadOperation` (our Access enum)\n\n/// and `WriteOperation` (our `Modify` enum) against the replicated\n\n/// data-structure.\n\nimpl Dispatch for NrHashMap {\n\n type ReadOperation = Access;\n\n type WriteOperation = Modify;\n\n type Response = Option<u64>;\n\n\n\n /// The `dispatch` function applies the immutable operations.\n\n fn dispatch(&self, op: Self::ReadOperation) -> Self::Response {\n\n match op {\n\n Access::Get(key) => self.storage.get(&key).map(|v| *v),\n\n }\n\n }\n\n\n\n /// The `dispatch_mut` function applies the mutable operations.\n\n fn dispatch_mut(&mut self, op: Self::WriteOperation) -> Self::Response {\n\n match op {\n\n Modify::Put(key, value) => self.storage.insert(key, value),\n\n }\n\n }\n\n}\n\n\n", "file_path": "nr/examples/hashmap.rs", "rank": 46, "score": 95236.68581291466 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nenum Access {\n\n Get(u64),\n\n}\n\n\n\nimpl LogMapper for Access {\n\n fn hash(&self) -> usize {\n\n 0\n\n }\n\n}\n\n\n\n/// The Dispatch traits executes `ReadOperation` (our Access 
enum)\n\n/// and `WriteOperation` (our `Modify` enum) against the replicated\n\n/// data-structure.\n\nimpl Dispatch for NrHashMap {\n\n type ReadOperation = Access;\n\n type WriteOperation = Modify;\n\n type Response = Option<u64>;\n\n\n\n /// The `dispatch` function applies the immutable operations.\n\n fn dispatch(&self, op: Self::ReadOperation) -> Self::Response {\n", "file_path": "cnr/examples/hashmap.rs", "rank": 47, "score": 95236.68581291466 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nenum Modify {\n\n Put(u64, u64),\n\n}\n\n\n\n/// We support an immutable read operation to lookup a key from the hashmap.\n", "file_path": "nr/examples/hashmap.rs", "rank": 48, "score": 95236.68581291466 }, { "content": "fn kernel_vaddr_to_paddr(v: VAddr) -> PAddr {\n\n let vaddr_val: usize = v.into();\n\n PAddr::from(vaddr_val as u64 - 0x0)\n\n}\n\n\n", "file_path": "benches/vspace.rs", "rank": 50, "score": 93111.43113881044 }, { "content": "struct ReplicaAndToken<'a> {\n\n replica: sync::Arc<Replica<'a, NrHashMap>>,\n\n token: ReplicaToken,\n\n}\n\n\n\nimpl<'a> ReplicaAndToken<'a> {\n\n fn new(replica: sync::Arc<Replica<'a, NrHashMap>>) -> ReplicaAndToken<'a> {\n\n let token = replica.register().unwrap();\n\n ReplicaAndToken { replica, token }\n\n }\n\n}\n\n\n\nimpl<'a> Backend for ReplicaAndToken<'a> {\n\n fn b_get(&self, key: u64) -> u64 {\n\n match self.replica.execute(OpRd::Get(key), self.token) {\n\n Ok(res) => return res,\n\n Err(_) => unreachable!(),\n\n }\n\n }\n\n\n\n fn b_put(&self, key: u64, value: u64) {\n\n self.replica\n\n .execute_mut(OpWr::Put(key, value), self.token)\n\n .unwrap();\n\n }\n\n}\n", "file_path": "benches/chashbench.rs", "rank": 51, "score": 92050.96697649902 }, { "content": "struct ReplicaAndToken<'a> {\n\n replica: sync::Arc<Replica<'a, NrHashMap>>,\n\n token: ReplicaToken,\n\n}\n\n\n\nimpl<'a> ReplicaAndToken<'a> {\n\n fn new(replica: sync::Arc<Replica<'a, NrHashMap>>) -> ReplicaAndToken<'a> {\n\n let token = 
replica.register().unwrap();\n\n ReplicaAndToken { replica, token }\n\n }\n\n}\n\n\n\nimpl<'a> Backend for ReplicaAndToken<'a> {\n\n fn b_get(&mut self, key: u64) -> u64 {\n\n match self.replica.execute(OpRd::Get(key), self.token) {\n\n Ok(res) => return res,\n\n Err(_) => unreachable!(),\n\n }\n\n }\n\n\n\n fn b_put(&mut self, key: u64, value: u64) {\n\n self.replica\n\n .execute_mut(OpWr::Put(key, value), self.token)\n\n .unwrap();\n\n }\n\n}\n\n\n", "file_path": "benches/hashbench.rs", "rank": 52, "score": 92050.96697649902 }, { "content": "#[derive(Debug, Default, Clone, Copy, PartialEq, Eq)]\n\nstruct VSpaceError {\n\n at: u64,\n\n}\n\n\n\n/// Type of resource we're trying to allocate\n\n#[derive(Debug, PartialEq, Eq, Copy, Clone)]\n\npub enum ResourceType {\n\n /// ELF Binary data\n\n Binary,\n\n /// Physical memory\n\n Memory,\n\n /// Page-table meta-data\n\n PageTable,\n\n}\n\n\n\n/// Mapping rights to give to address translation.\n\n#[derive(Debug, PartialEq, Eq, Copy, Clone)]\n\n#[allow(unused)]\n\npub enum MapAction {\n\n /// Don't map\n", "file_path": "benches/vspace.rs", "rank": 53, "score": 91882.70020083155 }, { "content": "/// We initialize a log, and two replicas for a hashmap, register with the replica\n\n/// and then execute operations.\n\nfn main() {\n\n // The operation log for storing `WriteOperation`, it has a size of 2 MiB:\n\n let log = Arc::new(Log::<<NrHashMap as Dispatch>::WriteOperation>::new(\n\n 2 * 1024 * 1024,\n\n 1,\n\n ));\n\n\n\n // Next, we create two replicas of the hashmap\n\n let replica1 = Replica::<NrHashMap>::new(vec![log.clone()]);\n\n let replica2 = Replica::<NrHashMap>::new(vec![log.clone()]);\n\n\n\n // The replica executes a Modify or Access operations by calling\n\n // `execute_mut` and `execute`. 
Eventually they end up in the `Dispatch` trait.\n\n let thread_loop = |replica: &Arc<Replica<NrHashMap>>, ridx| {\n\n for i in 0..2048 {\n\n let _r = match i % 2 {\n\n 0 => replica.execute_mut(Modify::Put(i, i + 1), ridx),\n\n 1 => {\n\n let response = replica.execute(Access::Get(i - 1), ridx);\n\n assert_eq!(response, Some(i));\n", "file_path": "cnr/examples/hashmap.rs", "rank": 54, "score": 90072.22722399925 }, { "content": "/// We initialize a log, and two replicas for a hashmap, register with the replica\n\n/// and then execute operations.\n\nfn main() {\n\n // The operation log for storing `WriteOperation`, it has a size of 2 MiB:\n\n let log = Arc::new(Log::<<NrHashMap as Dispatch>::WriteOperation>::new(\n\n 2 * 1024 * 1024,\n\n ));\n\n\n\n // Next, we create two replicas of the hashmap\n\n let replica1 = Replica::<NrHashMap>::new(&log);\n\n let replica2 = Replica::<NrHashMap>::new(&log);\n\n\n\n // The replica executes a Modify or Access operations by calling\n\n // `execute_mut` and `execute`. 
Eventually they end up in the `Dispatch` trait.\n\n let thread_loop = |replica: &Arc<Replica<NrHashMap>>, ridx| {\n\n for i in 0..2048 {\n\n let _r = match i % 2 {\n\n 0 => replica.execute_mut(Modify::Put(i, i + 1), ridx),\n\n 1 => {\n\n let response = replica.execute(Access::Get(i - 1), ridx);\n\n assert_eq!(response, Some(i));\n\n response\n", "file_path": "nr/examples/hashmap.rs", "rank": 55, "score": 90072.22722399925 }, { "content": "#[repr(C)]\n\nstruct lfht_test_node {\n\n node: urcu_sys::cds_lfht_node,\n\n key: u64,\n\n data: u64,\n\n /* cache-cold for iteration */\n\n head: urcu_sys::rcu_head,\n\n}\n\n\n\nunsafe extern \"C\" fn test_match(node: *mut urcu_sys::cds_lfht_node, key: *const c_void) -> i32 {\n\n let my_key = key as u64;\n\n let test_node: *mut lfht_test_node = to_test_node(node);\n\n (my_key == (*test_node).key) as i32\n\n}\n\n\n\nunsafe fn to_test_node(node: *mut urcu_sys::cds_lfht_node) -> *mut lfht_test_node {\n\n mem::transmute(node)\n\n}\n\n\n\nimpl Dispatch for RcuHashMap {\n\n type ReadOperation = OpConcurrent;\n", "file_path": "benches/hashmap_comparisons.rs", "rank": 56, "score": 88354.05714428233 }, { "content": "#[test]\n\nfn replicas_are_equal() {\n\n let t = 4usize;\n\n let r = 2usize;\n\n let l = 1usize;\n\n let n = 50usize;\n\n\n\n let log = Arc::new(Log::<<Stack as Dispatch>::WriteOperation>::new(\n\n l * 1024 * 1024 * 1024,\n\n ));\n\n\n\n let mut replicas = Vec::with_capacity(r);\n\n for _i in 0..r {\n\n replicas.push(Replica::<Stack>::new(&log));\n\n }\n\n\n\n let mut threads = Vec::new();\n\n let barrier = Arc::new(Barrier::new(t * r));\n\n\n\n for i in 0..r {\n\n for _j in 0..t {\n", "file_path": "cnr/tests/stack.rs", "rank": 57, "score": 86275.76526013858 }, { "content": "#[test]\n\nfn replicas_are_equal() {\n\n let t = 4usize;\n\n let r = 2usize;\n\n let l = 1usize;\n\n let n = 50usize;\n\n\n\n let log = Arc::new(Log::<<Stack as Dispatch>::WriteOperation>::new(\n\n l * 1024 * 1024 * 1024,\n\n ));\n\n\n\n let mut replicas = 
Vec::with_capacity(r);\n\n for _i in 0..r {\n\n replicas.push(Replica::<Stack>::new(&log));\n\n }\n\n\n\n let mut threads = Vec::new();\n\n let barrier = Arc::new(Barrier::new(t * r));\n\n\n\n for i in 0..r {\n\n for _j in 0..t {\n", "file_path": "nr/tests/stack.rs", "rank": 58, "score": 86275.76526013858 }, { "content": "fn compare_vectors<T: PartialEq>(a: &Vec<T>, b: &Vec<T>) -> bool {\n\n let matching = a.iter().zip(b.iter()).filter(|&(a, b)| a == b).count();\n\n matching == a.len() && matching == b.len()\n\n}\n\n\n\nimpl Stack {\n\n pub fn push(&mut self, data: u32) {\n\n self.storage.push(data);\n\n }\n\n\n\n pub fn pop(&mut self) -> Option<u32> {\n\n let r = self.storage.pop();\n\n self.popped.push(r);\n\n return r;\n\n }\n\n\n\n pub fn peek(&self) -> Option<u32> {\n\n let mut r = None;\n\n let len = self.storage.len();\n\n if len > 0 {\n", "file_path": "nr/tests/stack.rs", "rank": 59, "score": 72714.02253969273 }, { "content": "fn compare_vectors<T: PartialEq>(a: &Vec<T>, b: &Vec<T>) -> bool {\n\n let matching = a.iter().zip(b.iter()).filter(|&(a, b)| a == b).count();\n\n matching == a.len() && matching == b.len()\n\n}\n\n\n\nimpl Stack {\n\n pub fn push(&mut self, data: u32) {\n\n self.storage.push(data);\n\n }\n\n\n\n pub fn pop(&mut self) -> Option<u32> {\n\n let r = self.storage.pop();\n\n self.popped.push(r);\n\n return r;\n\n }\n\n\n\n pub fn peek(&self) -> Option<u32> {\n\n let mut r = None;\n\n let len = self.storage.len();\n\n if len > 0 {\n", "file_path": "cnr/tests/stack.rs", "rank": 60, "score": 72714.02253969273 }, { "content": "// Copyright © 2019-2020 VMware, Inc. All Rights Reserved.\n\n// SPDX-License-Identifier: Apache-2.0 OR MIT\n\n\n\n//! 
Utility functions to do multi-threaded benchmarking of the log infrastructure.\n\n#![allow(dead_code)]\n\n\n\nuse std::fmt::Debug;\n\n\n\npub mod benchmark;\n\npub mod topology;\n\n\n\n/// A wrapper type to distinguish between arbitrary generated read or write operations\n\n/// in the test harness.\n\n#[derive(Debug, Clone)]\n\npub enum Operation<R: Sized + Clone + PartialEq + Debug, W: Sized + Clone + PartialEq + Debug> {\n\n ReadOperation(R),\n\n WriteOperation(W),\n\n}\n\n\n\n/// Type to identify an OS thread.\n\n/// Ideally in our benchmark we should have one OS thread per core.\n\n/// On MacOS this is not guaranteed.\n\npub type ThreadId = u64;\n\n\n\n// Pin a thread to a core\n\n#[cfg(target_os = \"linux\")]\n", "file_path": "benches/utils/mod.rs", "rank": 61, "score": 71376.62371091847 }, { "content": "fn paddr_to_kernel_vaddr(p: PAddr) -> VAddr {\n\n let paddr_val: u64 = p.into();\n\n VAddr::from((paddr_val + 0x0) as usize)\n\n}\n\n\n", "file_path": "benches/vspace.rs", "rank": 62, "score": 70786.71751531653 }, { "content": "fn run_std(lock: Arc<StdLock<usize>>, end: time::Instant, write: bool) -> (bool, usize) {\n\n let mut ops = 0;\n\n let mut t_rng = rand::thread_rng();\n\n\n\n while time::Instant::now() < end {\n\n if write {\n\n let mut ele = lock.write().unwrap();\n\n *ele = t_rng.next_u64() as usize;\n\n } else {\n\n let ele = lock.read().unwrap();\n\n let _a = *ele;\n\n }\n\n ops += 1;\n\n }\n\n (write, ops)\n\n}\n\n\n", "file_path": "benches/rwlockbench.rs", "rank": 63, "score": 62426.873011365926 }, { "content": "#[derive(Copy, Clone, Debug, PartialEq)]\n\nenum OpcodeWr {\n\n Map(VAddr, usize, MapAction, PAddr),\n\n MapDevice(VAddr, PAddr, usize),\n\n}\n\n\n", "file_path": "benches/vspace.rs", "rank": 64, "score": 61840.01301983535 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nenum Access {\n\n Peek,\n\n}\n\n\n\nimpl LogMapper for Access {\n\n fn hash(&self) -> usize {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "cnr/examples/stack.rs", "rank": 
65, "score": 61835.65800939007 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nenum Modify {\n\n Push(u32),\n\n Pop,\n\n}\n\n\n\nimpl LogMapper for Modify {\n\n fn hash(&self) -> usize {\n\n 0\n\n }\n\n}\n\n\n\n/// We support an immutable read operation to peek the stack.\n", "file_path": "cnr/examples/stack.rs", "rank": 66, "score": 61835.65800939007 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nenum Access {\n\n Peek,\n\n}\n\n\n", "file_path": "nr/examples/stack.rs", "rank": 67, "score": 61835.65800939007 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nenum Modify {\n\n Push(u32),\n\n Pop,\n\n}\n\n\n\n/// We support an immutable read operation to peek the stack.\n", "file_path": "nr/examples/stack.rs", "rank": 68, "score": 61835.65800939007 }, { "content": "enum EvHandle {\n\n Read(evmap::ReadHandle<u64, u64>),\n\n Write(sync::Arc<parking_lot::Mutex<(evmap::WriteHandle<u64, u64>, usize, usize)>>),\n\n}\n\n\n\nimpl Backend for EvHandle {\n\n fn b_get(&mut self, key: u64) -> u64 {\n\n if let EvHandle::Read(ref r) = *self {\n\n r.get_and(&key, |v| v[0]).unwrap_or(0)\n\n } else {\n\n unreachable!();\n\n }\n\n }\n\n\n\n fn b_put(&mut self, key: u64, value: u64) {\n\n if let EvHandle::Write(ref w) = *self {\n\n let mut w = w.lock();\n\n w.0.update(key, value);\n\n w.1 += 1;\n\n if w.1 == w.2 {\n", "file_path": "benches/hashbench.rs", "rank": 69, "score": 61822.235872737336 }, { "content": "/// The actual stack, it uses a single-threaded Vec.\n\nstruct Stack {\n\n storage: Vec<u32>,\n\n}\n\n\n\nimpl Default for Stack {\n\n /// The stack Default implementation, as it is\n\n /// executed for every Replica.\n\n ///\n\n /// This should be deterministic as it is used to create multiple instances\n\n /// of a Stack for every replica.\n\n fn default() -> Stack {\n\n const DEFAULT_STACK_SIZE: u32 = 1_000u32;\n\n\n\n let mut s = Stack {\n\n storage: Default::default(),\n\n };\n\n\n\n for e in 0..DEFAULT_STACK_SIZE {\n\n s.storage.push(e);\n\n }\n", 
"file_path": "nr/examples/stack.rs", "rank": 70, "score": 61529.75858259585 }, { "content": "/// The actual stack, it uses a single-threaded Vec.\n\nstruct Stack {\n\n storage: SegQueue<u32>,\n\n}\n\n\n\nimpl Default for Stack {\n\n /// The stack Default implementation, as it is\n\n /// executed for every Replica.\n\n ///\n\n /// This should be deterministic as it is used to create multiple instances\n\n /// of a Stack for every replica.\n\n fn default() -> Stack {\n\n const DEFAULT_STACK_SIZE: u32 = 1_000u32;\n\n\n\n let s = Stack {\n\n storage: Default::default(),\n\n };\n\n\n\n for e in 0..DEFAULT_STACK_SIZE {\n\n s.storage.push(e);\n\n }\n", "file_path": "cnr/examples/stack.rs", "rank": 71, "score": 61529.75858259585 }, { "content": "struct NrFilesystem {\n\n memfs: MemFS,\n\n write_buffer: Vec<Vec<u8>>,\n\n read_buffer: Vec<UnsafeCell<Vec<u8>>>,\n\n}\n\n\n\nimpl Default for NrFilesystem {\n\n fn default() -> NrFilesystem {\n\n let topology = MachineTopology::new();\n\n let sockets = topology.sockets();\n\n let num_cpus = topology.cpus_on_socket(sockets[0]).len();\n\n\n\n let memfs = MemFS::default();\n\n //Create a private file for each core.\n\n let buffer = vec![0xb; 4096];\n\n for i in 0..num_cpus {\n\n let filename = format!(\"file-{}\", i);\n\n match memfs.create(&filename, u64::from(FileModes::S_IRWXU)) {\n\n Ok(mnode_num) => {\n\n memfs\n", "file_path": "benches/nrfs.rs", "rank": 72, "score": 61525.38112870516 }, { "content": "struct Stack {\n\n storage: Vec<u32>,\n\n popped: Vec<Option<u32>>,\n\n peeked: RwLock<Vec<Option<u32>>>,\n\n}\n\n\n", "file_path": "nr/tests/stack.rs", "rank": 73, "score": 61525.38112870516 }, { "content": "struct Stack {\n\n storage: Vec<u32>,\n\n popped: Vec<Option<u32>>,\n\n peeked: RwLock<Vec<Option<u32>>>,\n\n}\n\n\n", "file_path": "cnr/tests/stack.rs", "rank": 74, "score": 61525.38112870516 }, { "content": "#[derive(Eq, PartialEq)]\n\nstruct VerifyStack {\n\n storage: Vec<u32>,\n\n per_replica_counter: HashMap<u16, 
u16>,\n\n}\n\n\n\nimpl VerifyStack {\n\n pub fn push(&mut self, data: u32) {\n\n self.storage.push(data);\n\n }\n\n\n\n pub fn pop(&mut self) -> u32 {\n\n self.storage.pop().unwrap()\n\n }\n\n\n\n pub fn peek(&self) -> u32 {\n\n self.storage.last().unwrap().clone()\n\n }\n\n}\n\n\n\nimpl Default for VerifyStack {\n", "file_path": "cnr/tests/stack.rs", "rank": 75, "score": 59793.89578671095 }, { "content": "#[derive(Eq, PartialEq)]\n\nstruct VerifyStack {\n\n storage: Vec<u32>,\n\n per_replica_counter: HashMap<u16, u16>,\n\n}\n\n\n\nimpl VerifyStack {\n\n pub fn push(&mut self, data: u32) {\n\n self.storage.push(data);\n\n }\n\n\n\n pub fn pop(&mut self) -> u32 {\n\n self.storage.pop().unwrap()\n\n }\n\n\n\n pub fn peek(&self) -> u32 {\n\n self.storage.last().unwrap().clone()\n\n }\n\n}\n\n\n\nimpl Default for VerifyStack {\n", "file_path": "nr/tests/stack.rs", "rank": 76, "score": 59793.89578671095 }, { "content": "#[repr(C)]\n\nstruct lfht_test_node {\n\n node: urcu_sys::cds_lfht_node,\n\n key: u64,\n\n data: u64,\n\n /* cache-cold for iteration */\n\n head: urcu_sys::rcu_head,\n\n}\n\n\n\nunsafe fn to_test_node(node: *mut urcu_sys::cds_lfht_node) -> *mut lfht_test_node {\n\n mem::transmute(node)\n\n}\n\n\n\nunsafe extern \"C\" fn test_match(node: *mut urcu_sys::cds_lfht_node, key: *const c_void) -> i32 {\n\n let my_key = key as u64;\n\n let test_node: *mut lfht_test_node = to_test_node(node);\n\n (my_key == (*test_node).key) as i32\n\n}\n\n\n\nimpl Backend for sync::Arc<RcuHashMap> {\n\n fn b_get(&mut self, key: u64) -> u64 {\n", "file_path": "benches/hashbench.rs", "rank": 77, "score": 59793.51156484682 }, { "content": "struct RcuHashMap {\n\n test_ht: *mut urcu_sys::cds_lfht,\n\n}\n\n\n\nunsafe impl Sync for RcuHashMap {}\n\nunsafe impl Send for RcuHashMap {}\n\n\n\nimpl RcuHashMap {\n\n fn new() -> RcuHashMap {\n\n unsafe {\n\n // Not quite using 5M entries since cds_lfht needs power-of-twos\n\n let test_ht: *mut urcu_sys::cds_lfht = 
urcu_sys::cds_lfht_new(\n\n 4194304, // initial hash-buckes 2^22\n\n 4194304, // minimal hash-buckets 2^22\n\n 8388608, // maximum hash-buckets 2^23\n\n urcu_sys::CDS_LFHT_AUTO_RESIZE as i32,\n\n ptr::null_mut(),\n\n );\n\n assert_ne!(test_ht, ptr::null_mut());\n\n RcuHashMap { test_ht }\n\n }\n\n }\n\n}\n\n\n", "file_path": "benches/hashbench.rs", "rank": 78, "score": 59788.69682512646 }, { "content": "fn main() {\n\n let mut harness = TestHarness::new(std::time::Duration::from_secs(3));\n\n log_scale_bench(&mut harness);\n\n}\n", "file_path": "benches/log.rs", "rank": 79, "score": 58576.430066691726 }, { "content": "fn main() {\n\n let args = std::env::args().filter(|e| e != \"--bench\");\n\n let matches = App::new(\"Concurrent HashMap Benchmarker\")\n\n .version(crate_version!())\n\n .author(\"Jon Gjengset <[email protected]>, Gerd Zellweger <[email protected]>\")\n\n .about(\n\n \"Benchmark multiple implementations of concurrent HashMaps with varying read/write load\",\n\n )\n\n .arg(\n\n Arg::with_name(\"readers\")\n\n .short(\"r\")\n\n .long(\"readers\")\n\n .help(\"Set the number of readers\")\n\n .required(true)\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"writers\")\n\n .short(\"w\")\n\n .long(\"writers\")\n", "file_path": "benches/chashbench.rs", "rank": 80, "score": 58576.430066691726 }, { "content": "fn main() {\n\n let _r = env_logger::try_init();\n\n let mut harness = Default::default();\n\n\n\n stack_single_threaded(&mut harness);\n\n stack_scale_out(&mut harness);\n\n}\n", "file_path": "benches/stack.rs", "rank": 81, "score": 58576.430066691726 }, { "content": "fn main() {\n\n let _r = env_logger::try_init();\n\n utils::disable_dvfs();\n\n\n\n let mut harness = Default::default();\n\n // This translate to drbl and dwol in fxmark.\n\n let write_ratios = vec![0, 100];\n\n\n\n let topology = MachineTopology::new();\n\n let sockets = topology.sockets();\n\n let num_cpus = topology.cpus_on_socket(sockets[0]).len();\n\n\n\n for write_ratio 
in write_ratios.into_iter() {\n\n nrfs_scale_out(&mut harness, num_cpus, write_ratio);\n\n }\n\n}\n", "file_path": "benches/nrfs.rs", "rank": 82, "score": 58576.430066691726 }, { "content": "fn main() {\n\n let args = std::env::args().filter(|e| e != \"--bench\");\n\n let matches = App::new(\"Concurrent HashMap Benchmarker\")\n\n .version(crate_version!())\n\n .author(\"Jon Gjengset <[email protected]>, Gerd Zellweger <[email protected]>\")\n\n .about(\n\n \"Benchmark multiple implementations of concurrent HashMaps with varying read/write load\",\n\n )\n\n .arg(\n\n Arg::with_name(\"readers\")\n\n .short(\"r\")\n\n .long(\"readers\")\n\n .help(\"Set the number of readers\")\n\n .required(true)\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"compare\")\n\n .short(\"c\")\n\n .multiple(true)\n", "file_path": "benches/hashbench.rs", "rank": 83, "score": 58576.430066691726 }, { "content": "fn main() {\n\n let _r = env_logger::try_init();\n\n let mut harness = Default::default();\n\n\n\n vspace_single_threaded(&mut harness);\n\n vspace_scale_out(&mut harness);\n\n}\n", "file_path": "benches/vspace.rs", "rank": 84, "score": 58576.430066691726 }, { "content": "fn main() {\n\n let _r = env_logger::try_init();\n\n let mut harness = Default::default();\n\n\n\n synthetic_single_threaded(&mut harness);\n\n synthetic_scale_out(&mut harness);\n\n}\n", "file_path": "benches/synthetic.rs", "rank": 85, "score": 58576.430066691726 }, { "content": "fn main() {\n\n let args = std::env::args().filter(|e| e != \"--bench\");\n\n let matches = App::new(\"RwLock Benchmarker\")\n\n .version(crate_version!())\n\n .about(\"Benchmark read/write lock\")\n\n .arg(\n\n Arg::with_name(\"readers\")\n\n .short(\"r\")\n\n .long(\"readers\")\n\n .help(\"Set the number of readers\")\n\n .required(true)\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"writers\")\n\n .short(\"w\")\n\n .long(\"writers\")\n\n .required(true)\n\n .help(\"Set the number of writers\")\n\n 
.takes_value(true),\n", "file_path": "benches/rwlockbench.rs", "rank": 86, "score": 58576.430066691726 }, { "content": "fn main() {\n\n let _r = env_logger::try_init();\n\n let mut harness = Default::default();\n\n\n\n memfs_single_threaded(&mut harness);\n\n memfs_scale_out(&mut harness);\n\n}\n", "file_path": "benches/memfs.rs", "rank": 87, "score": 58576.430066691726 }, { "content": "fn main() {\n\n let _r = env_logger::try_init();\n\n if cfg!(feature = \"smokebench\") {\n\n log::warn!(\"Running with feature 'smokebench' may not get the desired results\");\n\n }\n\n\n\n utils::disable_dvfs();\n\n\n\n let mut harness = Default::default();\n\n let write_ratios = vec![0, 10, 80, 100];\n\n\n\n for write_ratio in write_ratios.into_iter() {\n\n /*concurrent_ds_scale_out::<SegQueueWrapper>(\n\n &mut harness,\n\n \"segqueue\",\n\n write_ratio,\n\n Box::new(move || generate_qops_concurrent(NOP, write_ratio, KEY_SPACE)),\n\n );*/\n\n\n\n /*concurrent_ds_scale_out::<SkipListWrapper>(\n", "file_path": "benches/lockfree.rs", "rank": 88, "score": 58576.430066691726 }, { "content": "#[derive(Default)]\n\n#[repr(align(64))]\n\nstruct Entry<T>\n\nwhere\n\n T: Sized + Clone,\n\n{\n\n /// The operation that this entry represents.\n\n operation: Option<T>,\n\n\n\n /// Identifies the replica that issued the above operation.\n\n replica: usize,\n\n\n\n /// Indicates whether this entry represents a valid operation when on the log.\n\n alivef: AtomicBool,\n\n}\n\n\n\n/// A log of operations that is typically accessed by multiple\n\n/// [Replica](struct.Replica.html).\n\n///\n\n/// Operations can be added to the log by calling the `append()` method and\n\n/// providing a list of operations to be performed.\n\n///\n", "file_path": "nr/src/log.rs", "rank": 89, "score": 57285.582548974526 }, { "content": "#[derive(Default)]\n\n#[repr(align(64))]\n\nstruct Entry<T>\n\nwhere\n\n T: Sized + Clone,\n\n{\n\n /// The operation that this entry represents.\n\n operation: Option<T>,\n\n\n\n /// 
Identifies the replica that issued the above operation.\n\n replica: usize,\n\n\n\n /// Indicates whether this entry represents a valid operation when on the log.\n\n alivef: AtomicBool,\n\n}\n\n\n\n/// A log of operations that is typically accessed by multiple\n\n/// [Replica](struct.Replica.html).\n\n///\n\n/// Operations can be added to the log by calling the `append()` method and\n\n/// providing a list of operations to be performed.\n\n///\n", "file_path": "cnr/src/log.rs", "rank": 90, "score": 57285.582548974526 }, { "content": "/// We initialize a log, and two replicas for a stack, register with the replica\n\n/// and then execute operations.\n\nfn main() {\n\n // The operation log for storing `WriteOperation`, it has a size of 2 MiB:\n\n let log = Arc::new(Log::<<Stack as Dispatch>::WriteOperation>::new(\n\n 2 * 1024 * 1024,\n\n ));\n\n\n\n // Next, we create two replicas of the stack\n\n let replica1 = Replica::<Stack>::new(&log);\n\n let replica2 = Replica::<Stack>::new(&log);\n\n\n\n // The replica executes a Modify or Access operations by calling\n\n // `execute_mut` and `execute`. 
Eventually they end up in the `Dispatch` trait.\n\n let thread_loop = |replica: &Arc<Replica<Stack>>, ridx| {\n\n for i in 0..2048 {\n\n let _r = match i % 3 {\n\n 0 => replica.execute_mut(Modify::Push(i as u32), ridx),\n\n 1 => replica.execute_mut(Modify::Pop, ridx),\n\n 2 => replica.execute(Access::Peek, ridx),\n\n _ => unreachable!(),\n\n };\n", "file_path": "nr/examples/stack.rs", "rank": 91, "score": 56666.81859563867 }, { "content": "/// We initialize a log, and two replicas for a stack, register with the replica\n\n/// and then execute operations.\n\nfn main() {\n\n // The operation log for storing `WriteOperation`, it has a size of 2 MiB:\n\n let log = Arc::new(Log::<<Stack as Dispatch>::WriteOperation>::new(\n\n 2 * 1024 * 1024,\n\n 1,\n\n ));\n\n\n\n // Next, we create two replicas of the stack\n\n let replica1 = Replica::<Stack>::new(vec![log.clone()]);\n\n let replica2 = Replica::<Stack>::new(vec![log.clone()]);\n\n\n\n // The replica executes a Modify or Access operations by calling\n\n // `execute_mut` and `execute`. 
Eventually they end up in the `Dispatch` trait.\n\n let thread_loop = |replica: &Arc<Replica<Stack>>, ridx| {\n\n for i in 0..2048 {\n\n let _r = match i % 3 {\n\n 0 => replica.execute_mut(Modify::Push(i as u32), ridx),\n\n 1 => replica.execute_mut(Modify::Pop, ridx),\n\n 2 => replica.execute(Access::Peek, ridx),\n\n _ => unreachable!(),\n", "file_path": "cnr/examples/stack.rs", "rank": 92, "score": 56666.81859563867 }, { "content": "fn run_rwlock(\n\n lock: Arc<RwLock<usize>>,\n\n end: time::Instant,\n\n write: bool,\n\n tid: usize,\n\n readers: usize,\n\n) -> (bool, usize) {\n\n let mut ops = 0;\n\n let mut t_rng = rand::thread_rng();\n\n\n\n while time::Instant::now() < end {\n\n if write {\n\n let mut ele = lock.write(readers);\n\n *ele = t_rng.next_u64() as usize;\n\n } else {\n\n let ele = lock.read(tid);\n\n let _a = *ele;\n\n }\n\n ops += 1;\n\n }\n\n (write, ops)\n\n}\n", "file_path": "benches/rwlockbench.rs", "rank": 93, "score": 56662.01140214996 }, { "content": "/// Every data structure must implement [LogMapper](trait.LogMapper.html) trait\n\n/// for [ReadOperation](trait.Dispatch.html#associatedtype.ReadOperation) and\n\n/// [WriteOperation](trait.Dispatch.html#associatedtype.WriteOperation).\n\n///\n\n/// Data structure implement `hash` that is used to map each operation to a log.\n\n/// All the conflicting operations must map to a single log and the commutative\n\n/// operations can map to same or different logs based on the operation argument.\n\n///\n\n/// [Replica](struct.Replica.html) internally performs a modulo operation on `hash`\n\n/// return value with the total number of logs. 
The data structure can implement\n\n/// trait to return a value between 0 and (#logs-1) to avoid the modulo operation.\n\npub trait LogMapper {\n\n /// Method to convert the operation and it's arguments to a log number.\n\n fn hash(&self) -> usize;\n\n}\n\n\n", "file_path": "cnr/src/lib.rs", "rank": 94, "score": 55999.03954202541 }, { "content": "#[test]\n\nfn sequential_test() {\n\n let log = Arc::new(Log::<<Stack as Dispatch>::WriteOperation>::new(\n\n 4 * 1024 * 1024,\n\n ));\n\n\n\n let mut orng = thread_rng();\n\n let nop = 50;\n\n\n\n let r = Replica::<Stack>::new(&log);\n\n let idx = r.register().expect(\"Failed to register with Replica.\");\n\n let mut correct_stack: Vec<u32> = Vec::new();\n\n let mut correct_popped: Vec<Option<u32>> = Vec::new();\n\n let mut correct_peeked: Vec<Option<u32>> = Vec::new();\n\n\n\n // Populate with some initial data\n\n for _i in 0..50 {\n\n let element = orng.gen();\n\n r.execute_mut(OpWr::Push(element), idx).unwrap();\n\n correct_stack.push(element);\n\n }\n", "file_path": "cnr/tests/stack.rs", "rank": 95, "score": 54944.918752178644 }, { "content": "#[test]\n\nfn sequential_test() {\n\n let log = Arc::new(Log::<<Stack as Dispatch>::WriteOperation>::new(\n\n 4 * 1024 * 1024,\n\n ));\n\n\n\n let mut orng = thread_rng();\n\n let nop = 50;\n\n\n\n let r = Replica::<Stack>::new(&log);\n\n let idx = r.register().expect(\"Failed to register with Replica.\");\n\n let mut correct_stack: Vec<u32> = Vec::new();\n\n let mut correct_popped: Vec<Option<u32>> = Vec::new();\n\n let mut correct_peeked: Vec<Option<u32>> = Vec::new();\n\n\n\n // Populate with some initial data\n\n for _i in 0..50 {\n\n let element = orng.gen();\n\n r.execute_mut(OpWr::Push(element), idx).unwrap();\n\n correct_stack.push(element);\n\n }\n", "file_path": "nr/tests/stack.rs", "rank": 96, "score": 54944.918752178644 }, { "content": "struct NrMemFilesystem(MemFilesystem);\n\n\n\nimpl Default for NrMemFilesystem {\n\n fn default() -> NrMemFilesystem {\n\n let mut 
memfs = MemFilesystem::new();\n\n\n\n fn setup_initial_structure(memfs: &mut MemFilesystem) -> Result<(), Error> {\n\n let ino = 1; // TODO: hard-coded root inode, get through a lookup()\n\n let ino = memfs.mkdir(ino, &OsStr::new(\"tmp\"), 0)?.ino;\n\n let ino = memfs.mkdir(ino, &OsStr::new(\"largefile1\"), 0)?.ino;\n\n\n\n let ino = memfs.create(ino, &OsStr::new(\"00000001\"), 0, 0)?.ino;\n\n memfs.write(ino, 0, 0, &[1; 4096], 0)?;\n\n assert_eq!(ino, 5, \"Adjust `generate_fs_operation` accordingly!\");\n\n Ok(())\n\n }\n\n\n\n setup_initial_structure(&mut memfs).expect(\"Can't initialize FS\");\n\n\n\n NrMemFilesystem(memfs)\n", "file_path": "benches/memfs.rs", "rank": 97, "score": 54294.4769829414 }, { "content": "\n\nmod mkbench;\n\nmod utils;\n\n\n\nuse mkbench::ReplicaTrait;\n\n\n\nuse utils::benchmark::*;\n\nuse utils::Operation;\n\n\n\n#[derive(Debug, Default, Eq, PartialEq, Copy, Clone)]\n\npub struct Nop(usize);\n\n\n\nimpl Dispatch for Nop {\n\n type ReadOperation = ();\n\n type WriteOperation = usize;\n\n type Response = ();\n\n\n\n fn dispatch(&self, _op: Self::ReadOperation) -> Self::Response {\n\n unreachable!();\n\n }\n\n\n\n fn dispatch_mut(&mut self, _op: Self::WriteOperation) -> Self::Response {\n\n unreachable!();\n\n }\n\n}\n\n\n\n/// Compare scale-out behaviour of log.\n", "file_path": "benches/log.rs", "rank": 98, "score": 62.32703752252587 } ]
Rust
src/gen/generate.rs
warpwm/lule
dd429596aa32895bd19ba814951ba1fda65275c3
use rand::prelude::*; use crate::scheme::*; pub fn gen_main_six(col: &Vec<pastel::Color>) -> Vec<pastel::Color> { let mut colors = col.clone(); colors.retain(|x| x.to_lab().l > 20.0); colors.retain(|x| x.to_lab().l < 80.0); colors.sort_by_key(|c| (c.to_lch().l) as i32); colors.reverse(); let mut i = 0; while colors.len() < 6 { colors.push(pastel::Color::complementary(&colors[i])); i = i +1; } let mut main_colors: Vec<pastel::Color> = Vec::new(); for i in 0..6 { main_colors.push(colors[i].clone()) } main_colors.sort_by_key(|c| (c.to_lch().c) as i32); main_colors.reverse(); main_colors } pub fn get_black_white(ac: &pastel::Color, black_mix: f64, white_mix: f64, theme: bool) -> (pastel::Color, pastel::Color) { let black = pastel::Color::from_rgb(0,0,0); let white = pastel::Color::from_rgb(255,255,255); let dark = black.mix::<pastel::RGBA<f64>>(&ac, pastel::Fraction::from(black_mix)); let light = white.mix::<pastel::RGBA<f64>>(&ac, pastel::Fraction::from(white_mix)); if theme { (dark, light) } else { (light, dark) } } pub fn get_two_grays(ac: &pastel::Color, mix: f64, theme: bool) -> (pastel::Color, pastel::Color) { let darker = pastel::Color::from_rgb(100,100,100); let lighter = pastel::Color::from_rgb(170,170,170); let dark = darker.mix::<pastel::RGBA<f64>>(&ac, pastel::Fraction::from(mix)); let light = lighter.mix::<pastel::RGBA<f64>>(&ac, pastel::Fraction::from(mix)); if theme { (dark, light) } else { (light, dark) } } pub fn gen_prime_six(colors: Vec<pastel::Color>, mix: f64, theme: bool) -> Vec<pastel::Color> { let mut second_colors: Vec<pastel::Color> = Vec::new(); for col in colors.iter() { let new_col = if theme { col.lighten(mix) } else { col.darken(mix) }; second_colors.push(new_col) } second_colors } pub fn gen_second_six(colors: Vec<pastel::Color>, mix: f64, theme: bool) -> Vec<pastel::Color> { let mut second_colors: Vec<pastel::Color> = Vec::new(); for col in colors.iter() { let new_col = if ! 
theme { col.lighten(mix) } else { col.darken(mix) }; second_colors.push(new_col) } second_colors } pub fn gen_shades(colors: Vec<&pastel::Color>, number: u8) -> Vec<pastel::Color>{ let mut color_scale = pastel::ColorScale::empty(); let mut gradients: Vec<pastel::Color> = Vec::new(); for (i, color) in colors.iter().enumerate() { let position = pastel::Fraction::from(i as f64 / (colors.len() as f64 - 1.0)); color_scale.add_stop(color.clone().clone(), position); } let mix = Box::new(|c1: &pastel::Color, c2: &pastel::Color, f: pastel::Fraction| c1.mix::<pastel::Lab>(c2, f)); let count = number + 2; for i in 0..count { let position = pastel::Fraction::from(i as f64 / (count as f64 - 1.0)); let color = color_scale.sample(position, &mix).expect("gradient color"); if i == 0 || i == count-1 { continue; } gradients.push(color) } gradients } pub fn gen_gradients(ac: pastel::Color, col0: pastel::Color, col15: pastel::Color, black: pastel::Color, white: pastel::Color) -> Vec<pastel::Color> { let mut gradients: Vec<pastel::Color> = Vec::new(); gradients.push(black.clone()); let blacks = gen_shades(vec![&black, &col0], 3); gradients.extend(blacks); let middle = gen_shades(vec![&col0, &ac, &col15], 16); gradients.extend(middle); let whites = gen_shades(vec![&col15, &white], 3); gradients.extend(whites); gradients.push(white.clone()); gradients } pub fn get_all_colors(scheme: &mut SCHEME) -> Vec<pastel::Color> { let theme = if scheme.theme().as_ref().unwrap_or(&"dark".to_string()) == "light" { false } else { true }; let mut palette: Vec<pastel::Color> = Vec::new(); if let Some(ref cols) = scheme.pigments() { for c in cols.iter() { palette.push(pastel::Color::from_hex(c)); } } let main = gen_main_six(&palette); let mut black = pastel::Color::from_rgb(0,0,0); let mut white = pastel::Color::from_rgb(255,255,255); if !theme { white = pastel::Color::from_rgb(0,0,0); black = pastel::Color::from_rgb(255,255,255); } let prime = gen_prime_six(main.clone(), 0.1, theme); let acc = 
prime.get(0).unwrap().clone(); let (col0, col15) = get_black_white(&acc, 0.08, 0.12, theme); let (col7, col8) = get_two_grays(&acc, 0.2, theme); let second = gen_second_six(main.clone(), 0.1, theme); let gradients = gen_gradients(acc.clone(), col0.clone(), col15.clone(), black, white); let mut colors: Vec<pastel::Color> = Vec::new(); colors.push(col0.clone()); colors.extend(prime); colors.push(col7); colors.push(col8); colors.extend(second); colors.push(col15.clone()); for _ in 0..18 { let rng: &mut dyn RngCore = &mut thread_rng(); let hue = rng.gen::<f64>() * 360.0; let saturation = 0.2 + 0.6 * rng.gen::<f64>(); let lightness = 0.3 + 0.4 * rng.gen::<f64>(); colors.extend( gen_shades(vec![&col0, &pastel::Color::from_hsl(hue, saturation, lightness), &col15], 12) ); } colors.extend(gradients); colors }
use rand::prelude::*; use crate::scheme::*; pub fn gen_main_six(col: &Vec<pastel::Color>) -> Vec<pastel::Color> { let mut colors = col.clone(); colors.retain(|x| x.to_lab().l > 20.0); colors.retain(|x| x.to_lab().l < 80.0); colors.sort_by_key(|c| (c.to_lch().l) as i32); colors.reverse(); let mut i = 0; while colors.len() < 6 { colors.push(pastel::Color::complementary(&colors[i])); i = i +1; } let mut main_colors: Vec<pastel::Color> = Vec::new(); for i in 0..6 { main_colors.push(colors[i].clone()) } main_colors.sort_by_key(|c| (c.to_lch().c) as i32); main_colors.reverse(); main_colors } pub fn get_black_white(ac: &pastel::Color, black_mix: f64, white_mix: f64, theme: bool) -> (pastel::Color, pastel::Color) { let black = pastel::Color::from_rgb(0,0,0); let white = pastel::Color::from_rgb(255,255,255); let dark = black.mix::<pastel::RGBA<f64>>(&ac, pastel::Fraction::from(black_mix)); let light = white.mix::<pastel::RGBA<f64>>(&ac, pastel::Fraction::from(white_mix)); if theme { (dark, light) } else { (light, dark) } } pub fn get_two_grays(ac: &pastel::Color, mix: f64, theme: bool) -> (pastel::Color, pastel::Color) { let darker = pastel::Color::from_rgb(100,100,100); let lighter = pastel::Color::from_rgb(170,170,170); let dark = darker.mix::<pastel::RGBA<f64>>(&ac, pastel::Fraction::from(mix)); let light = lighter.mix::<pastel::RGBA<f64>>(&ac, pastel::Fraction::from(mix)); if theme { (dark, light) } else { (light, dark) } } pub fn gen_prime_six(colors: Vec<pastel::Color>, mix: f64, theme: bool) -> Vec<pastel::Color> { let mut second_colors: Vec<pastel::Color> = Vec::new(); for col in colors.iter() { let new_col = if theme { col.lighten(mix) } else { col.darken(mix) }; second_colors.push(new_col) } second_colors } pub fn gen_second_six(colors: Vec<pastel::Color>, mix: f64, theme: bool) -> Vec<pastel::Color> { let mut second_colors: Vec<pastel::Color> = Vec::new(); for col in colors.iter() { let new_col = if ! 
theme { col.lighten(mix) } else { col.darken(mix) }; second_colors.push(new_col) } second_colors } pub fn gen_shades(colors: Vec<&pastel::Color>, number: u8) -> Vec<pastel::Color>{ let mut color_scale = pastel::ColorScale::empty(); let mut gradients: Vec<pastel::Color> = Vec::new(); for (i, color) in colors.iter().enumerate() { let position = pastel::Fraction::from(i as f64 / (colors.len() as f64 - 1.0)); color_scale.add_stop(color.clone().clone(), position); } let mix = Box::new(|c1: &pastel::Color, c2: &pastel::Color, f: pastel::Fraction| c1.mix::<pastel::Lab>(c2, f)); let count = number + 2; for i in 0..count { let position = pastel::Fraction::from(i as f64 / (count as f64 - 1.0)); let color = color_scale.sample(position, &mix).expect("gradient color"); if i == 0 || i == count-1 { continue; } gradients.push(color) } gradients } pub fn gen_gradients(ac: pastel::Color, col0: pastel::Color, col15: pastel::Color, black: pastel::Color, white: pastel::Color) -> Vec<pastel::Color> { let mut gradients: Vec<pastel::Color> = Vec::new(); gradients.push(black.clone()); let blacks = gen_shades(vec![&black, &col0], 3); gradients.extend(blacks); let middle = gen_shades(vec![&col0, &ac, &col15], 16); gradients.extend(middle); let whites = gen_shades(vec![&col15, &white], 3); gradients.extend(whites); gradients.push(white.clone()); gradients } pub fn get_all_colors(scheme: &mut SCHEME) -> Vec<pastel::Color> { let theme = if scheme.theme().as_ref().unwrap_or(&"dark".to_string()) == "light" { false } else { true }; let mut palette: Vec<pastel::Color> = Vec::new(); if l
et Some(ref cols) = scheme.pigments() { for c in cols.iter() { palette.push(pastel::Color::from_hex(c)); } } let main = gen_main_six(&palette); let mut black = pastel::Color::from_rgb(0,0,0); let mut white = pastel::Color::from_rgb(255,255,255); if !theme { white = pastel::Color::from_rgb(0,0,0); black = pastel::Color::from_rgb(255,255,255); } let prime = gen_prime_six(main.clone(), 0.1, theme); let acc = prime.get(0).unwrap().clone(); let (col0, col15) = get_black_white(&acc, 0.08, 0.12, theme); let (col7, col8) = get_two_grays(&acc, 0.2, theme); let second = gen_second_six(main.clone(), 0.1, theme); let gradients = gen_gradients(acc.clone(), col0.clone(), col15.clone(), black, white); let mut colors: Vec<pastel::Color> = Vec::new(); colors.push(col0.clone()); colors.extend(prime); colors.push(col7); colors.push(col8); colors.extend(second); colors.push(col15.clone()); for _ in 0..18 { let rng: &mut dyn RngCore = &mut thread_rng(); let hue = rng.gen::<f64>() * 360.0; let saturation = 0.2 + 0.6 * rng.gen::<f64>(); let lightness = 0.3 + 0.4 * rng.gen::<f64>(); colors.extend( gen_shades(vec![&col0, &pastel::Color::from_hsl(hue, saturation, lightness), &col15], 12) ); } colors.extend(gradients); colors }
function_block-function_prefixed
[ { "content": "pub fn write_colors(scheme: &mut SCHEME, old: bool) -> Result<()> {\n\n if old {\n\n if let Some(cachepath) = scheme.cache().clone() {\n\n let mut palette_temp = PathBuf::from(&cachepath); palette_temp.push(\"palette\");\n\n scheme.set_pigments(Some(text::lines_to_vec(palette_temp)));\n\n\n\n let mut wall_temp = PathBuf::from(&cachepath); wall_temp.push(\"wallpaper\");\n\n if let Ok(content) = text::file_to_string(wall_temp) {\n\n scheme.set_image(Some(content));\n\n }\n\n\n\n let mut theme_temp = PathBuf::from(&cachepath); theme_temp.push(\"theme\");\n\n if let Ok(content) = text::file_to_string(theme_temp) {\n\n scheme.set_theme(Some(content));\n\n }\n\n }\n\n } else {\n\n let wallpaper = scheme.walldir().clone().unwrap();\n\n if scheme.image().is_none() {\n\n scheme.set_image(Some(text::random_image(&wallpaper)));\n", "file_path": "src/gen/apply.rs", "rank": 5, "score": 221443.50272452686 }, { "content": "pub fn output_to_json(scheme: &mut SCHEME, map: bool) -> Value {\n\n let mut color_map = Map::new();\n\n let mut color_vec = Vec::new();\n\n if let Some(colors) = scheme.colors() {\n\n for (key, color) in colors.iter().enumerate() {\n\n let name = \"color\".to_string() + &key.to_string();\n\n color_map.insert(name, pastel::HEX::from(color).to_string());\n\n color_vec.push(color.to_rgb_hex_string(true));\n\n }\n\n }\n\n let map_profile = ProfileMap {\n\n wallpaper: scheme.image().clone().unwrap(),\n\n theme: scheme.theme().clone().unwrap(),\n\n special: Special {\n\n background: color_vec[0].clone(),\n\n foreground: color_vec[15].clone(),\n\n cursor: color_vec[1].clone()\n\n },\n\n colors: color_map\n\n };\n", "file_path": "src/gen/write.rs", "rank": 7, "score": 199283.73421313218 }, { "content": "pub fn concatinate(scheme: &mut SCHEME) {\n\n let _home_path: PathBuf = dirs::home_dir().expect(\n\n &format!(\"{} {}\", \"error:\".red().bold(), \"Path of home is impossible to get\"));\n\n\n\n let mut lule_configs: PathBuf = 
dirs::config_dir().expect(\n\n &format!(\"{} {}\", \"error:\".red().bold(), \"Path for configs is impossible to get\"));\n\n lule_configs.push(\"lule\");\n\n\n\n let mut lule_cache: PathBuf = dirs::cache_dir().expect(\n\n &format!(\"{} {}\", \"error:\".red().bold(), \"Path for configs is impossible to get\"));\n\n lule_cache.push(\"lule\");\n\n\n\n\n\n\n\n scheme.set_theme(Some(\"dark\".to_string()));\n\n scheme.set_config(Some(lule_configs.to_str().unwrap().to_string()));\n\n scheme.set_cache(Some(lule_cache.to_str().unwrap().to_string()));\n\n scheme.set_palette(Some(\"pigment\".to_string()));\n\n}\n", "file_path": "src/var/defs.rs", "rank": 9, "score": 173162.66976574948 }, { "content": "pub fn concatinate(scheme: &mut SCHEME) {\n\n\n\n if atty::isnt(atty::Stream::Stdin) {\n\n if let Ok(input) = read_stdin() {\n\n if let Ok(sh) = make_scheme(input) {\n\n *scheme = sh;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/var/pipe.rs", "rank": 10, "score": 173162.66976574948 }, { "content": "pub fn concatinate(scheme: &mut SCHEME) {\n\n\n\n let lule_scheme = text::pather(vec![\"lule_scheme\"], std::env::temp_dir());\n\n if let Ok(scheme_string) = text::file_to_string(lule_scheme) {\n\n if let Ok(sh) = make_scheme(scheme_string) {\n\n *scheme = sh;\n\n }\n\n }\n\n scheme.set_image(None);\n\n}\n\n\n", "file_path": "src/var/temp.rs", "rank": 11, "score": 173162.66976574948 }, { "content": "pub fn concatinate(scheme: &mut SCHEME) {\n\n let env_lule_w = std::env::var(\"LULE_W\");\n\n if env_lule_w.is_ok(){\n\n scheme.set_walldir(Some(env_lule_w.unwrap()));\n\n }\n\n\n\n let env_lule_c = std::env::var(\"LULE_C\");\n\n if env_lule_c.is_ok(){\n\n scheme.set_config(Some(env_lule_c.unwrap()));\n\n }\n\n\n\n let env_lule_s = std::env::var(\"LULE_S\");\n\n if env_lule_s.is_ok(){\n\n let mut newvec = vec![env_lule_s.unwrap()];\n\n match scheme.scripts() {\n\n None => {\n\n scheme.set_scripts(Some(newvec));\n\n }\n\n Some(vec) => {\n\n newvec.append(&mut vec.clone());\n", 
"file_path": "src/var/envi.rs", "rank": 12, "score": 173162.66976574948 }, { "content": "pub fn run(app: &clap::ArgMatches, scheme: &mut SCHEME) -> Result<()> {\n\n let sub = app.subcommand_matches(\"colors\").unwrap();\n\n var::concatinate(app, scheme);\n\n\n\n\n\n scheme.set_scripts(None);\n\n if sub.is_present(\"gen\") {\n\n apply::write_colors(scheme, false)?;\n\n }\n\n\n\n\n\n if let Some(cachepath) = scheme.cache().clone() {\n\n let mut color_temp = PathBuf::from(&cachepath);\n\n color_temp.push(\"colors\");\n\n if let Ok(content) = palette::colors_from_file(color_temp) {\n\n scheme.set_colors(Some(content));\n\n }\n\n\n\n let mut wall_temp = PathBuf::from(&cachepath);\n\n wall_temp.push(\"wallpaper\");\n", "file_path": "src/cli/colors.rs", "rank": 13, "score": 173038.08722186327 }, { "content": "pub fn command_execution(scheme: &mut SCHEME) {\n\n if let Some(scripts) = scheme.scripts() {\n\n for s in scripts.iter() {\n\n if std::fs::metadata(&s).is_ok() {\n\n external_command(s);\n\n println!(\"running: {}\", s)\n\n } else {\n\n //TODO: better error handle\n\n println!(\"{} is not a valid file\", s )\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/gen/execute.rs", "rank": 14, "score": 170195.60027889497 }, { "content": "pub fn pattern_gneration(scheme: &mut SCHEME) -> Result<()> {\n\n\n\n if let Some(patterns) = scheme.patterns() {\n\n for p in patterns.iter() {\n\n if std::fs::metadata(&p.0).is_ok() && std::fs::metadata(&p.1).is_ok() {\n\n generate_template(PathBuf::from(&p.0), PathBuf::from(&p.1), scheme)?;\n\n println!(\"generating :{} into: {}\", p.0, p.1)\n\n } else {\n\n //TODO: better error handle\n\n println!(\"{} or {} is not a valid file\", p.0, p.1)\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/gen/templ.rs", "rank": 15, "score": 165082.95076111352 }, { "content": "pub fn concatinate(app: &clap::ArgMatches, scheme: &mut SCHEME) {\n\n temp::concatinate(scheme);\n\n defs::concatinate(scheme);\n\n envi::concatinate(scheme);\n\n 
args::concatinate(app, scheme);\n\n pipe::concatinate(scheme);\n\n\n\n if let Some(s) = scheme.scripts() {\n\n let mut scripts = s.to_vec();\n\n scripts.dedup();\n\n scheme.set_scripts(Some(scripts));\n\n }\n\n\n\n if scheme.image().is_none() && scheme.walldir().is_none() {\n\n eprintln!(\"{} {} {} {}\", \"error:\".red().bold(), \"Environment variable\", \"'$LULE_W'\".yellow(), \"is empty\");\n\n eprintln!(\"{} {} {} {}\", \"error:\".red().bold(), \"Argument option\", \"'--wallpath'\".yellow(), \"is not set\");\n\n eprintln!(\"{} {} {} {}\", \"error:\".red().bold(), \"Image argument\", \"'--image'\".yellow(), \"is not given\");\n\n eprintln!(\"\\n{}\\n\\t{}\\n\\n{} {}\", \"USAGE\".yellow(), \"lule help <subcommands>...\", \n\n \"For more information try\", \"--help\".blue() );\n\n std::process::exit(1);\n\n }\n\n}\n", "file_path": "src/var.rs", "rank": 16, "score": 158265.6304616036 }, { "content": "pub fn write_cache_json(scheme: &mut SCHEME, values: Value) {\n\n let cache_path = match scheme.cache() {\n\n Some(value) => value,\n\n None => \"\"\n\n };\n\n let cache_json = text::pather(vec![\"colors.json\"], PathBuf::from(cache_path));\n\n let json_out = serde_json::to_string_pretty(&values).unwrap();\n\n text::write_to_file(cache_json, json_out.as_bytes());\n\n}\n\n\n", "file_path": "src/gen/write.rs", "rank": 17, "score": 157777.80565013736 }, { "content": "pub fn concatinate(app: &clap::ArgMatches, scheme: &mut SCHEME) {\n\n\n\n if let Some(_) = app.values_of(\"script\") {\n\n let vals: Vec<&str> = app.values_of(\"script\").unwrap().collect();\n\n let mut scripts = Vec::new();\n\n if let Some(s) = scheme.scripts() {\n\n scripts = s.to_vec();\n\n }\n\n for val in vals {\n\n scripts.push(val.to_string())\n\n }\n\n scheme.set_scripts(Some(scripts));\n\n }\n\n\n\n if let Some(_) = app.values_of(\"pattern\") {\n\n let vals: Vec<&str> = app.values_of(\"pattern\").unwrap().collect();\n\n let mut patterns = Vec::new();\n\n for val in vals {\n\n let s: Vec<&str> = 
val.split_terminator(':').collect();\n\n // TODO: better error\n", "file_path": "src/var/args.rs", "rank": 18, "score": 155762.1322900703 }, { "content": "pub fn run(app: &clap::ArgMatches, scheme: &mut SCHEME) -> Result<()> {\n\n test_colors(app, scheme)?;\n\n\n\n templ::pattern_gneration(scheme)?;\n\n\n\n Ok(())\n\n}\n\n\n\n\n", "file_path": "src/cli/test.rs", "rank": 20, "score": 151500.39318321153 }, { "content": "pub fn run(app: &clap::ArgMatches, scheme: &mut SCHEME) -> Result<()> {\n\n // let sub = app.subcommand_matches(\"config\").unwrap();\n\n var::concatinate(app, scheme);\n\n\n\n\n\n let scheme_json = serde_json::to_value(&scheme).unwrap();\n\n let format_scheme = format!(\"{}\", scheme_json);\n\n if atty::isnt(atty::Stream::Stdout) {\n\n println!(\"{}\", scheme_json);\n\n } else {\n\n let mut pipe_name = std::env::temp_dir();\n\n pipe_name.push(\"lule_pipe\");\n\n text::write_to_file(pipe_name, format_scheme.as_bytes());\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cli/config.rs", "rank": 21, "score": 151500.39318321153 }, { "content": "pub fn run(app: &clap::ArgMatches, scheme: &mut SCHEME) -> Result<()> {\n\n let sub = app.subcommand_matches(\"daemon\").unwrap();\n\n var::concatinate(app, scheme);\n\n\n\n\n\n if atty::isnt(atty::Stream::Stdout) {\n\n println!(\"{}\", \"you cant pipe out form this deamon\");\n\n } else {\n\n if let Some(arg) = sub.value_of(\"action\") {\n\n let mut lule_pipe = std::env::temp_dir(); lule_pipe.push(\"lule_pipe\");\n\n if arg == \"start\" {\n\n deamoned(scheme)?;\n\n }\n\n if arg == \"next\" {\n\n text::write_to_file(lule_pipe.clone(), \"stop\".as_bytes());\n\n }\n\n if arg == \"stop\" {\n\n text::write_to_file(lule_pipe.clone(), \"stop\".as_bytes());\n\n }\n\n if arg == \"detach\" {\n", "file_path": "src/cli/daemon.rs", "rank": 22, "score": 151500.39318321153 }, { "content": "pub fn run(app: &clap::ArgMatches, scheme: &mut SCHEME) -> Result<()> {\n\n let sub = app.subcommand_matches(\"create\").unwrap();\n\n 
var::concatinate(app, scheme);\n\n\n\n if atty::isnt(atty::Stream::Stdout) {\n\n // write::write_temp(&scheme);\n\n // println!(\"{}\", &values);\n\n } else {\n\n if let Some(arg) = sub.value_of(\"action\") {\n\n if arg == \"set\" {\n\n apply::write_colors(scheme, false)?;\n\n }\n\n if arg == \"regen\" {\n\n apply::write_colors(scheme, true)?;\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/cli/create.rs", "rank": 23, "score": 151500.39318321153 }, { "content": "// * Calculates Delta E(1994) between two colors\n\npub fn _cie94(color0: &pastel::Lab, color: &pastel::Lab) -> f64 {\n\n // for more info see: https://opentextbc.ca/graphicdesign/chapter/4-4-lab-colour-space-and-delta-e-measurements\n\n let xc1 = (color0.a.powi(2) + color0.b.powi(2)).sqrt();\n\n let xc2 = (color.a.powi(2) + color.b.powi(2)).sqrt();\n\n let xdl = color.l - color0.l;\n\n let mut xdc = xc2 - xc1;\n\n let xde = ( (color0.l - color.l).powi(2) + (color0.a - color.a).powi(2) + (color0.b - color.b).powi(2) ).sqrt();\n\n let mut xdh = xde.powi(2) - xdl.powi(2) - xdc.powi(2);\n\n xdh = if xdh > 0.0 { xdh.sqrt() } else { 0.0 };\n\n let xsc = 1.0 + 0.045 * xc1;\n\n let xsh = 1.0 + 0.015 * xc1;\n\n xdc = xdc / xsc;\n\n xdh = xdh / xsh;\n\n\n\n return ( xdl.powi(2) + xdc.powi(2) + xdh.powi(2) ).sqrt();\n\n}\n\n\n\n\n", "file_path": "src/gen/kmeans.rs", "rank": 24, "score": 143687.15547142093 }, { "content": "pub fn nearest(color: &pastel::Lab, colors: &Vec<pastel::Lab>) -> (usize, f64) {\n\n return colors\n\n .iter()\n\n .map(|c| pastel::delta_e::cie76(color, c))\n\n .enumerate()\n\n .min_by(|(_, a), (_, b)| a.partial_cmp(&b).expect(\"NaN encountered\"))\n\n .unwrap();\n\n}\n\n\n", "file_path": "src/gen/kmeans.rs", "rank": 25, "score": 143499.89964894686 }, { "content": "fn test_colors(app: &clap::ArgMatches, scheme: &mut SCHEME) -> Result<()> {\n\n // let mut pipe_name = std::env::temp_dir();\n\n // pipe_name.push(\"lule_pipe\");\n\n\n\n var::defs::concatinate(scheme);\n\n 
var::envi::concatinate(scheme);\n\n var::args::concatinate(app, scheme);\n\n var::pipe::concatinate(scheme);\n\n\n\n\n\n let wallpaper = scheme.walldir().clone().unwrap();\n\n if scheme.image().is_none() {\n\n scheme.set_image(Some(text::random_image(&wallpaper)));\n\n }\n\n\n\n let palette = palette::palette_from_image(scheme.image().clone().unwrap());\n\n scheme.set_pigments(Some(palette.clone()));\n\n\n\n let allcolors = generate::get_all_colors(scheme);\n\n scheme.set_colors(Some(allcolors));\n", "file_path": "src/cli/test.rs", "rank": 26, "score": 140016.44105853068 }, { "content": "pub fn show_pastel_colors(scheme: &SCHEME, colrange: Range::<usize>) {\n\n let stdout = std::io::stdout();\n\n let mut stdout_lock_handle = stdout.lock();\n\n\n\n for i in colrange {\n\n canvas::show_color(&mut stdout_lock_handle, ansi::Mode::TrueColor, &scheme.colors().clone().unwrap()[i], i).ok();\n\n }\n\n}\n\n\n", "file_path": "src/show/format.rs", "rank": 27, "score": 137336.968320868 }, { "content": "fn deamoned(scheme: &mut SCHEME) -> Result<()> {\n\n let mut lule_pipe = std::env::temp_dir(); lule_pipe.push(\"lule_pipe\");\n\n std::fs::remove_file(lule_pipe.clone()).ok();\n\n let (pipetx, piperx) = channel::<String>();\n\n thread::spawn(move|| { read_pipe(lule_pipe, pipetx); });\n\n\n\n let (timetx, timerx) = channel::<bool>();\n\n let timer = scheme.looop().unwrap().clone();\n\n thread::spawn(move || { time_to_sleep(timer, timetx ) });\n\n\n\n apply::write_colors(scheme, false)?;\n\n loop {\n\n let jsonified = serde_json::to_value(&scheme).unwrap();\n\n println!(\"{}\", serde_json::to_string_pretty(&jsonified).unwrap());\n\n 'inner: loop {\n\n if let Ok(content) = piperx.try_recv() {\n\n if let Ok(profile) = write::json_to_scheme(content.clone()) {\n\n scheme.modi(&mut profile.clone());\n\n println!(\"{}\", scheme.theme().clone().unwrap());\n\n apply::write_colors(scheme, false)?;\n", "file_path": "src/cli/daemon.rs", "rank": 28, "score": 133140.640802397 }, { "content": 
"pub fn show_colors(scheme: &SCHEME, colrange: Range::<usize>, padding: usize) {\n\n let colors = scheme.colors().clone().unwrap();\n\n for i in colrange {\n\n let val = if true { format!(\" {:#03} \", i) } else { format!(\"{}{}{}\",\n\n \" \".repeat(padding),\n\n colors[i].to_rgb_hex_string(true), \n\n \" \".repeat(padding))\n\n };\n\n if (i % 12 == 4 && i > 16) || (i == 16 || i == 8) { println!() };\n\n if i == 16 || i == 232 { println!() };\n\n print!(\"{}\",\n\n val.on_truecolor(\n\n colors[i].to_rgba().r.into(),\n\n colors[i].to_rgba().g.into(), \n\n colors[i].to_rgba().b.into()\n\n ).color( if colors[i].to_lab().l < 30.0 { \"white\" } else { \"black\" } )\n\n );\n\n }\n\n println!();\n\n}\n\n\n", "file_path": "src/show/format.rs", "rank": 29, "score": 132488.17823385942 }, { "content": "pub fn similar_colors(color: &Color) -> Vec<&NamedColor> {\n\n let mut colors: Vec<&NamedColor> = NAMED_COLORS.iter().collect();\n\n colors.sort_by_key(|nc| (1000.0 * nc.color.distance_delta_e_ciede2000(&color)) as i32);\n\n colors.dedup_by(|n1, n2| n1.color == n2.color);\n\n colors\n\n}\n\n\n", "file_path": "src/show/canvas.rs", "rank": 30, "score": 128044.92411809243 }, { "content": "pub fn write_temp(scheme: &SCHEME) {\n\n let mut record = Vec::new();\n\n if let Some(colors) = scheme.colors() {\n\n for color in colors.iter() {\n\n record.push(format!(\"{}\", color.to_rgb_hex_string(true)));\n\n }\n\n text::write_temp_file(\"lule_colors\", record.join(\"\\n\").as_bytes());\n\n }\n\n if let Some(wallpaper) = scheme.image() {\n\n text::write_temp_file(\"lule_wallpaper\", wallpaper.as_bytes());\n\n }\n\n if let Some(theme) = scheme.theme() {\n\n text::write_temp_file(\"lule_theme\", theme.as_bytes());\n\n }\n\n let scheme_json = serde_json::to_value(&scheme).unwrap();\n\n let format_scheme = format!(\"{}\", scheme_json);\n\n text::write_temp_file(\"lule_scheme\", format_scheme.as_bytes());\n\n}\n\n\n", "file_path": "src/gen/write.rs", "rank": 31, "score": 127549.75470619627 }, 
{ "content": "pub fn write_cache(scheme: &SCHEME) {\n\n let cache_path = match scheme.cache() {\n\n Some(value) => value,\n\n None => \"\"\n\n };\n\n\n\n let lule_colors = text::pather(vec![\"lule_colors\"], env::temp_dir());\n\n let colors = text::pather(vec![\"colors\"], PathBuf::from(cache_path));\n\n text::copy_to(lule_colors, colors);\n\n\n\n let lule_wallpaper = text::pather(vec![\"lule_wallpaper\"], env::temp_dir());\n\n let wallpaper = text::pather(vec![\"wallpaper\"], PathBuf::from(cache_path));\n\n text::copy_to(lule_wallpaper, wallpaper);\n\n\n\n let lule_theme = text::pather(vec![\"lule_theme\"], env::temp_dir());\n\n let theme = text::pather(vec![\"theme\"], PathBuf::from(cache_path));\n\n text::copy_to(lule_theme, theme);\n\n\n\n let lule_palette = text::pather(vec![\"lule_palette\"], env::temp_dir());\n\n let theme = text::pather(vec![\"palette\"], PathBuf::from(cache_path));\n\n text::copy_to(lule_palette, theme);\n\n}\n\n\n", "file_path": "src/gen/write.rs", "rank": 32, "score": 127549.75470619625 }, { "content": "pub fn show_color(handle: &mut dyn Write, mode: ansi::Mode, color: &Color, id: usize) -> Result<(), Box<dyn std::error::Error>> {\n\n let checkerboard_size: usize = 16;\n\n let color_panel_size: usize = 12;\n\n\n\n let checkerboard_position_y: usize = 0;\n\n let checkerboard_position_x: usize = 2;\n\n let color_panel_position_y: usize =\n\n checkerboard_position_y + (checkerboard_size - color_panel_size) / 2;\n\n let color_panel_position_x: usize =\n\n checkerboard_position_x + (checkerboard_size - color_panel_size) / 2;\n\n let text_position_x: usize = checkerboard_size + 2 * checkerboard_position_x;\n\n let text_position_y: usize = 0;\n\n\n\n let mut canvas = Canvas::new(checkerboard_size, 51, ansi::Brush::from_mode(Some(mode)));\n\n canvas.draw_checkerboard(\n\n checkerboard_position_y,\n\n checkerboard_position_x,\n\n checkerboard_size,\n\n checkerboard_size,\n\n &Color::graytone(0.94),\n", "file_path": "src/show/canvas.rs", "rank": 
33, "score": 126198.34266191925 }, { "content": "pub fn json_to_scheme(data: String) -> Result<SCHEME> {\n\n let scheme: SCHEME = serde_json::from_str(&data).context(\"something got fucked-up reaading json\")?;\n\n Ok(scheme)\n\n}\n", "file_path": "src/gen/write.rs", "rank": 34, "score": 114648.7824839711 }, { "content": "pub fn colors_from_file(filename: PathBuf) -> Result<Vec<pastel::Color>, Box<dyn std::error::Error>> {\n\n let mut colors = Vec::new();\n\n for line in text::lines_to_vec(filename) {\n\n colors.push(pastel::Color::from_hex(&line));\n\n }\n\n Ok(colors)\n\n}\n", "file_path": "src/gen/palette.rs", "rank": 35, "score": 114413.19795228366 }, { "content": "pub fn show_specified_colors(colors: Vec<pastel::Color>, padding: usize) {\n\n for i in 0..colors.len() {\n\n let val = format!(\"{}{}{}\",\n\n \" \".repeat(padding),\n\n colors[i].to_rgb_hex_string(true), \n\n \" \".repeat(padding));\n\n if (i % 12 == 4 && i > 16) || (i == 16 || i == 8) { println!() };\n\n print!(\"{}\",\n\n val.on_truecolor(\n\n colors[i].to_rgba().r.into(),\n\n colors[i].to_rgba().g.into(), \n\n colors[i].to_rgba().b.into()\n\n ).color( if colors[i].to_lab().l < 30.0 { \"white\" } else { \"black\" } )\n\n );\n\n }\n\n}\n", "file_path": "src/show/format.rs", "rank": 36, "score": 112839.19693078971 }, { "content": "pub fn pigments(image_path: &str, count: u8, iters: Option<u16>) -> Result<Vec<(pastel::Lab, f32)>, Box<dyn std::error::Error>> {\n\n let mut img;\n\n img = image::open(image_path)?;\n\n img = img.resize(512, 512, image::imageops::FilterType::CatmullRom);\n\n\n\n let pixels: Vec<pastel::Lab> = img\n\n .pixels()\n\n .map(|(_, _, pix)| pastel::Color::from_rgba(pix[0], pix[1], pix[2], 1.0).to_lab())\n\n .collect();\n\n\n\n let mut output = palette(&pixels, count, iters);\n\n output.sort_by(|(_, a), (_, b)| b.partial_cmp(a).unwrap());\n\n return Ok(output);\n\n}\n", "file_path": "src/gen/kmeans.rs", "rank": 37, "score": 109512.59095586627 }, { "content": "pub fn 
write_temp_file(filename: &str, content: &[u8]) {\n\n let mut file_name = env::temp_dir();\n\n file_name.push(filename);\n\n write_to_file(file_name, content);\n\n}\n\n\n", "file_path": "src/fun/text.rs", "rank": 38, "score": 108444.2717113975 }, { "content": "pub fn write_to_file(filename: PathBuf, content: &[u8]) {\n\n let mut file_name = File::create(filename.clone()).\n\n unwrap_or_else(|err| {\n\n eprintln!(\"{} {} {} {} {}\",\n\n \"error:\".red().bold(),\n\n \"Could not create file\",\n\n filename.as_os_str().to_str().unwrap().yellow(),\n\n \"->\", err);\n\n std::process::exit(1);\n\n });\n\n\n\n file_name.write(content).\n\n unwrap_or_else(|err| {\n\n eprintln!(\"{} {} {} {} {}\",\n\n \"error:\".red().bold(),\n\n \"Could not write into\",\n\n filename.as_os_str().to_str().unwrap().yellow(),\n\n \"->\", err);\n\n std::process::exit(1);\n\n });\n\n}\n\n\n", "file_path": "src/fun/text.rs", "rank": 39, "score": 108444.2717113975 }, { "content": "pub fn build_cli(show_logo: bool) -> App<'static, 'static> {\n\n let logo: String = if show_logo { \"\n\n ▐█\".truecolor(255, 50, 0).to_string()+\"\n\n ▐████▄\".truecolor(255, 50, 0).to_string().as_str()+\"\n\n ████████▄▄▄▄\".truecolor(255, 50, 0).to_string().as_str()+\" ▄▄███\".truecolor(75, 200, 0).to_string().as_str()+\"\n\n ██████████████▄▄\".truecolor(255, 50, 0).to_string().as_str()+\" ▄██████\".truecolor(75, 200, 0).to_string().as_str()+\"\n\n ██████ ████████\".truecolor(255, 50, 0).to_string().as_str()+\" ▄██████████\".truecolor(75, 200, 0).to_string().as_str()+\"\n\n ████████▄▄ ▀██████\".truecolor(255, 50, 0).to_string().as_str()+\" ▄█████████████\".truecolor(75, 200, 0).to_string().as_str()+\"\n\n ▐██████████ ▀█████\".truecolor(255, 50, 0).to_string().as_str()+\" ████████ █████\".truecolor(75, 200, 0).to_string().as_str()+\"\n\n ███████████▄ ████\".truecolor(255, 50, 0).to_string().as_str()+\" ██████████ ███████\".truecolor(75, 200, 0).to_string().as_str()+\"\n\n ▀███████████ ▐█▌\".truecolor(255, 50, 
0).to_string().as_str()+\" ▐██████████▀ ▐██████\".truecolor(75, 200, 0).to_string().as_str()+\"\n\n ▄█████▄▄\".truecolor(160, 0, 200).to_string().as_str()+\" ▀█████████▌ █\".truecolor(255, 50, 0).to_string().as_str()+\" █████████ ██████\".truecolor(75, 200, 0).to_string().as_str()+\"\n\n ▄██████████████▄\".truecolor(160, 0, 200).to_string().as_str()+\" ▀███████ █\".truecolor(255, 50, 0).to_string().as_str()+\" ████████ ▄█████\".truecolor(75, 200, 0).to_string().as_str()+\"\n\n ▄██████████▀▀▀▀▀████▄\".truecolor(160, 0, 200).to_string().as_str()+\" ▀████\".truecolor(255, 50, 0).to_string().as_str()+\" █████▀ ▄████▀\".truecolor(75, 200, 0).to_string().as_str()+\"\n\n ▄█████████▀▀ ▄▄▄▄ ▀\".truecolor(160, 0, 200).to_string().as_str()+\" ▀██\".truecolor(255, 50, 0).to_string().as_str()+\" ███▀ ▄██▀\".truecolor(75, 200, 0).to_string().as_str()+\"\n\n ██████████ █████████████▄\".truecolor(160, 0, 200).to_string().as_str()+\" ▌\".truecolor(255, 50, 0).to_string().as_str()+\" ▄████████████▄\".truecolor(0, 120, 200).to_string().as_str()+\"\n\n ████████████████████████████▄\".truecolor(160, 0, 200).to_string().as_str()+\" ▄▄▄▄▄▄████████████████████▄\".truecolor(0, 120, 200).to_string().as_str()+\"\n\n ▀██████████████████████▀▀▀▀\".truecolor(160, 0, 200).to_string().as_str()+\" ▀███████████████████████████▄\".truecolor(0, 120, 200).to_string().as_str()+\"\n\n ▀█████████████▀\".truecolor(160, 0, 200).to_string().as_str()+\" █\".truecolor(200, 160, 0).to_string().as_str()+\" ▀████████████▀▀▀███████████\".truecolor(0, 120, 200).to_string().as_str()+\"\n\n ▄▄█ ▄███\".truecolor(0, 200, 160).to_string().as_str()+\" ██\".truecolor(200, 160, 0).to_string().as_str()+\" ▄ ▀▀▀▀▀▀▄▄▄██████▀\".truecolor(0, 120, 200).to_string().as_str()+\"\n", "file_path": "src/cli.rs", "rank": 40, "score": 106472.0144725401 }, { "content": "// * K-means++ clustering\n\npub fn palette(pixels: &Vec<pastel::Lab>, k: u8, max_iter: Option<u16>) -> Vec<(pastel::Lab, f32)> {\n\n const TOLERANCE: f64 = 1e-4;\n\n 
let mut rng = rand::thread_rng();\n\n\n\n // Randomly pick the starting cluster center\n\n let i: usize = rng.gen_range(0..pixels.len());\n\n let mut means: Vec<pastel::Lab> = vec![pixels[i].clone()];\n\n\n\n // Pick the remaining (k-1) means\n\n for _ in 0..(k - 1) {\n\n // Calculate the (nearest_distance)^2 for every color in the image\n\n let distances: Vec<f64> = pixels\n\n .par_iter()\n\n .map(|color| (nearest(&color, &means).1).powi(2))\n\n .collect();\n\n\n\n // Create a weighted distribution based on distance^2 -> if error, return the means\n\n let dist = match WeightedIndex::new(&distances) {\n\n Ok(t) => t,\n\n Err(_) => {\n", "file_path": "src/gen/kmeans.rs", "rank": 41, "score": 105470.80237242638 }, { "content": "pub fn display_image(scheme: &SCHEME, width: u32, height: u32) -> Result<(), Box<dyn std::error::Error>> {\n\n let (cols, rows) = size()?;\n\n execute!(\n\n stdout(),\n\n SetSize(10, 10),\n\n ScrollUp(5)\n\n )?;\n\n\n\n let filename = scheme.image().clone().unwrap();\n\n let conf = viuer::Config {\n\n // restore_cursor: true,\n\n // use_kitty: true,\n\n width: Some(width),\n\n height: Some(height),\n\n ..Default::default()\n\n };\n\n\n\n viuer::print_from_file(&filename, &conf)?;\n\n\n\n execute!(stdout(), SetSize(cols, rows))?;\n\n execute!(stdout(), Clear(ClearType::FromCursorDown))?;\n\n Ok(())\n\n}\n", "file_path": "src/show/viuwer.rs", "rank": 42, "score": 98295.13130010429 }, { "content": "pub fn palette_from_image(image: String) -> Vec<String> {\n\n let colors_lab = kmeans::pigments(&image, 16, Some(300))\n\n .unwrap_or_else(|err| {\n\n eprintln!(\"{} {} {}\",\n\n \"error:\".red().bold(), \n\n \"Problem creating palette ->\", \n\n err);\n\n std::process::exit(1);\n\n });\n\n\n\n let mut colors = Vec::new();\n\n for (color, _) in colors_lab.iter() {\n\n let lab_color = pastel::Color::from_lab(\n\n color.l.into(),\n\n color.a.into(),\n\n color.b.into(),\n\n 1.into());\n\n 
colors.push(pastel::Color::from(lab_color.clone()).to_rgb_hex_string(true));\n\n }\n\n colors\n\n}\n\n\n", "file_path": "src/gen/palette.rs", "rank": 43, "score": 96104.273529214 }, { "content": "fn make_scheme(data: String) -> Result<SCHEME> {\n\n let scheme: SCHEME = serde_json::from_str(&data).context(\"something got fucked-up reaading json\")?;\n\n Ok(scheme)\n\n}\n", "file_path": "src/var/temp.rs", "rank": 44, "score": 79040.13843378956 }, { "content": "fn make_scheme(data: String) -> Result<SCHEME> {\n\n let scheme: SCHEME = serde_json::from_str(&data).context(\"something got fucked-up reaading json\")?;\n\n Ok(scheme)\n\n}\n", "file_path": "src/var/pipe.rs", "rank": 45, "score": 79040.13843378956 }, { "content": "fn generate_template(original: PathBuf, replaced: PathBuf, scheme: &SCHEME) -> Result<()> {\n\n\n\n let mut content = String::new();\n\n if let Ok(cont) = text::file_to_string(original) {\n\n content = cont;\n\n }\n\n\n\n // if let Err(e) = templar::Templar::global().parse(&content) {\n\n // println!(\"{}\", e);\n\n // };\n\n\n\n let template = templar::Templar::global().parse(&content)?;\n\n let mut data: templar::Document = templar::Document::default();\n\n if let Some(colors) = scheme.colors() {\n\n for (i, color) in colors.iter().enumerate() {\n\n let name = \"color\".to_string() + &i.to_string();\n\n data[name] = color.to_rgb_hex_string(false).into();\n\n }\n\n data[\"background\"] = colors[0].to_rgb_hex_string(false).into();\n\n data[\"foreground\"] = colors[15].to_rgb_hex_string(false).into();\n", "file_path": "src/gen/templ.rs", "rank": 46, "score": 76140.75731641047 }, { "content": "fn time_to_sleep(time: usize, sender: Sender<bool>) {\n\n loop{\n\n for _ in 0..time {\n\n thread::sleep(time::Duration::from_secs(1));\n\n }\n\n sender.send(true).ok();\n\n }\n\n}\n", "file_path": "src/cli/daemon.rs", "rank": 47, "score": 72154.04677888981 }, { "content": "pub fn vaid_image(path: &str) -> String {\n\n match image::open(path) {\n\n Ok(_) => 
path.to_owned(),\n\n Err(_) => {\n\n eprintln!(\"{} {} {} {}\",\n\n \"error:\".red().bold(), \"Path\",\n\n path.yellow(),\n\n \"is not a valid image file\");\n\n std::process::exit(1);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/fun/text.rs", "rank": 48, "score": 65720.34807338726 }, { "content": "// TODO: check if folder is empty, is valid, exists or has other files than images\n\npub fn random_image(path: &str) -> String {\n\n let mut rng = rand::thread_rng();\n\n let files = fs::read_dir(path).unwrap();\n\n let file = files.choose(&mut rng).unwrap().unwrap();\n\n let filepath = file.path().display().to_string();\n\n vaid_image(&filepath)\n\n}\n\n\n", "file_path": "src/fun/text.rs", "rank": 49, "score": 65720.34807338726 }, { "content": "pub fn lines_to_vec(filename: PathBuf) -> Vec<String> {\n\n // File must exist in current path before this produces output\n\n let mut content = Vec::new();\n\n if let Ok(lines) = read_lines(filename) {\n\n for line in lines {\n\n if let Ok(ip) = line {\n\n content.push(ip)\n\n }\n\n }\n\n }\n\n content\n\n}\n\n\n", "file_path": "src/fun/text.rs", "rank": 50, "score": 62220.52016969948 }, { "content": "pub fn copy_to(dir1: PathBuf, dir2: PathBuf) {\n\n fs::copy(dir1.to_str().unwrap(), dir2.to_str().unwrap());\n\n}\n\n\n", "file_path": "src/fun/text.rs", "rank": 51, "score": 62220.52016969948 }, { "content": "pub fn pather(dirs: Vec<&str>, path: PathBuf) -> PathBuf {\n\n let mut new_path = path.clone();\n\n for s in dirs {\n\n new_path.push(s);\n\n }\n\n new_path\n\n}\n\n\n", "file_path": "src/fun/text.rs", "rank": 52, "score": 58407.20792826435 }, { "content": "pub fn file_to_string(filename: PathBuf) -> Result<String, Box<dyn std::error::Error>> {\n\n let string = file::get_text(filename.to_str().unwrap())?;\n\n Ok(string)\n\n}\n", "file_path": "src/fun/text.rs", "rank": 53, "score": 53500.248321405736 }, { "content": "fn recalculate(colors: &Vec<&pastel::Lab>) -> pastel::Lab {\n\n let mut w_sum = 0.0;\n\n let (mut l, mut a, mut 
b) = (0.0, 0.0, 0.0);\n\n for col in colors.iter() {\n\n w_sum += 1.0;\n\n l += 1.0 * col.l;\n\n a += 1.0 * col.a;\n\n b += 1.0 * col.b;\n\n }\n\n\n\n pastel::Lab {\n\n l: l/w_sum,\n\n a: a/w_sum,\n\n b: b/w_sum,\n\n alpha: 1.0\n\n }\n\n}\n\n\n", "file_path": "src/gen/kmeans.rs", "rank": 54, "score": 48980.53155847069 }, { "content": "fn main() {\n\n let mut scheme = SCHEME::init();\n\n\n\n let show_logo = if env::args().len() > 1 { false } else { true };\n\n\n\n let app = cli::build_cli(show_logo).get_matches();\n\n // var::concatinate(&app, &mut scheme);\n\n\n\n if let Some(subcommand) = app.subcommand_name() {\n\n match subcommand {\n\n \"colors\" => cli::colors::run(&app, &mut scheme),\n\n \"create\" => cli::create::run(&app, &mut scheme),\n\n \"config\" => cli::config::run(&app, &mut scheme),\n\n \"daemon\" => cli::daemon::run(&app, &mut scheme),\n\n \"test\" => cli::test::run(&app, &mut scheme),\n\n _ => Ok(())\n\n }.ok();\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 55, "score": 32490.0493546607 }, { "content": "use colored::*;\n\nuse std::path::PathBuf;\n\n\n\nuse crate::gen::kmeans;\n\nuse crate::fun::text;\n\n\n", "file_path": "src/gen/palette.rs", "rank": 56, "score": 30822.558266431566 }, { "content": " walldir: None,\n\n config: None,\n\n cache: None,\n\n scripts: None,\n\n patterns: None,\n\n looop: None,\n\n theme: None,\n\n palette: None,\n\n sort: None,\n\n saturation: None,\n\n illumination: None,\n\n hue: None,\n\n difference: None,\n\n blend: None,\n\n mixes: None \n\n }\n\n }\n\n pub fn modi(&mut self, new: &SCHEME) -> &Self {\n\n if let Some(value) = new.colors() { self.colors = Some(value.clone()); }\n\n if let Some(value) = new.pigments() { self.pigments = Some(value.clone()); }\n", "file_path": "src/scheme.rs", "rank": 57, "score": 30638.45617788443 }, { "content": "#![allow(dead_code)]\n\n\n\nextern crate getset;\n\nuse getset::{CopyGetters, Getters, MutGetters, Setters};\n\nuse std::collections::HashMap as 
Map;\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, CopyGetters, Getters, MutGetters, Setters)]\n\n#[getset(get = \"pub\", set = \"pub\", get_mut = \"pub\")]\n\npub struct SCHEME {\n\n #[serde(skip)]\n\n colors: Option<Vec<pastel::Color>>,\n\n image: Option<String>,\n\n theme: Option<String>,\n\n pigments: Option<Vec<String>>,\n\n scheme: Option<String>,\n\n walldir: Option<String>,\n\n #[serde(skip)]\n\n config: Option<String>,\n\n #[serde(skip)]\n\n cache: Option<String>,\n", "file_path": "src/scheme.rs", "rank": 58, "score": 30638.428880641244 }, { "content": "\n\n#[derive(Serialize, Deserialize, Debug, Clone, CopyGetters, Getters, MutGetters, Setters)]\n\npub struct Special {\n\n pub background: String,\n\n pub foreground: String,\n\n pub cursor: String\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, CopyGetters, Getters, MutGetters, Setters)]\n\npub struct ProfileMap {\n\n pub wallpaper: String,\n\n pub theme: String,\n\n pub special: Special,\n\n pub colors: Map<String, String>,\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug, Clone, CopyGetters, Getters, MutGetters, Setters)]\n\npub struct ProfileVec {\n\n pub wallpaper: String,\n\n pub theme: String,\n\n pub special: Special,\n\n pub colors: Vec<String>,\n\n}\n", "file_path": "src/scheme.rs", "rank": 59, "score": 30636.309401451916 }, { "content": " scripts: Option<Vec<String>>,\n\n patterns: Option<Vec<(String, String)>>,\n\n looop: Option<usize>,\n\n palette: Option<String>,\n\n sort: Option<String>,\n\n saturation: Option<f32>,\n\n illumination: Option<f32>,\n\n hue: Option<f32>,\n\n difference: Option<f32>,\n\n blend: Option<f32>,\n\n mixes: Option<Map<usize, String>>,\n\n}\n\n\n\nimpl SCHEME {\n\n pub fn init() -> Self {\n\n Self {\n\n colors: None,\n\n pigments: None,\n\n image: None,\n\n scheme: None,\n", "file_path": "src/scheme.rs", "rank": 60, "score": 30634.876368217192 }, { "content": " if let Some(value) = new.image() { self.image = Some(value.clone()); }\n\n if let 
Some(value) = new.scheme() { self.scheme = Some(value.clone()); }\n\n if let Some(value) = new.walldir() { self.walldir = Some(value.clone()); }\n\n if let Some(value) = new.config() { self.config = Some(value.clone()); }\n\n if let Some(value) = new.cache() { self.cache = Some(value.clone()); }\n\n if let Some(value) = new.scripts() { self.scripts = Some(value.clone()); }\n\n if let Some(value) = new.patterns() { self.patterns = Some(value.clone()); }\n\n if let Some(value) = new.theme() { self.theme = Some(value.clone()); }\n\n if let Some(value) = new.palette() { self.palette = Some(value.clone()); }\n\n if let Some(value) = new.sort() { self.sort = Some(value.clone()); }\n\n if let Some(value) = new.saturation() { self.saturation = Some(value.clone()); }\n\n if let Some(value) = new.illumination() { self.illumination = Some(value.clone()); }\n\n if let Some(value) = new.hue() { self.hue = Some(value.clone()); }\n\n if let Some(value) = new.difference() { self.difference = Some(value.clone()); }\n\n if let Some(value) = new.blend() { self.blend = Some(value.clone()); }\n\n if let Some(value) = new.mixes() { self.mixes = Some(value.clone()); }\n\n self\n\n }\n\n}\n\n\n", "file_path": "src/scheme.rs", "rank": 61, "score": 30633.4564111089 }, { "content": " if let Ok(content) = text::file_to_string(wall_temp) {\n\n scheme.set_image(Some(content));\n\n }\n\n\n\n let mut theme_temp = PathBuf::from(&cachepath);\n\n theme_temp.push(\"theme\");\n\n if let Ok(content) = text::file_to_string(theme_temp) {\n\n scheme.set_theme(Some(content));\n\n }\n\n }\n\n\n\n\n\n\n\n let (cols, rows) = crossterm::terminal::size().ok().unwrap();\n\n if let Some(arg) = sub.value_of(\"action\") {\n\n // let values = write::get_json(output);\n\n if atty::isnt(atty::Stream::Stdout) {\n\n for color in scheme.colors().clone().unwrap().iter() {\n\n println!(\"{}\", color.to_rgb_hex_string(true));\n\n }\n", "file_path": "src/cli/colors.rs", "rank": 62, "score": 29955.379083938173 }, { "content": 
" } else {\n\n if arg == \"image\" {\n\n viuwer::display_image(&scheme, (cols).into(), (rows -1).into()).ok();\n\n } else if arg == \"ansii\" {\n\n format::show_colors(&scheme, 0..256, 4);\n\n } else if arg == \"list\" {\n\n format::show_pastel_colors(&scheme, 0..256);\n\n } else if arg == \"mix\" {\n\n viuwer::display_image(&scheme, (cols).into(), (rows -3).into()).ok();\n\n println!(\"Wallpaper: {}, \\t\\t Colors: 1-16\", scheme.image().clone().unwrap());\n\n format::show_colors(&scheme, 0..16, ((cols - 56) / 16).into());\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/cli/colors.rs", "rank": 63, "score": 29954.229686381073 }, { "content": "use anyhow::Result;\n\nuse std::path::PathBuf;\n\nuse crate::var;\n\nuse crate::gen::palette;\n\nuse crate::show::format;\n\nuse crate::show::viuwer;\n\nuse crate::scheme::*;\n\nuse crate::fun::text;\n\nuse crate::gen::apply;\n\n\n", "file_path": "src/cli/colors.rs", "rank": 64, "score": 29950.892925078882 }, { "content": "fn external_command(script: &str){\n\n Command::new(\"bash\")\n\n .arg(\"-c\")\n\n .arg(script)\n\n .output()\n\n .expect(\"failed to execute process\").stdout;\n\n}\n\n\n", "file_path": "src/gen/execute.rs", "rank": 65, "score": 28326.648087523357 }, { "content": "fn read_stdin() -> Result<String> {\n\n let mut input = String::new();\n\n let stdin = io::stdin();\n\n for line in stdin.lock().lines() {\n\n input.push_str(&line.unwrap());\n\n }\n\n Ok(input)\n\n}\n\n\n", "file_path": "src/var/pipe.rs", "rank": 66, "score": 28326.648087523357 }, { "content": "/////UNSAFE\n\nfn string_to_unsafe_static_str(s: String) -> &'static str {\n\n Box::leak(s.into_boxed_str())\n\n}\n\n\n", "file_path": "src/cli.rs", "rank": 67, "score": 26454.376289180454 }, { "content": "fn read_pipe(pipe_name: PathBuf, sender: Sender<String>) {\n\n loop{\n\n std::fs::remove_file(pipe_name.clone()).ok();\n\n let pipe = fifo::Pipe::new(pipe_name.clone());\n\n pipe.ensure_exists().unwrap();\n\n let reader = pipe.open_read();\n\n 
let content = reader.string().unwrap();\n\n sender.send(content).ok();\n\n }\n\n}\n\n\n\n\n", "file_path": "src/cli/daemon.rs", "rank": 68, "score": 24354.423753039457 }, { "content": "fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>\n\n where P: AsRef<Path>, {\n\n let file = File::open(filename)?;\n\n Ok(io::BufReader::new(file).lines())\n\n}\n\n\n\n\n", "file_path": "src/fun/text.rs", "rank": 69, "score": 21597.69564685077 }, { "content": "/// Attempt to create a new Unix named pipe/FIFO on disk.\n\nfn create_pipe<P: ?Sized + nix::NixPath>(path: &P, mode: Option<Mode>) -> nix::Result<()> {\n\n nix::unistd::mkfifo(path, mode.unwrap_or_else(|| Mode::from_bits_truncate(0o660)))\n\n}\n\n\n\n/// Attempt to delete a Unix named pipe/FIFO from disk.\n\nasync fn remove_pipe<P: AsRef<Path>>(path: P) -> tokio::io::Result<()> {\n\n tokio::fs::remove_file(&path).await\n\n}\n\n\n\n/// Represents a path to a Unix named pipe (FIFO).\n\n///\n\n/// Provides convenience methods to create readers and writers, as well as an\n\n/// easy way to ensure the pipe actually exists.\n\n#[derive(Clone)]\n\npub struct Pipe {\n\n inner: PathBuf,\n\n}\n\n\n\nimpl Pipe {\n\n /// Wraps a given path in a `Pipe`.\n", "file_path": "src/fun/fifo.rs", "rank": 70, "score": 21031.51154761641 }, { "content": " ) {\n\n for i in 0..height {\n\n for j in 0..width {\n\n *self.pixel_mut(row + i, col + j) = Some(color.clone());\n\n }\n\n }\n\n }\n\n\n\n pub fn draw_checkerboard(\n\n &mut self,\n\n row: usize,\n\n col: usize,\n\n height: usize,\n\n width: usize,\n\n dark: &Color,\n\n light: &Color,\n\n ) {\n\n for i in 0..height {\n\n for j in 0..width {\n\n let color = if (i + j) % 2 == 0 { dark } else { light };\n", "file_path": "src/show/canvas.rs", "rank": 72, "score": 16.437742981630457 }, { "content": " .set(ArgSettings::RequireEquals)\n\n )\n\n .arg(\n\n Arg::with_name(\"theme\")\n\n .help(\"specify the theme to extract from colors\")\n\n .long(\"theme\")\n\n 
.takes_value(true)\n\n .value_name(\"THEME\")\n\n .possible_values(&[\"dark\", \"light\"])\n\n .default_value(\"dark\")\n\n .set(ArgSettings::RequireEquals)\n\n )\n\n .arg(\n\n Arg::with_name(\"action\")\n\n .help(\"action to take\")\n\n .possible_values(&[\"set\", \"regen\"])\n\n .takes_value(true)\n\n .last(true)\n\n )\n\n )\n", "file_path": "src/cli.rs", "rank": 73, "score": 11.3128307201989 }, { "content": " SubCommand::with_name(\"config\")\n\n .about(\"Send specific configs to pipe or daemon\")\n\n .arg(\n\n Arg::with_name(\"theme\")\n\n .help(\"specify the theme to extract from colors\")\n\n .long(\"theme\")\n\n .takes_value(true)\n\n .value_name(\"THEME\")\n\n .possible_values(&[\"dark\", \"light\"])\n\n .default_value(\"dark\")\n\n .required(true)\n\n .set(ArgSettings::RequireEquals)\n\n )\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"test\")\n\n .setting(AppSettings::Hidden)\n\n .arg(\n\n Arg::with_name(\"image\")\n\n .help(\"specify the image to extract colors from\")\n", "file_path": "src/cli.rs", "rank": 74, "score": 10.919868155903725 }, { "content": "\n\n let (cols, rows) = crossterm::terminal::size().ok().unwrap();\n\n viuwer::display_image(&scheme, (cols-10).into(), (rows -13).into()).ok();\n\n println!(\"Palette\");\n\n let colors: Vec<pastel::Color> = palette.into_iter().map(|x| pastel::Color::from_hex(&x)).collect();\n\n format::show_specified_colors(colors.clone(), ((cols - 56) / 16).into());\n\n println!(\"\\n6th\");\n\n format::show_specified_colors(generate::gen_main_six(&colors), ((cols - 56) / 16).into());\n\n println!(\"\\nColors\");\n\n format::show_colors(&scheme, 0..16, ((cols - 56) / 16).into());\n\n Ok(())\n\n}\n", "file_path": "src/cli/test.rs", "rank": 75, "score": 10.915223200936609 }, { "content": "pub mod temp;\n\npub mod defs;\n\npub mod envi;\n\npub mod pipe;\n\npub mod args;\n\npub mod file;\n\n\n\nuse clap;\n\nuse crate::scheme::*;\n\nuse colored::*;\n\n\n", "file_path": "src/var.rs", "rank": 76, "score": 
10.756791019952535 }, { "content": " *self.pixel_mut(row + i, col + j) = Some(color.clone());\n\n }\n\n }\n\n }\n\n\n\n pub fn draw_text(&mut self, row: usize, col: usize, text: &str) {\n\n assert!(row % 2 == 0);\n\n\n\n for (j, c) in text.chars().enumerate() {\n\n *self.char_mut(row / 2, col + j) = Some(c);\n\n }\n\n }\n\n\n\n pub fn print(&self, out: &mut dyn Write) -> Result<(), Box<dyn std::error::Error>> {\n\n for i_div_2 in 0..self.height / 2 {\n\n for j in 0..self.width {\n\n if let Some(c) = self.char(i_div_2, j) {\n\n write!(out, \"{}\", c)?;\n\n } else {\n\n let p_top = self.pixel(2 * i_div_2, j);\n", "file_path": "src/show/canvas.rs", "rank": 77, "score": 9.935656959759651 }, { "content": " // Calculate the dominance of each color\n\n let mut palette: Vec<(pastel::Lab, f32)> = means.iter().map(|c| (c.clone(), 0.0)).collect();\n\n let len = pixels.len() as f32;\n\n for color in pixels.iter() {\n\n let near = nearest(&color, &means).0;\n\n palette[near].1 += 1.0 / len;\n\n }\n\n return palette;\n\n }\n\n };\n\n\n\n // Pick a color and use it as a cluster center\n\n means.push(pixels[dist.sample(&mut rng)].clone());\n\n }\n\n\n\n let mut clusters: Vec<Vec<&pastel::Lab>>;\n\n let mut iters_left = max_iter.unwrap_or(300);\n\n loop {\n\n clusters = vec![Vec::new(); k as usize];\n\n for color in pixels.iter() {\n", "file_path": "src/gen/kmeans.rs", "rank": 78, "score": 9.524294570919977 }, { "content": "use std::io::Write;\n\n\n\nuse pastel::ansi::{self, Brush, ToAnsiStyle};\n\nuse pastel::named::{NamedColor, NAMED_COLORS};\n\nuse pastel::Color;\n\nuse pastel::Format;\n\n\n\npub struct Canvas {\n\n height: usize,\n\n width: usize,\n\n pixels: Vec<Option<Color>>,\n\n chars: Vec<Option<char>>,\n\n brush: Brush,\n\n}\n\n\n\nimpl Canvas {\n\n pub fn new(height: usize, width: usize, brush: Brush) -> Self {\n\n assert!(height % 2 == 0);\n\n\n\n let mut pixels = vec![];\n", "file_path": "src/show/canvas.rs", "rank": 79, "score": 9.377842617892771 }, { "content": " 
pixels.resize(height * width, None);\n\n let mut chars = vec![];\n\n chars.resize(height / 2 * width, None);\n\n\n\n Canvas {\n\n height,\n\n width,\n\n pixels,\n\n chars,\n\n brush,\n\n }\n\n }\n\n\n\n pub fn draw_rect(\n\n &mut self,\n\n row: usize,\n\n col: usize,\n\n height: usize,\n\n width: usize,\n\n color: &Color,\n", "file_path": "src/show/canvas.rs", "rank": 80, "score": 9.00956433275333 }, { "content": " }\n\n\n\n let palette: Vec<String>;\n\n if let Some(content) = scheme.palette() {\n\n match content.as_str() {\n\n \"pigment\" => {\n\n palette = palette::palette_from_image(scheme.image().clone().unwrap());\n\n text::write_temp_file(\"lule_palette\", palette.join(\"\\n\").as_bytes());\n\n scheme.set_pigments(Some(palette));\n\n },\n\n _ => unreachable!(),\n\n };\n\n }\n\n }\n\n\n\n let allcolors = generate::get_all_colors(scheme);\n\n scheme.set_colors(Some(allcolors));\n\n\n\n let values = write::output_to_json(scheme, false);\n\n write::write_temp(&scheme);\n\n write::write_cache(&scheme);\n\n write::write_cache_json(scheme, values);\n\n if let Some(_) = scheme.scripts() {\n\n execute::command_execution(scheme);\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/gen/apply.rs", "rank": 81, "score": 8.63780419352208 }, { "content": "use std::path::PathBuf;\n\nuse crate::scheme::*;\n\nuse colored::*;\n\nuse dirs;\n\n\n", "file_path": "src/var/defs.rs", "rank": 82, "score": 8.359433914547495 }, { "content": "pub mod create;\n\npub mod colors;\n\npub mod config;\n\npub mod daemon;\n\npub mod test;\n\nuse colored::*;\n\n\n\nuse clap::{crate_description, crate_name, crate_version, App, Arg, SubCommand, AppSettings, ArgSettings};\n\n\n\n/////UNSAFE\n", "file_path": "src/cli.rs", "rank": 83, "score": 8.316573237642714 }, { "content": " .default_value(\"pigment\")\n\n .value_name(\"NAME\")\n\n .set(ArgSettings::RequireEquals)\n\n )\n\n .arg(\n\n Arg::with_name(\"scheme\")\n\n .long(\"scheme\")\n\n .value_name(\"NAME\")\n\n .help(\"specify a color scheme from configs 
to use\")\n\n .takes_value(true)\n\n .set(ArgSettings::RequireEquals)\n\n )\n\n .arg(\n\n Arg::with_name(\"image\")\n\n .help(\"specify the image to extract colors from\")\n\n .long(\"image\")\n\n .visible_aliases(&[\"source\"])\n\n .takes_value(true)\n\n .value_name(\"FLEPATH\")\n\n .conflicts_with(\"wallpath\")\n", "file_path": "src/cli.rs", "rank": 84, "score": 8.133152144570055 }, { "content": "use crate::show::canvas;\n\nuse crate::scheme::*;\n\nuse colored::*;\n\nuse pastel::ansi;\n\nuse std::ops::Range;\n\n\n", "file_path": "src/show/format.rs", "rank": 85, "score": 8.072634711452185 }, { "content": " &format!(\"Hex: {}\", color.to_rgb_hex_string(true)),\n\n );\n\n canvas.draw_text(\n\n text_position_y + 4,\n\n text_position_x,\n\n &format!(\"RGB: {}\", color.to_rgb_string(Format::Spaces)),\n\n );\n\n canvas.draw_text(\n\n text_position_y + 6,\n\n text_position_x,\n\n &format!(\"HSL: {}\", color.to_hsl_string(Format::Spaces)),\n\n );\n\n\n\n // canvas.draw_text(\n\n // text_position_y + 8,\n\n // text_position_x,\n\n // \"Most similar:\",\n\n // );\n\n\n\n\n", "file_path": "src/show/canvas.rs", "rank": 86, "score": 7.904928532492997 }, { "content": " let vec_profile = ProfileVec {\n\n wallpaper: scheme.image().clone().unwrap(),\n\n theme: scheme.theme().clone().unwrap(),\n\n special: Special {\n\n background: color_vec[0].clone(),\n\n foreground: color_vec[15].clone(),\n\n cursor: color_vec[1].clone()\n\n },\n\n colors: color_vec\n\n };\n\n if map {\n\n serde_json::to_value(&map_profile).unwrap()\n\n } else {\n\n serde_json::to_value(&vec_profile).unwrap()\n\n }\n\n}\n\n\n", "file_path": "src/gen/write.rs", "rank": 87, "score": 7.7429891262254795 }, { "content": "use std::path::PathBuf;\n\nuse anyhow::Result;\n\nuse crate::gen::generate;\n\nuse crate::gen::palette;\n\nuse crate::gen::write;\n\nuse crate::gen::execute;\n\nuse crate::scheme::*;\n\nuse crate::fun::text;\n\n\n", "file_path": "src/gen/apply.rs", "rank": 88, "score": 7.453337609645464 }, { 
"content": "use crate::scheme::*;\n\nuse anyhow::Result;\n\n\n\nuse crate::var;\n\nuse crate::gen::palette;\n\nuse crate::gen::generate;\n\nuse crate::gen::templ;\n\nuse crate::fun::text;\n\nuse crate::show::viuwer;\n\nuse crate::show::format;\n\n\n", "file_path": "src/cli/test.rs", "rank": 89, "score": 7.442770014336316 }, { "content": " data[\"cursor\"] = colors[1].to_rgb_hex_string(false).into();\n\n data[\"accent\"] = colors[1].to_rgb_hex_string(false).into();\n\n }\n\n\n\n if let Some(wallpaper) = scheme.image() {\n\n data[\"wallpaper\"] = wallpaper.into();\n\n }\n\n if let Some(theme) = scheme.theme() {\n\n data[\"theme\"] = theme.into();\n\n }\n\n\n\n let context = templar::StandardContext::new();\n\n context.set(data)?;\n\n\n\n let new_content = format!(\"{}\", template.render(&context)?);\n\n text::write_to_file(replaced, new_content.as_bytes());\n\n Ok(())\n\n}\n\n\n", "file_path": "src/gen/templ.rs", "rank": 90, "score": 7.437577294005315 }, { "content": " if let Some(arg) = sub.value_of(\"theme\") {\n\n scheme.set_theme(Some(arg.to_string()));\n\n }\n\n };\n\n if let Some(sub) = app.subcommand_matches(\"daemon\"){\n\n if let Some(arg) = sub.value_of(\"loop\") {\n\n let value = arg.parse::<usize>().expect(\"--loop value must be a number\");\n\n scheme.set_looop(Some(value));\n\n } else {\n\n scheme.set_looop(Some(300));\n\n }\n\n };\n\n if let Some(sub) = app.subcommand_matches(\"test\"){\n\n if let Some(arg) = sub.value_of(\"image\") {\n\n scheme.set_image(Some(text::vaid_image(arg)));\n\n }\n\n if let Some(arg) = sub.value_of(\"theme\") {\n\n scheme.set_theme(Some(arg.to_string()));\n\n }\n\n };\n\n}\n", "file_path": "src/var/args.rs", "rank": 91, "score": 7.315681687569912 }, { "content": " let mut file = tokio::fs::OpenOptions::new()\n\n .write(true)\n\n .create(false)\n\n .open(&self.path.inner)\n\n .await?;\n\n file.write_all(data).await\n\n }\n\n pub fn from_path(source: &Pipe) -> Self {\n\n Self {\n\n path: source.clone(),\n\n }\n\n }\n\n /// 
Checks if the named pipe actually exists and tries to create it if it doesn't.\n\n pub fn ensure_pipe_exists(&self) -> nix::Result<&Self> {\n\n self.path.ensure_exists()?;\n\n Ok(self)\n\n }\n\n /// Writes byte data to the pipe.\n\n pub fn write(&self, data: &[u8]) -> std::io::Result<()> {\n\n let mut buffer = std::fs::File::create(&self.path.inner.to_str().unwrap())?;\n", "file_path": "src/fun/fifo.rs", "rank": 92, "score": 7.22080465258478 }, { "content": " )\n\n .subcommand(\n\n SubCommand::with_name(\"colors\")\n\n .about(\"Display current colors in terminal\")\n\n .arg(\n\n Arg::with_name(\"gen\")\n\n .help(\"generate new colors - just show them - not apply\")\n\n .short(\"g\")\n\n )\n\n .arg(\n\n Arg::with_name(\"action\")\n\n .help(\"action to take\")\n\n .possible_values(&[\"image\", \"ansii\", \"list\", \"mix\"])\n\n .default_value(\"ansii\")\n\n .required(true)\n\n .takes_value(true)\n\n .last(true)\n\n )\n\n )\n\n .subcommand(\n", "file_path": "src/cli.rs", "rank": 93, "score": 7.1602442114482745 }, { "content": " &Color::graytone(0.71),\n\n );\n\n canvas.draw_rect(\n\n color_panel_position_y,\n\n color_panel_position_x,\n\n color_panel_size,\n\n color_panel_size,\n\n color,\n\n );\n\n\n\n canvas.draw_text(\n\n text_position_y + 0,\n\n text_position_x,\n\n &format!(\"Color: {}\", id),\n\n );\n\n\n\n #[allow(clippy::identity_op)]\n\n canvas.draw_text(\n\n text_position_y + 2,\n\n text_position_x,\n", "file_path": "src/show/canvas.rs", "rank": 94, "score": 6.9065052052991085 }, { "content": " clusters[nearest(&color, &means).0].push(color);\n\n }\n\n let mut changed: bool = false;\n\n for i in 0..clusters.len() {\n\n let new_mean = recalculate(&clusters[i]);\n\n if pastel::delta_e::cie76(&means[i], &new_mean) > TOLERANCE {\n\n changed = true;\n\n }\n\n means[i] = new_mean;\n\n }\n\n iters_left -= 1;\n\n if !changed || iters_left <= 0 {\n\n break;\n\n }\n\n }\n\n\n\n // Length of each cluster divided by total pixels -> dominance of each mean\n\n return 
clusters\n\n .iter()\n\n .enumerate()\n\n .map(|(i, cluster)| {\n\n (means[i].clone(), cluster.len() as f32 / pixels.len() as f32)\n\n }).collect();\n\n}\n\n\n\n\n\n\n", "file_path": "src/gen/kmeans.rs", "rank": 95, "score": 6.640734218094559 }, { "content": " )\n\n .subcommand(\n\n SubCommand::with_name(\"create\")\n\n .about(\"Generate new colors from an image\")\n\n .arg(\n\n Arg::with_name(\"wallpath\")\n\n .help(\"specify a folder to pick an image randomly\")\n\n .long(\"wallpath\")\n\n .visible_aliases(&[\"path\"])\n\n .takes_value(true)\n\n .value_name(\"DIRPATH\")\n\n .conflicts_with(\"image\")\n\n .set(ArgSettings::RequireEquals)\n\n )\n\n .arg(\n\n Arg::with_name(\"palette\")\n\n .help(\"specify a palete generator for colors\")\n\n .long(\"palette\")\n\n .takes_value(true)\n\n .possible_values(&[\"schemer2\", \"pigment\"])\n", "file_path": "src/cli.rs", "rank": 96, "score": 6.560905422779113 }, { "content": " let similar = similar_colors(&color);\n\n for (i, nc) in similar.iter().enumerate().take(3) {\n\n canvas.draw_text(text_position_y + 10 + 2 * i, text_position_x + 7, nc.name);\n\n canvas.draw_rect(\n\n text_position_y + 10 + 2 * i,\n\n text_position_x + 1,\n\n 2,\n\n 5,\n\n &nc.color,\n\n );\n\n }\n\n\n\n canvas.print(handle)?;\n\n writeln!(handle)?;\n\n Ok(())\n\n}\n", "file_path": "src/show/canvas.rs", "rank": 97, "score": 6.207391757775755 }, { "content": "pub mod kmeans;\n\npub mod palette;\n\npub mod generate;\n\npub mod write;\n\npub mod apply;\n\npub mod execute;\n\npub mod templ;\n", "file_path": "src/gen.rs", "rank": 98, "score": 6.100670135695849 }, { "content": "use clap;\n\nuse crate::scheme::*;\n\nuse crate::fun::text;\n\n\n", "file_path": "src/var/args.rs", "rank": 99, "score": 6.088593241008829 } ]
Rust
contracts/terranames_auction/src/state.rs
jonls/terranames-contracts
13f50b1d396c26391f43478b06f72b220978e4ab
use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use cosmwasm_std::{Addr, Order, StdError, StdResult, Storage, Uint128}; use cosmwasm_storage::{ bucket, bucket_read, singleton, singleton_read, }; use terranames::auction::{ seconds_from_deposit, deposit_from_seconds_ceil, deposit_from_seconds_floor, }; use terranames::utils::{Timedelta, Timestamp}; pub static CONFIG_KEY: &[u8] = b"config"; pub static NAME_STATE_PREFIX: &[u8] = b"name"; const DEFAULT_LIMIT: u32 = 10; const MAX_LIMIT: u32 = 30; fn calc_range_start_str(start_after: Option<&str>) -> Option<Vec<u8>> { start_after.map(|s| { let mut v: Vec<u8> = s.into(); v.push(0); v }) } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct Config { pub collector_addr: Addr, pub stable_denom: String, pub min_lease_secs: Timedelta, pub max_lease_secs: Timedelta, pub counter_delay_secs: Timedelta, pub transition_delay_secs: Timedelta, pub bid_delay_secs: Timedelta, } pub fn read_config(storage: &dyn Storage) -> StdResult<Config> { singleton_read(storage, CONFIG_KEY).load() } #[must_use] pub fn store_config( storage: &mut dyn Storage, config: &Config, ) -> StdResult<()> { singleton(storage, CONFIG_KEY).save(config) } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct NameState { pub owner: Addr, pub controller: Option<Addr>, pub transition_reference_time: Timestamp, pub rate: Uint128, pub begin_time: Timestamp, pub begin_deposit: Uint128, pub previous_owner: Option<Addr>, pub previous_transition_reference_time: Timestamp, } impl NameState { pub fn seconds_spent_since_bid(&self, current_time: Timestamp) -> Option<Timedelta> { current_time.checked_sub(self.begin_time).ok() } pub fn seconds_spent_since_transition(&self, current_time: Timestamp) -> Option<Timedelta> { current_time.checked_sub(self.transition_reference_time).ok() } pub fn counter_delay_end(&self, config: &Config) -> Timestamp { self.begin_time + config.counter_delay_secs } pub fn 
transition_delay_end(&self, config: &Config) -> Timestamp { if self.transition_reference_time.is_zero() { self.begin_time } else { self.transition_reference_time + config.counter_delay_secs + config.transition_delay_secs } } pub fn bid_delay_end(&self, config: &Config) -> Timestamp { let delay = if !self.rate.is_zero() { config.counter_delay_secs + config.bid_delay_secs } else { Timedelta::zero() }; self.begin_time + delay } pub fn max_seconds(&self) -> Option<Timedelta> { seconds_from_deposit(self.begin_deposit, self.rate) } pub fn expire_time(&self) -> Option<Timestamp> { self.max_seconds().map(|max_seconds| self.begin_time + max_seconds) } pub fn current_deposit(&self, current_time: Timestamp) -> Uint128 { let seconds_spent = match self.seconds_spent_since_bid(current_time) { Some(seconds_spent) => seconds_spent, None => return Uint128::zero(), }; let deposit_spent = deposit_from_seconds_ceil(seconds_spent, self.rate); self.begin_deposit - deposit_spent } pub fn max_allowed_deposit(&self, config: &Config, current_time: Timestamp) -> Uint128 { let seconds_spent = match self.seconds_spent_since_bid(current_time) { Some(seconds_spent) => seconds_spent, None => return Uint128::zero(), }; let max_seconds_from_beginning = config.max_lease_secs + seconds_spent; deposit_from_seconds_floor(max_seconds_from_beginning, self.rate) } pub fn owner_status(&self, config: &Config, current_time: Timestamp) -> OwnerStatus { let seconds_spent_since_bid = match self.seconds_spent_since_bid(current_time) { Some(seconds_spent) => seconds_spent, None => return OwnerStatus::Expired { expire_time: Timestamp::zero(), transition_reference_time: self.transition_reference_time, }, }; if let Some(max_seconds) = self.max_seconds() { if seconds_spent_since_bid >= max_seconds { return OwnerStatus::Expired { expire_time: self.begin_time + max_seconds, transition_reference_time: self.transition_reference_time, }; } } let seconds_spent_since_transition = match 
self.seconds_spent_since_transition(current_time) { Some(seconds_spent) => seconds_spent, None => return OwnerStatus::Expired { expire_time: Timestamp::zero(), transition_reference_time: self.transition_reference_time, }, }; if seconds_spent_since_bid < config.counter_delay_secs { OwnerStatus::CounterDelay { name_owner: self.previous_owner.clone(), bid_owner: self.owner.clone(), transition_reference_time: self.previous_transition_reference_time, } } else if seconds_spent_since_transition < config.counter_delay_secs + config.transition_delay_secs { OwnerStatus::TransitionDelay { owner: self.owner.clone(), transition_reference_time: self.transition_reference_time, } } else { OwnerStatus::Valid { owner: self.owner.clone(), transition_reference_time: self.transition_reference_time, } } } } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub enum OwnerStatus { CounterDelay { name_owner: Option<Addr>, bid_owner: Addr, transition_reference_time: Timestamp, }, TransitionDelay { owner: Addr, transition_reference_time: Timestamp, }, Valid { owner: Addr, transition_reference_time: Timestamp, }, Expired { expire_time: Timestamp, transition_reference_time: Timestamp, }, } impl OwnerStatus { pub fn can_set_rate(&self, sender: &Addr) -> bool { match self { OwnerStatus::Valid { owner, .. } | OwnerStatus::TransitionDelay { owner, .. } => sender == owner, _ => false, } } pub fn can_transfer_name_owner(&self, sender: &Addr) -> bool { match self { OwnerStatus::Valid { owner, .. } | OwnerStatus::CounterDelay { name_owner: Some(owner), .. } | OwnerStatus::TransitionDelay { owner, .. } => sender == owner, _ => false, } } pub fn can_transfer_bid_owner(&self, sender: &Addr) -> bool { match self { OwnerStatus::CounterDelay { bid_owner, .. } => sender == bid_owner, _ => false, } } pub fn can_set_controller(&self, sender: &Addr) -> bool { match self { OwnerStatus::Valid { owner, .. } | OwnerStatus::CounterDelay { name_owner: Some(owner), .. 
} | OwnerStatus::TransitionDelay { owner, .. } => sender == owner, _ => false, } } } pub fn read_name_state( storage: &dyn Storage, name: &str, ) -> StdResult<NameState> { bucket_read(storage, NAME_STATE_PREFIX).load(name.as_bytes()) } pub fn read_option_name_state( storage: &dyn Storage, name: &str, ) -> StdResult<Option<NameState>> { bucket_read(storage, NAME_STATE_PREFIX).may_load(name.as_bytes()) } pub fn collect_name_states( storage: &dyn Storage, start_after: Option<&str>, limit: Option<u32>, ) -> StdResult<Vec<(String, NameState)>> { let bucket = bucket_read(storage, NAME_STATE_PREFIX); let start = calc_range_start_str(start_after); let limit = limit.unwrap_or(DEFAULT_LIMIT).min(MAX_LIMIT) as usize; bucket.range(start.as_deref(), None, Order::Ascending) .take(limit) .map(|item| { let (key, value) = item?; let key = String::from_utf8(key) .or_else(|_| Err(StdError::generic_err("Invalid utf-8")))?; Ok((key, value)) }) .collect() } #[must_use] pub fn store_name_state( storage: &mut dyn Storage, name: &str, name_info: &NameState, ) -> StdResult<()> { bucket(storage, NAME_STATE_PREFIX).save(name.as_bytes(), name_info) }
use schemars::JsonSchema; use serde::{Deserialize, Serialize}; use cosmwasm_std::{Addr, Order, StdError, StdResult, Storage, Uint128}; use cosmwasm_storage::{ bucket, bucket_read, singleton, singleton_read, }; use terranames::auction::{ seconds_from_deposit, deposit_from_seconds_ceil, deposit_from_seconds_floor, }; use terranames::utils::{Timedelta, Timestamp}; pub static CONFIG_KEY: &[u8] = b"config"; pub static NAME_STATE_PREFIX: &[u8] = b"name"; const DEFAULT_LIMIT: u32 = 10; const MAX_LIMIT: u32 = 30; fn calc_range_start_str(start_after: Option<&str>) -> Option<Vec<u8>> { start_after.map(|s| { let mut v: Vec<u8> = s.into(); v.push(0); v }) } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct Config { pub collector_addr: Addr, pub stable_denom: String, pub min_lease_secs: Timedelta, pub max_lease_secs: Timedelta, pub counter_delay_secs: Timedelta, pub transition_delay_secs: Timedelta, pub bid_delay_secs: Timedelta, } pub fn read_config(storage: &dyn Storage) -> StdResult<Config> { singleton_read(storage, CONFIG_KEY).load() } #[must_use] pub fn store_config( storage: &mut dyn Storage, config: &Config, ) -> St
transition_reference_time: self.transition_reference_time, } } } } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub enum OwnerStatus { CounterDelay { name_owner: Option<Addr>, bid_owner: Addr, transition_reference_time: Timestamp, }, TransitionDelay { owner: Addr, transition_reference_time: Timestamp, }, Valid { owner: Addr, transition_reference_time: Timestamp, }, Expired { expire_time: Timestamp, transition_reference_time: Timestamp, }, } impl OwnerStatus { pub fn can_set_rate(&self, sender: &Addr) -> bool { match self { OwnerStatus::Valid { owner, .. } | OwnerStatus::TransitionDelay { owner, .. } => sender == owner, _ => false, } } pub fn can_transfer_name_owner(&self, sender: &Addr) -> bool { match self { OwnerStatus::Valid { owner, .. } | OwnerStatus::CounterDelay { name_owner: Some(owner), .. } | OwnerStatus::TransitionDelay { owner, .. } => sender == owner, _ => false, } } pub fn can_transfer_bid_owner(&self, sender: &Addr) -> bool { match self { OwnerStatus::CounterDelay { bid_owner, .. } => sender == bid_owner, _ => false, } } pub fn can_set_controller(&self, sender: &Addr) -> bool { match self { OwnerStatus::Valid { owner, .. } | OwnerStatus::CounterDelay { name_owner: Some(owner), .. } | OwnerStatus::TransitionDelay { owner, .. 
} => sender == owner, _ => false, } } } pub fn read_name_state( storage: &dyn Storage, name: &str, ) -> StdResult<NameState> { bucket_read(storage, NAME_STATE_PREFIX).load(name.as_bytes()) } pub fn read_option_name_state( storage: &dyn Storage, name: &str, ) -> StdResult<Option<NameState>> { bucket_read(storage, NAME_STATE_PREFIX).may_load(name.as_bytes()) } pub fn collect_name_states( storage: &dyn Storage, start_after: Option<&str>, limit: Option<u32>, ) -> StdResult<Vec<(String, NameState)>> { let bucket = bucket_read(storage, NAME_STATE_PREFIX); let start = calc_range_start_str(start_after); let limit = limit.unwrap_or(DEFAULT_LIMIT).min(MAX_LIMIT) as usize; bucket.range(start.as_deref(), None, Order::Ascending) .take(limit) .map(|item| { let (key, value) = item?; let key = String::from_utf8(key) .or_else(|_| Err(StdError::generic_err("Invalid utf-8")))?; Ok((key, value)) }) .collect() } #[must_use] pub fn store_name_state( storage: &mut dyn Storage, name: &str, name_info: &NameState, ) -> StdResult<()> { bucket(storage, NAME_STATE_PREFIX).save(name.as_bytes(), name_info) }
dResult<()> { singleton(storage, CONFIG_KEY).save(config) } #[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)] pub struct NameState { pub owner: Addr, pub controller: Option<Addr>, pub transition_reference_time: Timestamp, pub rate: Uint128, pub begin_time: Timestamp, pub begin_deposit: Uint128, pub previous_owner: Option<Addr>, pub previous_transition_reference_time: Timestamp, } impl NameState { pub fn seconds_spent_since_bid(&self, current_time: Timestamp) -> Option<Timedelta> { current_time.checked_sub(self.begin_time).ok() } pub fn seconds_spent_since_transition(&self, current_time: Timestamp) -> Option<Timedelta> { current_time.checked_sub(self.transition_reference_time).ok() } pub fn counter_delay_end(&self, config: &Config) -> Timestamp { self.begin_time + config.counter_delay_secs } pub fn transition_delay_end(&self, config: &Config) -> Timestamp { if self.transition_reference_time.is_zero() { self.begin_time } else { self.transition_reference_time + config.counter_delay_secs + config.transition_delay_secs } } pub fn bid_delay_end(&self, config: &Config) -> Timestamp { let delay = if !self.rate.is_zero() { config.counter_delay_secs + config.bid_delay_secs } else { Timedelta::zero() }; self.begin_time + delay } pub fn max_seconds(&self) -> Option<Timedelta> { seconds_from_deposit(self.begin_deposit, self.rate) } pub fn expire_time(&self) -> Option<Timestamp> { self.max_seconds().map(|max_seconds| self.begin_time + max_seconds) } pub fn current_deposit(&self, current_time: Timestamp) -> Uint128 { let seconds_spent = match self.seconds_spent_since_bid(current_time) { Some(seconds_spent) => seconds_spent, None => return Uint128::zero(), }; let deposit_spent = deposit_from_seconds_ceil(seconds_spent, self.rate); self.begin_deposit - deposit_spent } pub fn max_allowed_deposit(&self, config: &Config, current_time: Timestamp) -> Uint128 { let seconds_spent = match self.seconds_spent_since_bid(current_time) { Some(seconds_spent) => 
seconds_spent, None => return Uint128::zero(), }; let max_seconds_from_beginning = config.max_lease_secs + seconds_spent; deposit_from_seconds_floor(max_seconds_from_beginning, self.rate) } pub fn owner_status(&self, config: &Config, current_time: Timestamp) -> OwnerStatus { let seconds_spent_since_bid = match self.seconds_spent_since_bid(current_time) { Some(seconds_spent) => seconds_spent, None => return OwnerStatus::Expired { expire_time: Timestamp::zero(), transition_reference_time: self.transition_reference_time, }, }; if let Some(max_seconds) = self.max_seconds() { if seconds_spent_since_bid >= max_seconds { return OwnerStatus::Expired { expire_time: self.begin_time + max_seconds, transition_reference_time: self.transition_reference_time, }; } } let seconds_spent_since_transition = match self.seconds_spent_since_transition(current_time) { Some(seconds_spent) => seconds_spent, None => return OwnerStatus::Expired { expire_time: Timestamp::zero(), transition_reference_time: self.transition_reference_time, }, }; if seconds_spent_since_bid < config.counter_delay_secs { OwnerStatus::CounterDelay { name_owner: self.previous_owner.clone(), bid_owner: self.owner.clone(), transition_reference_time: self.previous_transition_reference_time, } } else if seconds_spent_since_transition < config.counter_delay_secs + config.transition_delay_secs { OwnerStatus::TransitionDelay { owner: self.owner.clone(), transition_reference_time: self.transition_reference_time, } } else { OwnerStatus::Valid { owner: self.owner.clone(),
random
[ { "content": "pub fn read_config(storage: &dyn Storage) -> StdResult<Config> {\n\n singleton_read(storage, CONFIG_KEY).load()\n\n}\n\n\n", "file_path": "contracts/terranames_resolver/src/state.rs", "rank": 1, "score": 233558.8203027719 }, { "content": "pub fn read_config(storage: &dyn Storage) -> StdResult<Config> {\n\n singleton_read(storage, CONFIG_KEY).load()\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/state.rs", "rank": 2, "score": 230614.59816202818 }, { "content": "/// Return deposit needed for seconds and rate rounded down.\n\n///\n\n/// Rounded down to nearest raw unit (e.g. to 1 uusd NOT 1 whole usd).\n\npub fn deposit_from_seconds_floor(seconds: Timedelta, rate: Uint128) -> Uint128 {\n\n rate.multiply_ratio(seconds, RATE_SEC_DENOM)\n\n}\n\n\n", "file_path": "packages/terranames/src/auction.rs", "rank": 3, "score": 206540.45520908324 }, { "content": "/// Return deposit needed for seconds and rate rounded up.\n\n///\n\n/// Rounded up to nearest raw unit (e.g. 
to 1 uusd NOT 1 whole usd).\n\npub fn deposit_from_seconds_ceil(seconds: Timedelta, rate: Uint128) -> Uint128 {\n\n let a: u128 = (seconds.value() as u128) * rate.u128() + (RATE_SEC_DENOM.value() as u128) - 1;\n\n Uint128::from(1u64).multiply_ratio(a, RATE_SEC_DENOM)\n\n}\n\n\n", "file_path": "packages/terranames/src/auction.rs", "rank": 4, "score": 206540.45520908324 }, { "content": "/// Return number of seconds corresponding to deposit and rate\n\npub fn seconds_from_deposit(deposit: Uint128, rate: Uint128) -> Option<Timedelta> {\n\n if rate.is_zero() {\n\n None\n\n } else {\n\n Some(Timedelta::from_seconds(deposit.multiply_ratio(RATE_SEC_DENOM, rate).u128() as u64))\n\n }\n\n}\n", "file_path": "packages/terranames/src/auction.rs", "rank": 5, "score": 201622.24168641312 }, { "content": "pub fn read_state(storage: &dyn Storage) -> StdResult<State> {\n\n singleton_read(storage, STATE_KEY).load()\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/state.rs", "rank": 6, "score": 175868.72741870614 }, { "content": "#[must_use]\n\npub fn store_config(\n\n storage: &mut dyn Storage,\n\n config: &Config,\n\n) -> StdResult<()> {\n\n singleton(storage, CONFIG_KEY).save(config)\n\n}\n\n\n", "file_path": "contracts/terranames_resolver/src/state.rs", "rank": 7, "score": 156529.64572616783 }, { "content": "#[must_use]\n\npub fn store_config(\n\n storage: &mut dyn Storage,\n\n config: &Config,\n\n) -> StdResult<()> {\n\n singleton(storage, CONFIG_KEY).save(config)\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]\n\npub struct State {\n\n /// Current multiplier\n\n pub multiplier: Decimal,\n\n /// Total tokens staked\n\n pub total_staked: Uint128,\n\n /// Residual funds\n\n pub residual: Uint128,\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/state.rs", "rank": 9, "score": 154116.57350792328 }, { "content": "/// Return amount after deducting tax.\n\n///\n\n/// This is useful when sending a fixed amount to 
figure out how much to put in\n\n/// the send message for the amount plus taxes to sum to the fixed amount.\n\n/// Source: terraswap\n\npub fn deduct_tax(\n\n querier: &QuerierWrapper,\n\n denom: &str,\n\n amount: Uint128,\n\n) -> StdResult<Uint128> {\n\n let tax = calculate_tax(querier, denom, amount)?;\n\n Ok(amount.checked_sub(tax)?)\n\n}\n", "file_path": "packages/terranames/src/terra.rs", "rank": 10, "score": 99224.3447034983 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: ExecuteMsg,\n\n) -> ContractResult<Response> {\n\n match msg {\n\n ExecuteMsg::BidName { name, rate } => {\n\n execute_bid(deps, env, info, name, rate)\n\n },\n\n ExecuteMsg::FundName { name, owner } => {\n\n let owner = deps.api.addr_validate(&owner)?;\n\n execute_fund(deps, env, info, name, owner)\n\n },\n\n ExecuteMsg::SetNameRate { name, rate } => {\n\n execute_set_rate(deps, env, info, name, rate)\n\n },\n\n ExecuteMsg::TransferNameOwner { name, to } => {\n\n let to = deps.api.addr_validate(&to)?;\n\n execute_transfer_owner(deps, env, info, name, to)\n\n },\n\n ExecuteMsg::SetNameController { name, controller } => {\n\n let controller = deps.api.addr_validate(&controller)?;\n\n execute_set_controller(deps, env, info, name, controller)\n\n },\n\n }\n\n}\n\n\n", "file_path": "contracts/terranames_auction/src/contract.rs", "rank": 11, "score": 99221.30146264439 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(\n\n deps: Deps,\n\n env: Env,\n\n msg: QueryMsg,\n\n) -> ContractResult<QueryResponse> {\n\n match msg {\n\n QueryMsg::Config {} => {\n\n Ok(to_binary(&query_config(deps, env)?)?)\n\n },\n\n QueryMsg::ResolveName { name } => {\n\n Ok(to_binary(&query_resolve(deps, env, name)?)?)\n\n },\n\n }\n\n}\n\n\n", "file_path": "contracts/terranames_resolver/src/contract.rs", "rank": 12, "score": 99221.30146264439 }, { "content": "#[cfg_attr(not(feature 
= \"library\"), entry_point)]\n\npub fn migrate(\n\n _deps: DepsMut,\n\n _env: Env,\n\n _msg: MigrateMsg,\n\n) -> StdResult<Response> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/terranames_resolver/src/contract.rs", "rank": 13, "score": 99221.30146264439 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn instantiate(\n\n deps: DepsMut,\n\n _env: Env,\n\n _info: MessageInfo,\n\n msg: InstantiateMsg,\n\n) -> ContractResult<Response> {\n\n let collector_addr = deps.api.addr_validate(&msg.collector_addr)?;\n\n\n\n if !(msg.min_lease_secs <= msg.max_lease_secs) {\n\n return InvalidConfig.fail();\n\n }\n\n\n\n let state = Config {\n\n collector_addr,\n\n stable_denom: msg.stable_denom,\n\n min_lease_secs: msg.min_lease_secs,\n\n max_lease_secs: msg.max_lease_secs,\n\n counter_delay_secs: msg.counter_delay_secs,\n\n transition_delay_secs: msg.transition_delay_secs,\n\n bid_delay_secs: msg.bid_delay_secs,\n\n };\n\n\n\n store_config(deps.storage, &state)?;\n\n\n\n Ok(Response::default())\n\n}\n\n\n", "file_path": "contracts/terranames_auction/src/contract.rs", "rank": 14, "score": 99221.30146264439 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(\n\n deps: Deps,\n\n env: Env,\n\n msg: QueryMsg,\n\n) -> ContractResult<QueryResponse> {\n\n match msg {\n\n QueryMsg::Config {} => {\n\n Ok(to_binary(&query_config(deps)?)?)\n\n },\n\n QueryMsg::GetNameState { name } => {\n\n Ok(to_binary(&query_name_state(deps, env, name)?)?)\n\n },\n\n QueryMsg::GetAllNameStates { start_after, limit } => {\n\n Ok(to_binary(&query_all_name_states(deps, env, start_after, limit)?)?)\n\n },\n\n }\n\n}\n\n\n", "file_path": "contracts/terranames_auction/src/contract.rs", "rank": 15, "score": 99221.30146264439 }, { "content": "/// Calculate tax that is subtracted from the sent amount\n\n///\n\n/// Source: terraswap\n\npub fn calculate_tax(\n\n querier: &QuerierWrapper,\n\n denom: &str,\n\n amount: Uint128,\n\n) -> 
StdResult<Uint128> {\n\n let terra_querier = TerraQuerier::new(querier);\n\n let tax_rate = terra_querier.query_tax_rate()?.rate;\n\n let tax_cap = terra_querier.query_tax_cap(denom)?.cap;\n\n Ok(std::cmp::min(\n\n // a * (1 - (1 / (t + 1)))\n\n amount.checked_sub(\n\n amount.multiply_ratio(\n\n DECIMAL_FRACTION,\n\n DECIMAL_FRACTION * tax_rate + DECIMAL_FRACTION,\n\n ),\n\n )?,\n\n tax_cap,\n\n ))\n\n}\n\n\n", "file_path": "packages/terranames/src/terra.rs", "rank": 16, "score": 99221.30146264439 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(\n\n _deps: DepsMut,\n\n _env: Env,\n\n _msg: MigrateMsg,\n\n) -> ContractResult<Response> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/terranames_auction/src/contract.rs", "rank": 17, "score": 99221.30146264439 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn instantiate(\n\n deps: DepsMut,\n\n _env: Env,\n\n _info: MessageInfo,\n\n msg: InstantiateMsg,\n\n) -> ContractResult<Response> {\n\n let auction_contract = deps.api.addr_validate(&msg.auction_contract)?;\n\n\n\n let state = Config {\n\n auction_contract,\n\n };\n\n\n\n store_config(deps.storage, &state)?;\n\n\n\n Ok(Response::default())\n\n}\n\n\n", "file_path": "contracts/terranames_resolver/src/contract.rs", "rank": 18, "score": 99221.30146264439 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: ExecuteMsg,\n\n) -> ContractResult<Response> {\n\n match msg {\n\n ExecuteMsg::SetNameValue { name, value } => {\n\n execute_set_value(deps, env, info, name, value)\n\n },\n\n }\n\n}\n\n\n", "file_path": "contracts/terranames_resolver/src/contract.rs", "rank": 19, "score": 99221.30146264439 }, { "content": "/// Calculate tax to be sent in addition in order for recipient to receive amount\n\n///\n\n/// Source: terraswap\n\npub fn calculate_added_tax(\n\n querier: &QuerierWrapper,\n\n 
denom: &str,\n\n amount: Uint128,\n\n) -> StdResult<Uint128> {\n\n let terra_querier = TerraQuerier::new(querier);\n\n let tax_rate = terra_querier.query_tax_rate()?.rate;\n\n let tax_cap = terra_querier.query_tax_cap(denom)?.cap;\n\n Ok(std::cmp::min(amount * tax_rate, tax_cap))\n\n}\n\n\n", "file_path": "packages/terranames/src/terra.rs", "rank": 20, "score": 97560.4318680685 }, { "content": "/// Return Coin after deducting tax.\n\n///\n\n/// This is useful when sending a fixed amount to figure out how much to put in\n\n/// the send message for the amount plus taxes to sum to the fixed amount.\n\n/// Source: terraswap\n\npub fn deduct_coin_tax(\n\n querier: &QuerierWrapper,\n\n coin: Coin,\n\n) -> StdResult<Coin> {\n\n if coin.denom == \"uluna\" {\n\n Ok(coin)\n\n } else {\n\n let amount = deduct_tax(querier, &coin.denom, coin.amount)?;\n\n Ok(Coin {\n\n denom: coin.denom,\n\n amount,\n\n })\n\n }\n\n}\n\n\n", "file_path": "packages/terranames/src/terra.rs", "rank": 21, "score": 97559.8266642443 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn execute(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n msg: ExecuteMsg,\n\n) -> ContractResult<Response> {\n\n match msg {\n\n ExecuteMsg::Deposit {} => {\n\n execute_deposit(deps, env, info)\n\n },\n\n ExecuteMsg::UnstakeTokens { amount } => {\n\n execute_unstake_tokens(deps, env, info, amount)\n\n },\n\n ExecuteMsg::WithdrawTokens { amount, to } => {\n\n let to_addr = to.map(|to| deps.api.addr_validate(&to)).transpose()?;\n\n execute_withdraw_tokens(deps, env, info, amount, to_addr)\n\n },\n\n ExecuteMsg::WithdrawDividends { to } => {\n\n let to_addr = to.map(|to| deps.api.addr_validate(&to)).transpose()?;\n\n execute_withdraw_dividends(deps, env, info, to_addr)\n\n },\n\n ExecuteMsg::Receive(msg) => {\n\n execute_receive(deps, env, info, msg)\n\n },\n\n }\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/contract.rs", "rank": 22, "score": 97556.78342339041 }, 
{ "content": "pub fn query_name_state(\n\n querier: &QuerierWrapper,\n\n auction_contract: &Addr,\n\n name: &str,\n\n) -> StdResult<NameStateResponse> {\n\n let msg = AuctionQueryMsg::GetNameState {\n\n name: name.into(),\n\n };\n\n let query = WasmQuery::Smart {\n\n contract_addr: auction_contract.into(),\n\n msg: to_binary(&msg)?,\n\n }.into();\n\n querier.query(&query)\n\n}\n", "file_path": "packages/terranames/src/querier.rs", "rank": 23, "score": 97556.78342339041 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn migrate(\n\n _deps: DepsMut,\n\n _env: Env,\n\n _msg: MigrateMsg,\n\n) -> ContractResult<Response> {\n\n Ok(Response::default())\n\n}\n", "file_path": "contracts/terranames_root_collector/src/contract.rs", "rank": 24, "score": 97556.78342339041 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn instantiate(\n\n deps: DepsMut,\n\n _env: Env,\n\n _info: MessageInfo,\n\n msg: InstantiateMsg,\n\n) -> ContractResult<Response> {\n\n let config = Config {\n\n base_token: deps.api.addr_validate(&msg.base_token)?,\n\n stable_denom: msg.stable_denom,\n\n unstake_delay: msg.unstake_delay,\n\n };\n\n\n\n store_config(deps.storage, &config)?;\n\n\n\n let state = State {\n\n multiplier: Decimal::zero(),\n\n total_staked: Uint128::zero(),\n\n residual: Uint128::zero(),\n\n };\n\n\n\n store_state(deps.storage, &state)?;\n\n\n\n Ok(Response::default())\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/contract.rs", "rank": 25, "score": 97556.78342339041 }, { "content": "#[cfg_attr(not(feature = \"library\"), entry_point)]\n\npub fn query(\n\n deps: Deps,\n\n env: Env,\n\n msg: QueryMsg,\n\n) -> ContractResult<QueryResponse> {\n\n match msg {\n\n QueryMsg::Config {} => {\n\n Ok(to_binary(&query_config(deps, env)?)?)\n\n },\n\n QueryMsg::State {} => {\n\n Ok(to_binary(&query_state(deps, env)?)?)\n\n },\n\n QueryMsg::StakeState { address } => {\n\n Ok(to_binary(&query_stake_state(\n\n deps,\n\n 
env,\n\n deps.api.addr_validate(&address)?,\n\n )?)?)\n\n },\n\n }\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/contract.rs", "rank": 26, "score": 97556.78342339041 }, { "content": "#[must_use]\n\npub fn store_state(\n\n storage: &mut dyn Storage,\n\n state: &State,\n\n) -> StdResult<()> {\n\n singleton(storage, STATE_KEY).save(state)\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema)]\n\npub struct StakeState {\n\n /// Staked amount\n\n pub staked_amount: Uint128,\n\n /// Unstaking amount\n\n pub unstaking_amount: Uint128,\n\n /// Unstaking begin time\n\n pub unstaking_begin_time: Option<Timestamp>,\n\n /// Pre-computed unstaked amount\n\n ///\n\n /// This does not include unstaked amount that has not yet been computed into state.\n\n pub unstaked_amount: Uint128,\n\n /// Initial multiplier\n", "file_path": "contracts/terranames_root_collector/src/state.rs", "rank": 27, "score": 95981.35127708653 }, { "content": "#[must_use]\n\npub fn store_name_value(\n\n storage: &mut dyn Storage,\n\n name: &str,\n\n value: Option<String>,\n\n) -> StdResult<()> {\n\n bucket(storage, VALUE_PREFIX).save(name.as_bytes(), &value)\n\n}\n", "file_path": "contracts/terranames_resolver/src/state.rs", "rank": 28, "score": 95981.35127708653 }, { "content": "pub fn mock_dependencies(\n\n contract_balance: &[Coin],\n\n) -> OwnedDeps<MockStorage, MockApi, MockQuerier> {\n\n let contract_addr = MOCK_CONTRACT_ADDR;\n\n let querier: MockQuerier = MockQuerier::new()\n\n .with_base_querier(CosmMockQuerier::new(&[(&contract_addr, contract_balance)]));\n\n\n\n OwnedDeps {\n\n storage: MockStorage::default(),\n\n api: MockApi::default(),\n\n querier: querier,\n\n }\n\n}\n\n\n\npub struct MockQuerier {\n\n pub tax_querier: TaxQuerier,\n\n pub base_querier: CosmMockQuerier<TerraQueryWrapper>,\n\n}\n\n\n\nimpl MockQuerier {\n", "file_path": "contracts/terranames_auction/src/mock_querier.rs", "rank": 30, "score": 95977.30100730652 }, { "content": "pub 
fn mock_dependencies(\n\n contract_balance: &[Coin],\n\n) -> OwnedDeps<MockStorage, MockApi, MockQuerier> {\n\n let contract_addr = MOCK_CONTRACT_ADDR;\n\n let querier: MockQuerier = MockQuerier::new()\n\n .with_base_querier(CosmMockQuerier::new(&[(&contract_addr, contract_balance)]));\n\n\n\n OwnedDeps {\n\n storage: MockStorage::default(),\n\n api: MockApi::default(),\n\n querier: querier,\n\n }\n\n}\n\n\n\npub struct MockQuerier {\n\n pub auction_querier: AuctionQuerier,\n\n pub base_querier: CosmMockQuerier<Empty>,\n\n}\n\n\n\nimpl MockQuerier {\n", "file_path": "contracts/terranames_resolver/src/mock_querier.rs", "rank": 32, "score": 95977.30100730652 }, { "content": "pub fn read_name_value(\n\n storage: &dyn Storage,\n\n name: &str,\n\n) -> StdResult<Option<String>> {\n\n bucket_read(storage, VALUE_PREFIX).load(name.as_bytes())\n\n}\n\n\n", "file_path": "contracts/terranames_resolver/src/state.rs", "rank": 33, "score": 95977.30100730652 }, { "content": "pub fn mock_dependencies(\n\n contract_balance: &[Coin],\n\n) -> OwnedDeps<MockStorage, MockApi, MockQuerier> {\n\n let contract_addr = MOCK_CONTRACT_ADDR;\n\n let querier: MockQuerier = MockQuerier::new()\n\n .with_base_querier(CosmMockQuerier::new(&[(&contract_addr, contract_balance)]));\n\n\n\n OwnedDeps {\n\n storage: MockStorage::default(),\n\n api: MockApi::default(),\n\n querier: querier,\n\n }\n\n}\n\n\n\npub struct MockQuerier {\n\n pub tax_querier: TaxQuerier,\n\n pub terranames_token_querier: Cw20Querier,\n\n pub base_querier: CosmMockQuerier<TerraQueryWrapper>,\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/mock_querier.rs", "rank": 36, "score": 94476.50016751868 }, { "content": "pub fn store_stake_state(\n\n storage: &mut dyn Storage,\n\n address: &Addr,\n\n stake_state: &StakeState,\n\n) -> StdResult<()> {\n\n bucket(storage, STAKE_STATE_PREFIX).save(address.as_ref().as_bytes(), stake_state)\n\n}\n", "file_path": "contracts/terranames_root_collector/src/state.rs", "rank": 37, 
"score": 94476.50016751868 }, { "content": "pub fn read_stake_state(\n\n storage: &dyn Storage,\n\n address: &Addr\n\n) -> StdResult<StakeState> {\n\n bucket_read(storage, STAKE_STATE_PREFIX)\n\n .load(address.as_ref().as_bytes())\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/state.rs", "rank": 38, "score": 94476.50016751868 }, { "content": "pub fn read_option_stake_state(\n\n storage: &dyn Storage,\n\n address: &Addr,\n\n) -> StdResult<Option<StakeState>> {\n\n bucket_read(storage, STAKE_STATE_PREFIX)\n\n .may_load(address.as_ref().as_bytes())\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/state.rs", "rank": 39, "score": 93048.64452364367 }, { "content": "/// Return the funds of type denom attached in the request.\n\nfn get_sent_funds(info: &MessageInfo, denom: &str) -> Uint128 {\n\n info.funds\n\n .iter()\n\n .find(|c| c.denom == denom)\n\n .map(|c| c.amount)\n\n .unwrap_or_else(Uint128::zero)\n\n}\n\n\n", "file_path": "contracts/terranames_auction/src/contract.rs", "rank": 40, "score": 80615.94476392378 }, { "content": "/// Return the funds of type denom attached in the request.\n\nfn get_sent_funds(info: &MessageInfo, denom: &str) -> Uint128 {\n\n info.funds\n\n .iter()\n\n .find(|c| c.denom == denom)\n\n .map(|c| c.amount)\n\n .unwrap_or_else(Uint128::zero)\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/contract.rs", "rank": 41, "score": 79419.42009832045 }, { "content": "fn query_config(\n\n deps: Deps,\n\n) -> ContractResult<ConfigResponse> {\n\n let config = read_config(deps.storage)?;\n\n\n\n Ok(ConfigResponse {\n\n collector_addr: config.collector_addr,\n\n stable_denom: config.stable_denom,\n\n min_lease_secs: config.min_lease_secs,\n\n max_lease_secs: config.max_lease_secs,\n\n counter_delay_secs: config.counter_delay_secs,\n\n transition_delay_secs: config.transition_delay_secs,\n\n bid_delay_secs: config.bid_delay_secs,\n\n })\n\n}\n\n\n", "file_path": 
"contracts/terranames_auction/src/contract.rs", "rank": 43, "score": 76513.60700320032 }, { "content": "fn query_config(\n\n deps: Deps,\n\n _env: Env,\n\n) -> ContractResult<ConfigResponse> {\n\n let config = read_config(deps.storage)?;\n\n\n\n Ok(ConfigResponse {\n\n auction_contract: config.auction_contract.into(),\n\n })\n\n}\n\n\n", "file_path": "contracts/terranames_resolver/src/contract.rs", "rank": 44, "score": 76513.60700320032 }, { "content": "#[test]\n\nfn set_value_to_string() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(res.messages.len(), 0);\n\n\n\n deps.querier.auction_querier.response = Some(NameStateResponse {\n\n name_owner: Some(Addr::unchecked(\"owner\")),\n\n bid_owner: Some(Addr::unchecked(\"owner\")),\n\n controller: Some(Addr::unchecked(\"controller\")),\n\n\n\n rate: Uint128::from(100u64),\n\n begin_time: Timestamp::from_seconds(100_000),\n\n begin_deposit: Uint128::from(1000u64),\n\n current_deposit: Uint128::from(965u64),\n\n\n", "file_path": "contracts/terranames_resolver/src/tests.rs", "rank": 45, "score": 74980.36625626242 }, { "content": "fn query_config(\n\n deps: Deps,\n\n _env: Env,\n\n) -> ContractResult<ConfigResponse> {\n\n let config = read_config(deps.storage)?;\n\n Ok(ConfigResponse {\n\n base_token: config.base_token.into(),\n\n stable_denom: config.stable_denom,\n\n unstake_delay: config.unstake_delay,\n\n })\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/contract.rs", "rank": 46, "score": 74832.64724251778 }, { "content": "/// Builder for creating a single bid\n\nstruct Bid<'a> {\n\n name: &'a str,\n\n bidder: &'a str,\n\n timestamp: u64,\n\n rate: u128,\n\n deposit: u128,\n\n}\n\n\n\nimpl<'a> Bid<'a> {\n\n fn on(name: &'a str, bidder: &'a str, timestamp: u64) -> Bid<'a> {\n\n Bid {\n\n name,\n\n bidder,\n\n 
timestamp,\n\n rate: 0,\n\n deposit: 0,\n\n }\n\n }\n\n\n\n fn rate(self, rate: u128) -> Bid<'a> {\n", "file_path": "contracts/terranames_auction/src/tests.rs", "rank": 47, "score": 60950.23856777164 }, { "content": "#[must_use]\n\nstruct NameStateAsserter<'a> {\n\n name: &'a str,\n\n\n\n name_owner: Option<Option<&'a str>>,\n\n bid_owner: Option<Option<&'a str>>,\n\n controller: Option<Option<&'a str>>,\n\n\n\n rate: Option<u128>,\n\n begin_time: Option<u64>,\n\n begin_deposit: Option<u128>,\n\n current_deposit: Option<u128>,\n\n\n\n counter_delay_end: Option<u64>,\n\n transition_delay_end: Option<u64>,\n\n bid_delay_end: Option<u64>,\n\n expire_time: Option<Option<u64>>,\n\n}\n\n\n\nimpl<'a> NameStateAsserter<'a> {\n\n /// Create NameStateAsserter for asserting state of name\n", "file_path": "contracts/terranames_auction/src/tests.rs", "rank": 48, "score": 59141.68243193663 }, { "content": "/// Helper trait for modifying Env\n\npub trait EnvBuilder {\n\n fn at_time(self, timestamp: u64) -> Self;\n\n}\n\n\n\nimpl EnvBuilder for Env {\n\n /// Set block time for Env\n\n fn at_time(mut self, timestamp: u64) -> Self {\n\n self.block.time = Timestamp::from_seconds(timestamp).into();\n\n self\n\n }\n\n}\n", "file_path": "packages/terranames/src/testing/helpers.rs", "rank": 49, "score": 56614.604182331444 }, { "content": "fn main() {\n\n let mut out_dir = current_dir().unwrap();\n\n out_dir.push(\"schema\");\n\n create_dir_all(&out_dir).unwrap();\n\n remove_schemas(&out_dir).unwrap();\n\n\n\n //export_schema(&schema_for!(Token), &out_dir);\n\n}\n", "file_path": "packages/terranames/examples/schema.rs", "rank": 50, "score": 44743.1329600567 }, { "content": "fn main() {\n\n let mut out_dir = current_dir().unwrap();\n\n out_dir.push(\"schema\");\n\n create_dir_all(&out_dir).unwrap();\n\n remove_schemas(&out_dir).unwrap();\n\n\n\n export_schema(&schema_for!(InstantiateMsg), &out_dir);\n\n export_schema(&schema_for!(ExecuteMsg), &out_dir);\n\n 
export_schema(&schema_for!(QueryMsg), &out_dir);\n\n\n\n export_schema(&schema_for!(ConfigResponse), &out_dir);\n\n export_schema(&schema_for!(ResolveNameResponse), &out_dir);\n\n}\n", "file_path": "contracts/terranames_resolver/examples/schema.rs", "rank": 51, "score": 43848.053085628475 }, { "content": "fn main() {\n\n let mut out_dir = current_dir().unwrap();\n\n out_dir.push(\"schema\");\n\n create_dir_all(&out_dir).unwrap();\n\n remove_schemas(&out_dir).unwrap();\n\n\n\n export_schema(&schema_for!(InstantiateMsg), &out_dir);\n\n export_schema(&schema_for!(ExecuteMsg), &out_dir);\n\n export_schema(&schema_for!(QueryMsg), &out_dir);\n\n\n\n export_schema(&schema_for!(NameStateResponse), &out_dir);\n\n}\n", "file_path": "contracts/terranames_auction/examples/schema.rs", "rank": 52, "score": 43848.053085628475 }, { "content": "#[test]\n\nfn proper_initialization() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n // we can just call .unwrap() to assert this was a success\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(0, res.messages.len());\n\n\n\n // it worked, let's query the state\n\n let env = mock_env();\n\n let res = query(deps.as_ref(), env, QueryMsg::Config {}).unwrap();\n\n let config: ConfigResponse = from_binary(&res).unwrap();\n\n assert_eq!(config.collector_addr.as_str(), \"collector\");\n\n assert_eq!(config.stable_denom.as_str(), ABC_COIN);\n\n assert_eq!(config.min_lease_secs, Timedelta::from_seconds(15_778_476));\n\n assert_eq!(config.max_lease_secs, Timedelta::from_seconds(157_784_760));\n\n assert_eq!(config.counter_delay_secs, Timedelta::from_seconds(604_800));\n\n assert_eq!(config.transition_delay_secs, Timedelta::from_seconds(1_814_400));\n\n assert_eq!(config.bid_delay_secs, Timedelta::from_seconds(15_778_476));\n\n}\n\n\n", "file_path": "contracts/terranames_auction/src/tests.rs", "rank": 53, 
"score": 43001.162209149166 }, { "content": "#[test]\n\nfn proper_initialization() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(res.messages.len(), 0);\n\n\n\n // it worked, let's query the state\n\n let env = mock_env();\n\n let res = query(deps.as_ref(), env, QueryMsg::Config {}).unwrap();\n\n let config: ConfigResponse = from_binary(&res).unwrap();\n\n assert_eq!(config.auction_contract.as_str(), \"auction\");\n\n}\n\n\n", "file_path": "contracts/terranames_resolver/src/tests.rs", "rank": 54, "score": 43001.162209149166 }, { "content": "fn execute_bid(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n name: String,\n\n rate: Uint128,\n\n) -> ContractResult<Response> {\n\n if let Some(name_state) = read_option_name_state(deps.storage, &name)? {\n\n let config = read_config(deps.storage)?;\n\n let owner_status = name_state.owner_status(&config, env.block.time.into());\n\n match owner_status {\n\n OwnerStatus::Valid { owner, transition_reference_time } |\n\n OwnerStatus::TransitionDelay { owner, transition_reference_time } => {\n\n execute_bid_existing(\n\n deps, env, info, name, rate, config, name_state, Some(owner),\n\n transition_reference_time,\n\n )\n\n },\n\n OwnerStatus::CounterDelay { name_owner: owner, transition_reference_time, .. 
} => {\n\n execute_bid_existing(\n", "file_path": "contracts/terranames_auction/src/contract.rs", "rank": 55, "score": 43001.162209149166 }, { "content": "#[test]\n\nfn transfer_owner() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(0, res.messages.len());\n\n\n\n let bid_time = 1234;\n\n let deposit_amount: u128 = 30_000;\n\n let res = Bid::on(\"example\", \"bidder\", bid_time)\n\n .deposit(deposit_amount)\n\n .rate(123)\n\n .execute(deps.as_mut())\n\n .unwrap();\n\n assert_eq!(res.messages.len(), 1);\n\n\n\n // Ownership transferred\n", "file_path": "contracts/terranames_auction/src/tests.rs", "rank": 56, "score": 43001.162209149166 }, { "content": "fn execute_fund(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n name: String,\n\n owner: Addr,\n\n) -> ContractResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n let msg_deposit = get_sent_funds(&info, &config.stable_denom);\n\n let mut name_state = read_name_state(deps.storage, &name)?;\n\n\n\n if msg_deposit.is_zero() {\n\n return Unfunded.fail();\n\n }\n\n\n\n let owner_canonical = owner;\n\n if name_state.owner != owner_canonical {\n\n return UnexpectedState.fail();\n\n }\n\n\n", "file_path": "contracts/terranames_auction/src/contract.rs", "rank": 57, "score": 43001.162209149166 }, { "content": "#[test]\n\nfn fund_name() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(0, res.messages.len());\n\n\n\n let bid_time = 1234;\n\n let deposit_amount: u128 = 30_000;\n\n let res = Bid::on(\"example\", \"bidder\", bid_time)\n\n .deposit(deposit_amount)\n\n .rate(123)\n\n .execute(deps.as_mut())\n\n .unwrap();\n\n 
assert_eq!(res.messages.len(), 1);\n\n\n\n // Funding an owned name is possible up to the max lease limit.\n", "file_path": "contracts/terranames_auction/src/tests.rs", "rank": 58, "score": 43001.162209149166 }, { "content": "fn main() {\n\n let mut out_dir = current_dir().unwrap();\n\n out_dir.push(\"schema\");\n\n create_dir_all(&out_dir).unwrap();\n\n remove_schemas(&out_dir).unwrap();\n\n\n\n export_schema(&schema_for!(InstantiateMsg), &out_dir);\n\n export_schema(&schema_for!(ExecuteMsg), &out_dir);\n\n export_schema(&schema_for!(QueryMsg), &out_dir);\n\n\n\n export_schema(&schema_for!(ConfigResponse), &out_dir);\n\n export_schema(&schema_for!(StateResponse), &out_dir);\n\n}\n", "file_path": "contracts/terranames_root_collector/examples/schema.rs", "rank": 59, "score": 43001.162209149166 }, { "content": "fn query_resolve(\n\n deps: Deps,\n\n _env: Env,\n\n name: String,\n\n) -> ContractResult<ResolveNameResponse> {\n\n let config = read_config(deps.storage)?;\n\n let name_state = query_name_state(\n\n &deps.querier,\n\n &config.auction_contract,\n\n &name,\n\n )?;\n\n let name_value = read_name_value(deps.storage, &name)?;\n\n\n\n let owner = name_state.name_owner.context(NameExpired {})?;\n\n\n\n Ok(ResolveNameResponse {\n\n value: name_value,\n\n owner,\n\n expire_time: name_state.expire_time,\n\n })\n\n}\n\n\n", "file_path": "contracts/terranames_resolver/src/contract.rs", "rank": 60, "score": 43001.162209149166 }, { "content": "/// Create message for refund deposits\n\n///\n\n/// Idea: Store refunds in this contract instead of sending them back\n\n/// immediately, in order to avoid repeated tax on transfers. Instead users can\n\n/// use the refund balance in calls needing funds. 
Also need a separate call to\n\n/// actually send the refund balance back.\n\nfn refund_deposit_msg(\n\n querier: &QuerierWrapper,\n\n _env: &Env,\n\n config: &Config,\n\n to: &Addr,\n\n amount: Uint128,\n\n) -> StdResult<CosmosMsg> {\n\n Ok(CosmosMsg::Bank(\n\n BankMsg::Send {\n\n to_address: to.into(),\n\n amount: vec![\n\n deduct_coin_tax(\n\n querier,\n\n Coin {\n\n denom: config.stable_denom.clone(),\n\n amount,\n\n },\n\n )?\n\n ],\n\n }\n\n ))\n\n}\n\n\n", "file_path": "contracts/terranames_auction/src/contract.rs", "rank": 61, "score": 42203.88247667128 }, { "content": "fn execute_set_controller(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n name: String,\n\n controller: Addr,\n\n) -> ContractResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n let mut name_state = read_name_state(deps.storage, &name)?;\n\n let sender_canonical = info.sender;\n\n let owner_status = name_state.owner_status(&config, env.block.time.into());\n\n\n\n if !owner_status.can_set_controller(&sender_canonical) {\n\n return Unauthorized.fail();\n\n }\n\n\n\n name_state.controller = Some(controller.clone());\n\n store_name_state(deps.storage, &name, &name_state)?;\n\n\n\n Ok(Response::new()\n\n .add_attribute(\"action\", \"set_controller\")\n\n .add_attribute(\"controller\", controller)\n\n )\n\n}\n\n\n", "file_path": "contracts/terranames_auction/src/contract.rs", "rank": 62, "score": 42198.67076752195 }, { "content": "fn execute_bid_existing(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n name: String,\n\n rate: Uint128,\n\n config: Config,\n\n mut name_state: NameState,\n\n owner: Option<Addr>,\n\n transition_reference_time: Timestamp,\n\n) -> ContractResult<Response> {\n\n if info.sender == name_state.owner {\n\n return Unauthorized.fail();\n\n }\n\n\n\n let seconds_spent_since_bid = match name_state.seconds_spent_since_bid(env.block.time.into()) {\n\n Some(seconds_spent) => seconds_spent,\n\n None => panic!(\"Invalid block time\"),\n\n 
};\n\n\n", "file_path": "contracts/terranames_auction/src/contract.rs", "rank": 63, "score": 42198.67076752195 }, { "content": "#[test]\n\nfn set_higher_rate() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(0, res.messages.len());\n\n\n\n let bid_time = 1234;\n\n let deposit_amount: u128 = 30_000;\n\n let res = Bid::on(\"example\", \"bidder\", bid_time)\n\n .deposit(deposit_amount)\n\n .rate(123)\n\n .execute(deps.as_mut())\n\n .unwrap();\n\n assert_eq!(res.messages.len(), 1);\n\n\n\n // Owner submits requests to increase the charged rate\n", "file_path": "contracts/terranames_auction/src/tests.rs", "rank": 64, "score": 42198.67076752195 }, { "content": "#[test]\n\nfn set_value_as_other_fails() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(res.messages.len(), 0);\n\n\n\n deps.querier.auction_querier.response = Some(NameStateResponse {\n\n bid_owner: Some(Addr::unchecked(\"owner\")),\n\n name_owner: Some(Addr::unchecked(\"owner\")),\n\n controller: Some(Addr::unchecked(\"controller\")),\n\n\n\n rate: Uint128::zero(),\n\n begin_time: Timestamp::from_seconds(100_000),\n\n begin_deposit: Uint128::zero(),\n\n current_deposit: Uint128::zero(),\n\n\n", "file_path": "contracts/terranames_resolver/src/tests.rs", "rank": 65, "score": 42198.67076752195 }, { "content": "#[test]\n\nfn bid_on_expired_name() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(0, res.messages.len());\n\n\n\n // Initial bid\n\n let bid_1_time = 
1234;\n\n let deposit_amount = 30_000;\n\n let res = Bid::on(\"example\", \"bidder_1\", bid_1_time)\n\n .deposit(deposit_amount)\n\n .rate(123)\n\n .execute(deps.as_mut())\n\n .unwrap();\n\n assert_eq!(res.messages.len(), 1);\n\n\n", "file_path": "contracts/terranames_auction/src/tests.rs", "rank": 66, "score": 42198.67076752195 }, { "content": "#[test]\n\nfn withdraw_tokens() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(res.messages.len(), 0);\n\n\n\n // Staker 1 stakes tokens\n\n let stake_1_amount: u128 = 9_122_993;\n\n let env = mock_env();\n\n let info = mock_info(\"token_contract\", &[]);\n\n let res = execute(deps.as_mut(), env, info, ExecuteMsg::Receive(Cw20ReceiveMsg {\n\n amount: Uint128::from(stake_1_amount),\n\n sender: \"staker_1\".into(),\n\n msg: to_binary(&ReceiveMsg::Stake { }).unwrap(),\n\n })).unwrap();\n\n assert_eq!(res.messages.len(), 0);\n", "file_path": "contracts/terranames_root_collector/src/tests.rs", "rank": 67, "score": 42198.67076752195 }, { "content": "fn query_name_state(\n\n deps: Deps,\n\n env: Env,\n\n name: String,\n\n) -> ContractResult<NameStateResponse> {\n\n let config = read_config(deps.storage)?;\n\n let name_state = read_name_state(deps.storage, &name)?;\n\n\n\n Ok(create_name_state_response(&config, env.block.time.into(), &name_state))\n\n}\n\n\n", "file_path": "contracts/terranames_auction/src/contract.rs", "rank": 68, "score": 42198.67076752195 }, { "content": "#[test]\n\nfn proper_initialization() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(res.messages.len(), 0);\n\n\n\n // it worked, let's query the config\n\n let env = mock_env();\n\n let res 
= query(deps.as_ref(), env, QueryMsg::Config {}).unwrap();\n\n let config: ConfigResponse = from_binary(&res).unwrap();\n\n assert_eq!(config.base_token.as_str(), \"token_contract\");\n\n assert_eq!(config.stable_denom, \"uabc\");\n\n assert_eq!(config.unstake_delay.value(), 1_814_400);\n\n\n\n let env = mock_env();\n\n let res = query(deps.as_ref(), env, QueryMsg::State {}).unwrap();\n\n let state: StateResponse = from_binary(&res).unwrap();\n\n assert_eq!(state.multiplier, Decimal::zero());\n\n assert_eq!(state.residual, Uint128::zero());\n\n assert_eq!(state.total_staked, Uint128::zero());\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/tests.rs", "rank": 69, "score": 42198.67076752195 }, { "content": "fn execute_set_rate(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n name: String,\n\n rate: Uint128,\n\n) -> ContractResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n let mut name_state = read_name_state(deps.storage, &name)?;\n\n let sender_canonical = info.sender;\n\n let owner_status = name_state.owner_status(&config, env.block.time.into());\n\n\n\n if !owner_status.can_set_rate(&sender_canonical) {\n\n return Unauthorized.fail();\n\n }\n\n\n\n // Always round up spent deposit to avoid charging too little.\n\n let seconds_spent = Timestamp::from(env.block.time).checked_sub(name_state.begin_time)?;\n\n let spent_deposit = deposit_from_seconds_ceil(seconds_spent, name_state.rate);\n\n let new_deposit = name_state.begin_deposit.saturating_sub(spent_deposit); // TODO <-- add test for this: last block spends slightly more than total deposit\n", "file_path": "contracts/terranames_auction/src/contract.rs", "rank": 70, "score": 42198.67076752195 }, { "content": "#[test]\n\nfn bid_three_bidders() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n 
assert_eq!(0, res.messages.len());\n\n\n\n // Initial bid\n\n let bid_1_time = 1234;\n\n let deposit_amount = 30_000;\n\n let res = Bid::on(\"example\", \"bidder_1\", bid_1_time)\n\n .deposit(deposit_amount)\n\n .rate(123)\n\n .execute(deps.as_mut())\n\n .unwrap();\n\n assert_eq!(res.messages.len(), 1);\n\n\n", "file_path": "contracts/terranames_auction/src/tests.rs", "rank": 71, "score": 42198.67076752195 }, { "content": "#[test]\n\nfn multiple_stakers() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(res.messages.len(), 0);\n\n\n\n // Staker 1 stakes tokens\n\n let stake_1_amount: u128 = 9_122_993;\n\n\n\n let env = mock_env();\n\n let info = mock_info(\"token_contract\", &[]);\n\n let res = execute(deps.as_mut(), env, info, ExecuteMsg::Receive(Cw20ReceiveMsg {\n\n amount: Uint128::from(stake_1_amount),\n\n sender: \"staker_1\".into(),\n\n msg: to_binary(&ReceiveMsg::Stake { }).unwrap(),\n\n })).unwrap();\n", "file_path": "contracts/terranames_root_collector/src/tests.rs", "rank": 72, "score": 42198.67076752195 }, { "content": "#[test]\n\nfn withdraw_dividends() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(res.messages.len(), 0);\n\n\n\n // Staker 1 stakes tokens\n\n let stake_1_amount: u128 = 9_122_993;\n\n let env = mock_env();\n\n let info = mock_info(\"token_contract\", &[]);\n\n let res = execute(deps.as_mut(), env, info, ExecuteMsg::Receive(Cw20ReceiveMsg {\n\n amount: Uint128::from(stake_1_amount),\n\n sender: \"staker_1\".into(),\n\n msg: to_binary(&ReceiveMsg::Stake { }).unwrap(),\n\n })).unwrap();\n\n assert_eq!(res.messages.len(), 0);\n", "file_path": 
"contracts/terranames_root_collector/src/tests.rs", "rank": 73, "score": 42198.67076752195 }, { "content": "fn query_state(\n\n deps: Deps,\n\n _env: Env,\n\n) -> ContractResult<StateResponse> {\n\n let state = read_state(deps.storage)?;\n\n Ok(StateResponse {\n\n multiplier: state.multiplier,\n\n total_staked: state.total_staked,\n\n residual: state.residual,\n\n })\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/contract.rs", "rank": 74, "score": 42198.67076752195 }, { "content": "#[test]\n\nfn stake_tokens() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(res.messages.len(), 0);\n\n\n\n let stake_amount: u128 = 9_122_993;\n\n\n\n let env = mock_env();\n\n let info = mock_info(\"token_contract\", &[]);\n\n let res = execute(deps.as_mut(), env, info, ExecuteMsg::Receive(Cw20ReceiveMsg {\n\n amount: Uint128::from(stake_amount),\n\n sender: \"staker\".into(),\n\n msg: to_binary(&ReceiveMsg::Stake { }).unwrap(),\n\n })).unwrap();\n\n assert_eq!(res.messages.len(), 0);\n", "file_path": "contracts/terranames_root_collector/src/tests.rs", "rank": 75, "score": 42198.67076752195 }, { "content": "#[test]\n\nfn initial_zero_bid() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(0, res.messages.len());\n\n\n\n let bid_time = 1234;\n\n let res = Bid::on(\"example\", \"bidder\", bid_time)\n\n .execute(deps.as_mut())\n\n .unwrap();\n\n assert_eq!(res.messages.len(), 0);\n\n\n\n NameStateAsserter::new(\"example\")\n\n .name_owner(None)\n\n .bid_owner(Some(\"bidder\"))\n\n .controller(None)\n\n .rate(0)\n\n .begin_time(bid_time)\n\n .begin_deposit(0)\n\n .counter_delay_end(1234 + 604800)\n\n 
.transition_delay_end(1234)\n\n .bid_delay_end(1234)\n\n .expire_time(None)\n\n .assert(deps.as_ref(), bid_time);\n\n}\n\n\n", "file_path": "contracts/terranames_auction/src/tests.rs", "rank": 76, "score": 42198.67076752195 }, { "content": "fn execute_receive(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n wrapper: Cw20ReceiveMsg,\n\n) -> ContractResult<Response> {\n\n let msg: ReceiveMsg = from_binary(&wrapper.msg)?;\n\n\n\n let config = read_config(deps.storage)?;\n\n\n\n if info.sender != config.base_token {\n\n return Unauthorized.fail();\n\n }\n\n\n\n match msg {\n\n ReceiveMsg::Stake {} => {\n\n execute_receive_stake(deps, wrapper)\n\n },\n\n }\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/contract.rs", "rank": 77, "score": 42198.67076752195 }, { "content": "#[test]\n\nfn query_all_name_states() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(0, res.messages.len());\n\n\n\n // First bid\n\n let bid_1_time = 1234;\n\n let deposit_amount: u128 = 5_670;\n\n let res = Bid::on(\"example\", \"bidder_1\", bid_1_time)\n\n .deposit(deposit_amount)\n\n .rate(30)\n\n .execute(deps.as_mut())\n\n .unwrap();\n\n assert_eq!(res.messages.len(), 1);\n\n\n", "file_path": "contracts/terranames_auction/src/tests.rs", "rank": 78, "score": 42198.67076752195 }, { "content": "/// Create message for sending deposits to collector\n\nfn send_to_collector_msg(\n\n querier: &QuerierWrapper,\n\n _env: &Env,\n\n config: &Config,\n\n _source_addr: &Addr,\n\n amount: Uint128,\n\n) -> StdResult<CosmosMsg> {\n\n Ok(CosmosMsg::Wasm(\n\n WasmMsg::Execute {\n\n contract_addr: config.collector_addr.to_string(),\n\n msg: to_binary(&RootCollectorExecuteMsg::Deposit {})?,\n\n funds: vec![\n\n deduct_coin_tax(\n\n querier,\n\n Coin {\n\n denom: config.stable_denom.clone(),\n\n 
amount,\n\n },\n\n )?\n\n ],\n\n }\n\n ))\n\n}\n\n\n", "file_path": "contracts/terranames_auction/src/contract.rs", "rank": 79, "score": 42198.67076752195 }, { "content": "fn execute_transfer_owner(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n name: String,\n\n to: Addr,\n\n) -> ContractResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n let mut name_state = read_name_state(deps.storage, &name)?;\n\n let sender_canonical = info.sender;\n\n let owner_status = name_state.owner_status(&config, env.block.time.into());\n\n\n\n let new_owner = to;\n\n\n\n if owner_status.can_transfer_name_owner(&sender_canonical) {\n\n match owner_status {\n\n // In the counter-delay state, the current owner is determined by\n\n // previous_owner since owner is the current highest bid holder.\n\n OwnerStatus::CounterDelay { .. } => {\n\n name_state.previous_owner = Some(new_owner.clone());\n", "file_path": "contracts/terranames_auction/src/contract.rs", "rank": 80, "score": 42198.67076752195 }, { "content": "#[test]\n\nfn set_value_to_none() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(res.messages.len(), 0);\n\n\n\n deps.querier.auction_querier.response = Some(NameStateResponse {\n\n bid_owner: Some(Addr::unchecked(\"owner\")),\n\n name_owner: Some(Addr::unchecked(\"owner\")),\n\n controller: Some(Addr::unchecked(\"controller\")),\n\n\n\n rate: Uint128::from(100u64),\n\n begin_time: Timestamp::from_seconds(100_000),\n\n begin_deposit: Uint128::from(1000u64),\n\n current_deposit: Uint128::from(965u64),\n\n\n", "file_path": "contracts/terranames_resolver/src/tests.rs", "rank": 81, "score": 42198.67076752195 }, { "content": "fn query_all_name_states(\n\n deps: Deps,\n\n env: Env,\n\n start_after: Option<String>,\n\n limit: Option<u32>,\n\n) -> 
ContractResult<AllNameStatesResponse> {\n\n let config = read_config(deps.storage)?;\n\n let name_states = collect_name_states(\n\n deps.storage,\n\n start_after.as_deref(),\n\n limit,\n\n )?;\n\n\n\n let names: Vec<NameStateItem> = name_states.into_iter().map(|(name, name_state)| {\n\n let state = create_name_state_response(\n\n &config, env.block.time.into(), &name_state,\n\n );\n\n\n\n Ok(NameStateItem {\n\n name,\n\n state,\n\n })\n\n }).collect::<StdResult<Vec<_>>>()?;\n\n\n\n Ok(AllNameStatesResponse {\n\n names,\n\n })\n\n}\n\n\n", "file_path": "contracts/terranames_auction/src/contract.rs", "rank": 82, "score": 42198.67076752195 }, { "content": "fn execute_deposit(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n) -> ContractResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n let mut state = read_state(deps.storage)?;\n\n\n\n let deposit = get_sent_funds(&info, &config.stable_denom) + state.residual;\n\n let (deposit_per_stake, residual) = if !state.total_staked.is_zero() {\n\n let deposit_per_stake = Decimal::from_ratio(deposit, state.total_staked);\n\n let residual = deposit.checked_sub(deposit_per_stake * state.total_staked)?;\n\n (deposit_per_stake, residual)\n\n } else {\n\n (Decimal::zero(), deposit)\n\n };\n\n\n\n state.multiplier = state.multiplier + deposit_per_stake;\n\n state.residual = residual;\n\n\n\n store_state(deps.storage, &state)?;\n\n\n\n Ok(Response::new()\n\n .add_attribute(\"action\", \"deposit\")\n\n .add_attribute(\"amount\", deposit)\n\n .add_attribute(\"multiplier\", state.multiplier.to_string())\n\n .add_attribute(\"residual\", state.residual)\n\n )\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/contract.rs", "rank": 83, "score": 42198.67076752195 }, { "content": "fn execute_bid_new(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n name: String,\n\n rate: Uint128,\n\n transition_reference_time: Timestamp,\n\n) -> ContractResult<Response> {\n\n let config = 
read_config(deps.storage)?;\n\n let msg_deposit = get_sent_funds(&info, &config.stable_denom);\n\n let begin_time = env.block.time.into();\n\n\n\n let min_deposit = deposit_from_seconds_ceil(config.min_lease_secs, rate);\n\n let max_deposit = deposit_from_seconds_floor(config.max_lease_secs, rate);\n\n if msg_deposit < min_deposit || msg_deposit > max_deposit {\n\n return BidInvalidInterval.fail();\n\n }\n\n\n\n let name_state = NameState {\n\n owner: info.sender.clone(),\n", "file_path": "contracts/terranames_auction/src/contract.rs", "rank": 84, "score": 42198.67076752195 }, { "content": "fn execute_set_value(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n name: String,\n\n value: Option<String>,\n\n) -> ContractResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n let name_state = query_name_state(\n\n &deps.querier,\n\n &config.auction_contract,\n\n &name,\n\n )?;\n\n\n\n // ensure name controller permission\n\n if let Some(controller) = name_state.controller {\n\n if info.sender != controller {\n\n return Unauthorized.fail();\n\n }\n\n } else {\n", "file_path": "contracts/terranames_resolver/src/contract.rs", "rank": 85, "score": 42198.67076752195 }, { "content": "#[test]\n\nfn set_lower_rate() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(0, res.messages.len());\n\n\n\n let bid_time = 1234;\n\n let deposit_amount: u128 = 30_000;\n\n let res = Bid::on(\"example\", \"bidder\", bid_time)\n\n .deposit(deposit_amount)\n\n .rate(123)\n\n .execute(deps.as_mut())\n\n .unwrap();\n\n assert_eq!(res.messages.len(), 1);\n\n\n\n // Owner submits requests to decrease the charged rate\n", "file_path": "contracts/terranames_auction/src/tests.rs", "rank": 86, "score": 42198.67076752195 }, { "content": "#[test]\n\nfn bid_on_existing_name_as_owner() {\n\n let mut 
deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(0, res.messages.len());\n\n\n\n let bid_1_time: u64 = 1234;\n\n let deposit_amount = 30_000;\n\n let res = Bid::on(\"example\", \"bidder_1\", bid_1_time)\n\n .deposit(deposit_amount)\n\n .rate(123)\n\n .execute(deps.as_mut())\n\n .unwrap();\n\n assert_eq!(res.messages.len(), 1);\n\n\n\n // Bid on the name as the currentq owner. Not allowed.\n", "file_path": "contracts/terranames_auction/src/tests.rs", "rank": 87, "score": 41437.17639424614 }, { "content": "fn execute_withdraw_dividends(\n\n deps: DepsMut,\n\n _env: Env,\n\n info: MessageInfo,\n\n to: Option<Addr>,\n\n) -> ContractResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n let state = read_state(deps.storage)?;\n\n\n\n let opt_stake_state = read_option_stake_state(deps.storage, &info.sender)?;\n\n let mut stake_state = if let Some(stake_state) = opt_stake_state {\n\n stake_state\n\n } else {\n\n return InsufficientFunds.fail();\n\n };\n\n\n\n stake_state.update_dividend(state.multiplier);\n\n if stake_state.dividend.is_zero() {\n\n return InsufficientFunds.fail();\n\n }\n", "file_path": "contracts/terranames_root_collector/src/contract.rs", "rank": 88, "score": 41437.17639424614 }, { "content": "/// Create message for dividend deposits\n\nfn send_dividend_msg(\n\n querier: &QuerierWrapper,\n\n config: &Config,\n\n to: &Addr,\n\n amount: Uint128,\n\n) -> StdResult<CosmosMsg> {\n\n Ok(CosmosMsg::Bank(\n\n BankMsg::Send {\n\n to_address: to.into(),\n\n amount: vec![\n\n deduct_coin_tax(\n\n querier,\n\n Coin {\n\n denom: config.stable_denom.clone(),\n\n amount,\n\n },\n\n )?\n\n ],\n\n }\n\n ))\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/contract.rs", "rank": 89, "score": 41437.17639424614 }, { "content": "#[test]\n\nfn bid_on_expired_name_in_transition() 
{\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n // Change min lease blocks so the name can expire during a transition.\n\n let mut msg = default_init();\n\n msg.min_lease_secs = msg.counter_delay_secs;\n\n msg.bid_delay_secs = Timedelta::from_seconds(50_000);\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(0, res.messages.len());\n\n\n\n // Initial bid\n\n let bid_1_time = 1234;\n\n let deposit_amount = 30_000;\n\n let res = Bid::on(\"example\", \"bidder_1\", bid_1_time)\n\n .deposit(deposit_amount)\n\n .rate(123)\n\n .execute(deps.as_mut())\n", "file_path": "contracts/terranames_auction/src/tests.rs", "rank": 90, "score": 41437.17639424614 }, { "content": "/// Create message for sending tokens\n\nfn send_tokens_msg(\n\n config: &Config,\n\n recipient: &Addr,\n\n amount: Uint128,\n\n) -> StdResult<CosmosMsg> {\n\n Ok(CosmosMsg::Wasm(\n\n WasmMsg::Execute {\n\n contract_addr: config.base_token.clone().into(),\n\n msg: to_binary(&Cw20ExecuteMsg::Transfer {\n\n recipient: recipient.into(),\n\n amount,\n\n })?,\n\n funds: vec![],\n\n }\n\n ))\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/contract.rs", "rank": 91, "score": 41437.17639424614 }, { "content": "#[test]\n\nfn transfer_owner_during_counter_bid() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(0, res.messages.len());\n\n\n\n let bid_1_time = 1234;\n\n let deposit_amount: u128 = 30_000;\n\n let res = Bid::on(\"example\", \"bidder_1\", bid_1_time)\n\n .deposit(deposit_amount)\n\n .rate(123)\n\n .execute(deps.as_mut())\n\n .unwrap();\n\n assert_eq!(res.messages.len(), 1);\n\n\n\n // Another bid occurs following the bid delay\n", "file_path": "contracts/terranames_auction/src/tests.rs", "rank": 92, 
"score": 41437.17639424614 }, { "content": "fn execute_unstake_tokens(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n amount: Uint128,\n\n) -> ContractResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n let mut state = read_state(deps.storage)?;\n\n\n\n let opt_stake_state = read_option_stake_state(deps.storage, &info.sender)?;\n\n let mut stake_state = if let Some(stake_state) = opt_stake_state {\n\n stake_state\n\n } else {\n\n return InsufficientTokens.fail();\n\n };\n\n\n\n if stake_state.staked_amount < amount {\n\n return InsufficientTokens.fail();\n\n }\n\n\n", "file_path": "contracts/terranames_root_collector/src/contract.rs", "rank": 93, "score": 41437.17639424614 }, { "content": "#[test]\n\nfn initial_non_zero_bid() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(0, res.messages.len());\n\n\n\n let bid_time = 1234;\n\n let deposit_amount: u128 = 1_230_000;\n\n let res = Bid::on(\"example\", \"bidder\", bid_time)\n\n .deposit(deposit_amount)\n\n .rate(4_513)\n\n .execute(deps.as_mut())\n\n .unwrap();\n\n assert_eq!(res.messages.len(), 1);\n\n\n\n // The tax needed to be withheld from 1_230_000 at the rate of 0.405%.\n", "file_path": "contracts/terranames_auction/src/tests.rs", "rank": 94, "score": 41437.17639424614 }, { "content": "#[test]\n\nfn fund_unclaimed_name_fails() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(0, res.messages.len());\n\n\n\n let env = mock_env();\n\n let info = mock_info(\"funder\", &coins(1_000_000, ABC_COIN));\n\n let res = execute(deps.as_mut(), env, info, ExecuteMsg::FundName {\n\n name: \"example\".into(),\n\n owner: 
\"owner\".into(),\n\n });\n\n assert!(matches!(res, Err(ContractError::Std { .. })));\n\n}\n\n\n", "file_path": "contracts/terranames_auction/src/tests.rs", "rank": 95, "score": 41437.17639424614 }, { "content": "fn execute_receive_stake(\n\n deps: DepsMut,\n\n wrapper: Cw20ReceiveMsg,\n\n) -> ContractResult<Response> {\n\n let mut state = read_state(deps.storage)?;\n\n\n\n let token_sender = deps.api.addr_validate(&wrapper.sender)?;\n\n let opt_stake_state = read_option_stake_state(deps.storage, &token_sender)?;\n\n\n\n let stake_state = if let Some(mut stake_state) = opt_stake_state {\n\n stake_state.update_dividend(state.multiplier);\n\n stake_state.staked_amount += wrapper.amount;\n\n stake_state\n\n } else {\n\n StakeState {\n\n multiplier: state.multiplier,\n\n staked_amount: wrapper.amount,\n\n unstaking_amount: Uint128::zero(),\n\n unstaked_amount: Uint128::zero(),\n\n unstaking_begin_time: None,\n", "file_path": "contracts/terranames_root_collector/src/contract.rs", "rank": 96, "score": 41437.17639424614 }, { "content": "#[test]\n\nfn bid_is_counter_bid_then_countered() {\n\n let mut deps = mock_dependencies(&[]);\n\n\n\n let msg = default_init();\n\n let env = mock_env();\n\n let info = mock_info(\"creator\", &[]);\n\n\n\n let res = instantiate(deps.as_mut(), env, info, msg).unwrap();\n\n assert_eq!(0, res.messages.len());\n\n\n\n // Initial bid\n\n let bid_1_time = 1234;\n\n let deposit_amount = 30_000;\n\n let res = Bid::on(\"example\", \"bidder_1\", bid_1_time)\n\n .deposit(deposit_amount)\n\n .rate(123)\n\n .execute(deps.as_mut())\n\n .unwrap();\n\n assert_eq!(res.messages.len(), 1);\n\n\n", "file_path": "contracts/terranames_auction/src/tests.rs", "rank": 97, "score": 41437.17639424614 }, { "content": "fn query_stake_state(\n\n deps: Deps,\n\n env: Env,\n\n address: Addr,\n\n) -> ContractResult<StakeStateResponse> {\n\n let config = read_config(deps.storage)?;\n\n let state = read_state(deps.storage)?;\n\n let stake_state = 
read_stake_state(deps.storage, &address)?;\n\n\n\n let (unstaking_amount, unstaked_amount) = stake_state.unstaking_unstaked_amount(\n\n env.block.time.into(), config.unstake_delay,\n\n );\n\n let unstake_time = stake_state.unstaking_begin_time.map(|t| t + config.unstake_delay);\n\n\n\n let dividend = stake_state.dividend(state.multiplier);\n\n\n\n Ok(StakeStateResponse {\n\n staked_amount: stake_state.staked_amount,\n\n unstaking_amount,\n\n unstake_time,\n\n unstaked_amount,\n\n multiplier: stake_state.multiplier,\n\n dividend,\n\n })\n\n}\n\n\n", "file_path": "contracts/terranames_root_collector/src/contract.rs", "rank": 98, "score": 41437.17639424614 }, { "content": "fn execute_withdraw_tokens(\n\n deps: DepsMut,\n\n env: Env,\n\n info: MessageInfo,\n\n amount: Uint128,\n\n to: Option<Addr>,\n\n) -> ContractResult<Response> {\n\n let config = read_config(deps.storage)?;\n\n\n\n let opt_stake_state = read_option_stake_state(deps.storage, &info.sender)?;\n\n let mut stake_state = if let Some(stake_state) = opt_stake_state {\n\n stake_state\n\n } else {\n\n return InsufficientTokens.fail();\n\n };\n\n\n\n stake_state.update_unstaked_amount(env.block.time.into(), config.unstake_delay);\n\n\n\n if stake_state.unstaked_amount < amount {\n\n return InsufficientTokens.fail();\n", "file_path": "contracts/terranames_root_collector/src/contract.rs", "rank": 99, "score": 41437.17639424614 } ]
Rust
hive-core/src/lua/shared/kv.rs
hackerer1c/hive
a98ab9a97836f208646df252175283067a398b7b
use super::SharedTable; use mlua::{ExternalError, ExternalResult, FromLua, Lua, ToLua}; use serde::{Serialize, Serializer}; use smallvec::SmallVec; use std::hash::{Hash, Hasher}; use std::sync::Arc; #[derive(Clone, Serialize)] #[serde(untagged)] pub enum SharedTableValue { Nil, Boolean(bool), Integer(i64), Number(f64), String(#[serde(serialize_with = "serialize_slice_as_str")] SmallVec<[u8; 32]>), Table(SharedTable), } fn serialize_slice_as_str<S: Serializer>(slice: &[u8], serializer: S) -> Result<S::Ok, S::Error> { if let Ok(x) = std::str::from_utf8(slice) { serializer.serialize_str(x) } else { serializer.serialize_bytes(slice) } } impl<'lua> FromLua<'lua> for SharedTableValue { fn from_lua(lua_value: mlua::Value<'lua>, lua: &'lua Lua) -> mlua::Result<Self> { use mlua::Value::*; let result = match lua_value { Nil => Self::Nil, Boolean(x) => Self::Boolean(x), Integer(x) => Self::Integer(x), Number(x) => Self::Number(x), String(x) => Self::String(x.as_bytes().into()), Table(x) => Self::Table(self::SharedTable::from_lua_table(lua, x)?), UserData(x) => { if let Ok(x) = x.borrow::<self::SharedTable>() { Self::Table(x.clone()) } else { return Err("invalid table value".to_lua_err()); } } _ => return Err("invalid table value".to_lua_err()), }; Ok(result) } } impl<'a, 'lua> ToLua<'lua> for &'a SharedTableValue { fn to_lua(self, lua: &'lua Lua) -> mlua::Result<mlua::Value<'lua>> { use SharedTableValue::*; let result = match self { Nil => mlua::Value::Nil, Boolean(x) => mlua::Value::Boolean(*x), Integer(x) => mlua::Value::Integer(*x), Number(x) => mlua::Value::Number(*x), String(x) => mlua::Value::String(lua.create_string(x)?), Table(x) => mlua::Value::UserData(lua.create_ser_userdata(x.clone())?), }; Ok(result) } } impl<'lua> ToLua<'lua> for SharedTableValue { fn to_lua(self, lua: &'lua Lua) -> mlua::Result<mlua::Value<'lua>> { use SharedTableValue::*; let result = match self { Nil => mlua::Value::Nil, Boolean(x) => mlua::Value::Boolean(x), Integer(x) => 
mlua::Value::Integer(x), Number(x) => mlua::Value::Number(x), String(x) => mlua::Value::String(lua.create_string(&x)?), Table(x) => mlua::Value::UserData(lua.create_ser_userdata(x)?), }; Ok(result) } } impl PartialEq for SharedTableValue { fn eq(&self, other: &Self) -> bool { use SharedTableValue::*; match (self, other) { (Nil, Nil) => true, (Nil, _) => false, (Boolean(x), Boolean(y)) => x == y, (Boolean(_), _) => false, (Integer(x), Integer(y)) => x == y, (Integer(x), Number(y)) => *x as f64 == *y, (Integer(_), _) => false, (Number(x), Number(y)) => x == y, (Number(x), Integer(y)) => *x == *y as f64, (Number(_), _) => false, (String(x), String(y)) => x == y, (String(_), _) => false, (Table(x), Table(y)) => Arc::ptr_eq(&x.0, &y.0), (Table(_), _) => false, } } } #[derive(Clone, Serialize)] pub struct SharedTableKey(pub(super) SharedTableValue); #[derive(Debug, thiserror::Error)] #[error("invalid key")] pub struct InvalidKey(()); impl SharedTableKey { pub fn from_value(value: SharedTableValue) -> Result<Self, InvalidKey> { use SharedTableValue::*; match value { Nil => Err(InvalidKey(())), Table(_) => Err(InvalidKey(())), Number(x) if x.is_nan() => Err(InvalidKey(())), Number(x) => { let i = x as i64; if i as f64 == x { Ok(Self(Integer(i))) } else { Ok(Self(value)) } } _ => Ok(Self(value)), } } pub fn to_i64(&self) -> Option<i64> { if let SharedTableValue::Integer(i) = self.0 { Some(i) } else { None } } } impl Hash for SharedTableKey { fn hash<H: Hasher>(&self, state: &mut H) { use SharedTableValue::*; fn canonical_float_bytes(f: f64) -> [u8; 8] { assert!(!f.is_nan()); if f == 0.0 { 0.0f64.to_ne_bytes() } else { f.to_ne_bytes() } } match &self.0 { Boolean(x) => (0u8, x).hash(state), Integer(x) => (1u8, x).hash(state), Number(x) => (2u8, canonical_float_bytes(*x)).hash(state), String(x) => (3u8, x).hash(state), Nil => unreachable!(), Table(_) => unreachable!(), } } } impl PartialEq for SharedTableKey { fn eq(&self, other: &Self) -> bool { self.0 == other.0 } } impl Eq 
for SharedTableKey {} impl<'lua> FromLua<'lua> for SharedTableKey { fn from_lua(lua_value: mlua::Value<'lua>, lua: &'lua Lua) -> mlua::Result<Self> { Self::from_value(SharedTableValue::from_lua(lua_value, lua)?).to_lua_err() } } impl<'a, 'lua> ToLua<'lua> for &'a SharedTableKey { fn to_lua(self, lua: &'lua Lua) -> mlua::Result<mlua::Value<'lua>> { (&self.0).to_lua(lua) } } impl<'a, 'lua> ToLua<'lua> for SharedTableKey { fn to_lua(self, lua: &'lua Lua) -> mlua::Result<mlua::Value<'lua>> { self.0.to_lua(lua) } }
use super::SharedTable; use mlua::{ExternalError, ExternalResult, FromLua, Lua, ToLua}; use serde::{Serialize, Serializer}; use smallvec::SmallVec; use std::hash::{Hash, Hasher}; use std::sync::Arc; #[derive(Clone, Serialize)] #[serde(untagged)] pub enum SharedTableValue { Nil, Boolean(bool), Integer(i64), Number(f64), String(#[serde(serialize_with = "serialize_slice_as_str")] SmallVec<[u8; 32]>), Table(SharedTable), } fn serialize_slice_as_str<S: Serializer>(slice: &[u8], serializer: S) -> Result<S::Ok, S::Error> { if let Ok(x) = std::str::from_utf8(slice) { serializer.serialize_str(x) } else { serializer.serialize_bytes(slice) } } impl<'lua> FromLua<'lua> for SharedTableValue { fn from_lua(lua_value: mlua::Value<'lua>, lua: &'lua Lua) -> mlua::Result<Self> { use mlua::Value::*; let result = match lua_value { Nil => Self::Nil, Boolean(x) => Self::Boolean(x), Integer(x) => Self::Integer(x), Number(x) => Self::Number(x), String(x) => Self::String(x.as_bytes().into()), Table(x) => Self::Table(self::SharedTable::from_lua_table(lua, x)?), UserData(x) => { if let Ok(x) = x.borrow::<self::SharedTable>() { Self::Table(x.clone()) } else { return Err("invalid table value".to_lua_err()); } } _ => return Err("invalid table value".to_lua_err()), }; Ok(result) } } impl<'a, 'lua> ToLua<'lua> for &'a SharedTableValue {
} impl<'lua> ToLua<'lua> for SharedTableValue { fn to_lua(self, lua: &'lua Lua) -> mlua::Result<mlua::Value<'lua>> { use SharedTableValue::*; let result = match self { Nil => mlua::Value::Nil, Boolean(x) => mlua::Value::Boolean(x), Integer(x) => mlua::Value::Integer(x), Number(x) => mlua::Value::Number(x), String(x) => mlua::Value::String(lua.create_string(&x)?), Table(x) => mlua::Value::UserData(lua.create_ser_userdata(x)?), }; Ok(result) } } impl PartialEq for SharedTableValue { fn eq(&self, other: &Self) -> bool { use SharedTableValue::*; match (self, other) { (Nil, Nil) => true, (Nil, _) => false, (Boolean(x), Boolean(y)) => x == y, (Boolean(_), _) => false, (Integer(x), Integer(y)) => x == y, (Integer(x), Number(y)) => *x as f64 == *y, (Integer(_), _) => false, (Number(x), Number(y)) => x == y, (Number(x), Integer(y)) => *x == *y as f64, (Number(_), _) => false, (String(x), String(y)) => x == y, (String(_), _) => false, (Table(x), Table(y)) => Arc::ptr_eq(&x.0, &y.0), (Table(_), _) => false, } } } #[derive(Clone, Serialize)] pub struct SharedTableKey(pub(super) SharedTableValue); #[derive(Debug, thiserror::Error)] #[error("invalid key")] pub struct InvalidKey(()); impl SharedTableKey { pub fn from_value(value: SharedTableValue) -> Result<Self, InvalidKey> { use SharedTableValue::*; match value { Nil => Err(InvalidKey(())), Table(_) => Err(InvalidKey(())), Number(x) if x.is_nan() => Err(InvalidKey(())), Number(x) => { let i = x as i64; if i as f64 == x { Ok(Self(Integer(i))) } else { Ok(Self(value)) } } _ => Ok(Self(value)), } } pub fn to_i64(&self) -> Option<i64> { if let SharedTableValue::Integer(i) = self.0 { Some(i) } else { None } } } impl Hash for SharedTableKey { fn hash<H: Hasher>(&self, state: &mut H) { use SharedTableValue::*; fn canonical_float_bytes(f: f64) -> [u8; 8] { assert!(!f.is_nan()); if f == 0.0 { 0.0f64.to_ne_bytes() } else { f.to_ne_bytes() } } match &self.0 { Boolean(x) => (0u8, x).hash(state), Integer(x) => (1u8, x).hash(state), 
Number(x) => (2u8, canonical_float_bytes(*x)).hash(state), String(x) => (3u8, x).hash(state), Nil => unreachable!(), Table(_) => unreachable!(), } } } impl PartialEq for SharedTableKey { fn eq(&self, other: &Self) -> bool { self.0 == other.0 } } impl Eq for SharedTableKey {} impl<'lua> FromLua<'lua> for SharedTableKey { fn from_lua(lua_value: mlua::Value<'lua>, lua: &'lua Lua) -> mlua::Result<Self> { Self::from_value(SharedTableValue::from_lua(lua_value, lua)?).to_lua_err() } } impl<'a, 'lua> ToLua<'lua> for &'a SharedTableKey { fn to_lua(self, lua: &'lua Lua) -> mlua::Result<mlua::Value<'lua>> { (&self.0).to_lua(lua) } } impl<'a, 'lua> ToLua<'lua> for SharedTableKey { fn to_lua(self, lua: &'lua Lua) -> mlua::Result<mlua::Value<'lua>> { self.0.to_lua(lua) } }
fn to_lua(self, lua: &'lua Lua) -> mlua::Result<mlua::Value<'lua>> { use SharedTableValue::*; let result = match self { Nil => mlua::Value::Nil, Boolean(x) => mlua::Value::Boolean(*x), Integer(x) => mlua::Value::Integer(*x), Number(x) => mlua::Value::Number(*x), String(x) => mlua::Value::String(lua.create_string(x)?), Table(x) => mlua::Value::UserData(lua.create_ser_userdata(x.clone())?), }; Ok(result) }
function_block-full_function
[ { "content": "pub fn apply_table_module_patch(lua: &Lua, table_module: Table) -> mlua::Result<()> {\n\n table_module.raw_set(\"dump\", create_fn_table_dump(lua)?)?;\n\n table_module.raw_set(\"insert\", create_fn_table_insert(lua)?)?;\n\n table_module.raw_set(\"scope\", create_fn_table_scope(lua)?)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 0, "score": 228337.55688656075 }, { "content": "pub fn destroy(lua: &Lua, context: Table) -> mlua::Result<()> {\n\n let code = mlua::chunk! {\n\n for _, v in ipairs($context) do\n\n pcall(function() local v2 <close> = v end)\n\n end\n\n };\n\n lua.load(code).set_name(\"_hive_destroy_context\")?.call(())\n\n}\n\n\n", "file_path": "hive-core/src/lua/context.rs", "rank": 1, "score": 227259.99632270564 }, { "content": "pub fn set_current(lua: &Lua, context: Option<Table>) -> mlua::Result<()> {\n\n lua.set_named_registry_value(\"_hive_current_context\", context)\n\n}\n\n\n", "file_path": "hive-core/src/lua/context.rs", "rank": 2, "score": 218503.2617998065 }, { "content": "pub fn register<'lua>(lua: &'lua Lua, value: impl ToLua<'lua>) -> mlua::Result<()> {\n\n let context: Table = lua.named_registry_value(\"_hive_current_context\")?;\n\n context.raw_insert(context.raw_len() + 1, value)\n\n}\n", "file_path": "hive-core/src/lua/context.rs", "rank": 3, "score": 210477.1728120459 }, { "content": "pub fn create_module_permission(lua: &Lua, permissions: Arc<PermissionSet>) -> mlua::Result<Table> {\n\n let permission_table = lua.create_table()?;\n\n permission_table.raw_set(\n\n \"check\",\n\n lua.create_function(move |_lua, perm: Permission| Ok(permissions.clone().check_ok(&perm)))?,\n\n )?;\n\n Ok(permission_table)\n\n}\n", "file_path": "hive-core/src/lua/permission.rs", "rank": 4, "score": 207887.4216152703 }, { "content": "pub fn create_fn_create_response(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|_lua, params: Table| {\n\n let body = params.raw_get::<_, 
LuaBody>(\"body\")?;\n\n let mut response = body.into_default_response();\n\n\n\n let status = params.raw_get::<_, Option<u16>>(\"status\")?;\n\n if let Some(x) = status {\n\n response.status = StatusCode::from_u16(x)\n\n .map_err(|_| format!(\"invalid status code: {x}\"))\n\n .to_lua_err()?;\n\n }\n\n\n\n let headers = params.raw_get::<_, Option<Table>>(\"headers\")?;\n\n if let Some(x) = headers {\n\n let mut headers = response.headers.borrow_mut();\n\n for f in x.pairs::<String, String>() {\n\n let (k, v) = f?;\n\n headers.insert(\n\n HeaderName::from_bytes(k.as_bytes())\n\n .map_err(|_| format!(\"invalid header value: {}\", k))\n", "file_path": "hive-core/src/lua/http/response.rs", "rank": 5, "score": 204169.61297372507 }, { "content": "pub fn create_fn_create_uri(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|_lua, s: mlua::String| {\n\n Ok(LuaUri(hyper::Uri::try_from(s.as_bytes()).to_lua_err()?))\n\n })\n\n}\n", "file_path": "hive-core/src/lua/http/uri.rs", "rank": 6, "score": 204169.61297372507 }, { "content": "pub fn create_preload_json(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|lua, ()| {\n\n let json_table = lua.create_table()?;\n\n json_table.raw_set(\"parse\", create_fn_json_parse(lua)?)?;\n\n json_table.raw_set(\"stringify\", create_fn_json_stringify(lua)?)?;\n\n json_table.raw_set(\"array\", create_fn_json_array(lua)?)?;\n\n json_table.raw_set(\"undo_array\", create_fn_json_undo_array(lua)?)?;\n\n json_table.raw_set(\"array_metatable\", lua.array_metatable())?;\n\n Ok(json_table)\n\n })\n\n}\n\n\n", "file_path": "hive-core/src/lua/json.rs", "rank": 7, "score": 199118.86662742798 }, { "content": "pub fn create_preload_crypto(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|lua, ()| {\n\n let crypto_table = lua.create_table()?;\n\n crypto_table.raw_set(\"thread_rng\", LuaRng(Box::new(thread_rng())))?;\n\n Ok(crypto_table)\n\n })\n\n}\n", "file_path": "hive-core/src/lua/crypto.rs", "rank": 8, "score": 
199118.86662742798 }, { "content": "fn create_fn_table_dump(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|lua, table: mlua::Value| match table {\n\n mlua::Value::Table(table) => Ok(table),\n\n mlua::Value::UserData(x) => {\n\n if let Ok(x) = x.borrow::<SharedTable>() {\n\n x.deep_dump(lua)\n\n } else if let Ok(x) = x.borrow::<SharedTableScope>() {\n\n x.deep_dump(lua)\n\n } else {\n\n Err(userdata_not_shared_table(\"dump\", 1))\n\n }\n\n }\n\n _ => Err(expected_table(\"dump\", 1, table.type_name())),\n\n })\n\n}\n\n\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 10, "score": 189773.0172278092 }, { "content": "fn create_fn_table_insert(lua: &Lua) -> mlua::Result<Function> {\n\n let old: Function = lua\n\n .globals()\n\n .raw_get_path(\"<global>\", &[\"table\", \"insert\"])?;\n\n let f = lua.create_function(\n\n |lua, (old, table, args): (Function, mlua::Value, MultiValue)| match table {\n\n mlua::Value::Table(table) => old.call::<_, ()>((table, args)),\n\n mlua::Value::UserData(table) => {\n\n let mut args = args.into_iter();\n\n match args.len() {\n\n 1 => table_insert_shared_2(lua, table, args.next().unwrap()),\n\n 2 => table_insert_shared_3(\n\n lua,\n\n table,\n\n lua.unpack(args.next().unwrap())?,\n\n args.next().unwrap(),\n\n ),\n\n _ => Err(\"wrong number of arguments\".to_lua_err()),\n\n }\n\n }\n\n _ => Err(format!(\"expected table or shared table, got {}\", table.type_name()).to_lua_err()),\n\n },\n\n )?;\n\n f.bind(old)\n\n}\n\n\n\n// Exceptions\n\n\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 11, "score": 189773.01722780918 }, { "content": "fn create_fn_table_scope(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_async_function(|lua, (table, f): (mlua::Value, Function)| async move {\n\n match table {\n\n mlua::Value::Table(table) => f.call_async(table).await,\n\n mlua::Value::UserData(x) => {\n\n if let Ok(x) = x.borrow::<SharedTable>() {\n\n let x = 
lua.create_ser_userdata(SharedTableScope::new(x.0.clone()))?;\n\n let result = f.call_async::<_, mlua::Value>(x.clone()).await;\n\n x.take::<SharedTableScope>()?;\n\n return result;\n\n }\n\n if x.borrow::<SharedTableScope>().is_ok() {\n\n f.call_async::<_, mlua::Value>(x).await\n\n } else {\n\n Err(userdata_not_shared_table(\"scope\", 1))\n\n }\n\n }\n\n _ => Err(expected_table(\"scope\", 1, table.type_name())),\n\n }\n\n })\n\n}\n\n\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 12, "score": 189773.0172278092 }, { "content": "pub fn create_fn_os_getenv(lua: &Lua, permissions: Arc<PermissionSet>) -> mlua::Result<Function> {\n\n lua.create_function(move |_lua, name: mlua::String| {\n\n let name = std::str::from_utf8(name.as_bytes()).to_lua_err()?;\n\n permissions.check(&Permission::Env {\n\n name: Cow::Borrowed(name),\n\n })?;\n\n std::env::var(name).to_lua_err()\n\n })\n\n}\n", "file_path": "hive-core/src/lua/env.rs", "rank": 13, "score": 189474.74835900118 }, { "content": "pub fn create_fn_print<'a>(lua: &'a Lua, service_name: &str) -> mlua::Result<Function<'a>> {\n\n let tostring: Function = lua.globals().raw_get(\"tostring\")?;\n\n let target = format!(\"service '{service_name}'\");\n\n let f = lua.create_function(move |_lua, (tostring, args): (Function, MultiValue)| {\n\n let s = args\n\n .into_iter()\n\n .try_fold(String::new(), |mut init, x| -> mlua::Result<_> {\n\n let string = tostring.call::<_, mlua::String>(x)?;\n\n let string = std::str::from_utf8(string.as_bytes()).to_lua_err()?;\n\n init.push_str(string);\n\n (0..8 - string.as_bytes().len() % 8).for_each(|_| init.push(' '));\n\n Ok(init)\n\n })?;\n\n info!(target: &target, \"{s}\");\n\n Ok(())\n\n })?;\n\n f.bind(tostring)\n\n}\n", "file_path": "hive-core/src/lua/print.rs", "rank": 14, "score": 188524.46290793052 }, { "content": "pub fn create_preload_http(lua: &Lua, permissions: Arc<PermissionSet>) -> mlua::Result<Function> {\n\n lua.create_function(move |lua, ()| {\n\n let http = 
lua.create_table()?;\n\n\n\n http.raw_set(\"request\", create_fn_request(lua, permissions.clone())?)?;\n\n http.raw_set(\"Response\", create_fn_create_response(lua)?)?;\n\n http.raw_set(\"Uri\", create_fn_create_uri(lua)?)?;\n\n\n\n Ok(http)\n\n })\n\n}\n\n\n", "file_path": "hive-core/src/lua/http/mod.rs", "rank": 15, "score": 179928.8587504177 }, { "content": "pub fn create_module_shared(lua: &Lua, service_name: Box<str>) -> mlua::Result<AnyUserData> {\n\n let shared = SHARED_STORE\n\n .entry(service_name)\n\n .or_insert(SharedTable::new())\n\n .clone();\n\n lua.create_ser_userdata(shared)\n\n}\n\n\n", "file_path": "hive-core/src/lua/shared/mod.rs", "rank": 16, "score": 178073.45434328518 }, { "content": "fn table_insert_shared_2(lua: &Lua, table: AnyUserData, value: mlua::Value) -> mlua::Result<()> {\n\n let (borrowed, owned);\n\n let table = if let Ok(table) = table.borrow::<SharedTable>() {\n\n owned = SharedTableScope::new(table.0.clone());\n\n &owned\n\n } else if let Ok(table) = table.borrow::<SharedTableScope>() {\n\n borrowed = table;\n\n &borrowed\n\n } else {\n\n return Err(userdata_not_shared_table(\"insert\", 1));\n\n };\n\n\n\n table.push(lua.unpack(value)?);\n\n Ok(())\n\n}\n\n\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 17, "score": 173074.69289214752 }, { "content": "fn create_fn_json_array(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|lua, table: mlua::Value| {\n\n match &table {\n\n mlua::Value::Table(table) => table.set_metatable(Some(lua.array_metatable())),\n\n mlua::Value::UserData(table) => {\n\n let table = table.borrow_mut::<SharedTable>()?;\n\n table.set_array(true);\n\n }\n\n _ => return Err(\"expected table or shared table\".to_lua_err()),\n\n }\n\n Ok(table)\n\n })\n\n}\n\n\n", "file_path": "hive-core/src/lua/json.rs", "rank": 18, "score": 163609.1049708563 }, { "content": "fn create_fn_json_parse(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|lua, string: mlua::String| {\n\n 
extract_error(lua, || {\n\n let result: serde_json::Value = serde_json::from_slice(string.as_bytes()).to_lua_err()?;\n\n lua.to_value(&result)\n\n })\n\n })\n\n}\n\n\n", "file_path": "hive-core/src/lua/json.rs", "rank": 19, "score": 163609.1049708563 }, { "content": "fn create_fn_json_stringify(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|lua, (value, pretty): (mlua::Value, Option<bool>)| {\n\n extract_error(lua, || {\n\n let string = if pretty.unwrap_or_default() {\n\n serde_json::to_string_pretty(&value).to_lua_err()?\n\n } else {\n\n serde_json::to_string(&value).to_lua_err()?\n\n };\n\n Ok(string)\n\n })\n\n })\n\n}\n\n\n", "file_path": "hive-core/src/lua/json.rs", "rank": 20, "score": 163609.10497085628 }, { "content": "fn bind_local_env_to_shared(lua: &Lua, local_env: Table, shared: mlua::Value) -> Result<()> {\n\n let index = lua\n\n .create_function(\n\n |lua, (shared, _this, key): (SharedTable, Table, mlua::Value)| {\n\n if let Ok(key) = lua.unpack::<SharedTableKey>(key) {\n\n lua.pack(&*shared.get(key))\n\n } else {\n\n Ok(mlua::Value::Nil)\n\n }\n\n },\n\n )?\n\n .bind(shared.clone())?;\n\n\n\n let newindex = lua\n\n .create_function(\n\n |lua, (shared, this, key, value): (SharedTable, Table, mlua::Value, mlua::Value)| {\n\n if let (Ok(key), Ok(value)) = (\n\n lua.unpack::<SharedTableKey>(key.clone()),\n\n lua.unpack::<SharedTableValue>(value.clone()),\n\n ) {\n", "file_path": "hive-core/src/lua/sandbox/local_env.rs", "rank": 21, "score": 163422.49489051377 }, { "content": "fn create_fn_json_undo_array(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|lua, table: mlua::Value| {\n\n match &table {\n\n mlua::Value::Table(table) => {\n\n if table\n\n .get_metatable()\n\n .map(|x| x == lua.array_metatable())\n\n .unwrap_or(false)\n\n {\n\n table.set_metatable(None);\n\n }\n\n }\n\n mlua::Value::UserData(table) => {\n\n let table = table.borrow_mut::<SharedTable>()?;\n\n table.set_array(false);\n\n }\n\n _ => return 
Err(\"expected table or shared table\".to_lua_err()),\n\n }\n\n Ok(table)\n\n })\n\n}\n", "file_path": "hive-core/src/lua/json.rs", "rank": 22, "score": 162042.54092578645 }, { "content": "fn create_fn_current_worker(lua: &Lua) -> mlua::Result<Function> {\n\n lua.create_function(|lua, ()| std::thread::current().name().to_lua(lua))\n\n}\n", "file_path": "hive-core/src/lua/sandbox/global_env.rs", "rank": 23, "score": 160526.52482890955 }, { "content": "pub fn json_response(status: StatusCode, body: impl Serialize) -> Result<Response<Body>> {\n\n Ok(json_response_raw(status, body))\n\n}\n\n\n", "file_path": "hive-server/src/util.rs", "rank": 24, "score": 153164.4141454707 }, { "content": "pub fn serialize_arc<S: Serializer>(arc: &Arc<impl Serialize>, ser: S) -> Result<S::Ok, S::Error> {\n\n arc.as_ref().serialize(ser)\n\n}\n", "file_path": "hive-core/src/util.rs", "rank": 25, "score": 151946.10110890042 }, { "content": "fn create_fn_request(lua: &Lua, permissions: Arc<PermissionSet>) -> mlua::Result<Function> {\n\n lua.create_async_function(move |lua, req: LuaRequest| {\n\n let permissions = permissions.clone();\n\n extract_error_async(lua, async move {\n\n if let Some(auth) = req.uri.authority() {\n\n let host = auth.host();\n\n let port = (auth.port())\n\n .map(|x| NonZeroU16::new(x.as_u16()).ok_or(\"port is zero\"))\n\n .unwrap_or_else(|| {\n\n (req.uri.scheme())\n\n .map(|x| match x.as_str() {\n\n \"https\" => nonzero!(443u16),\n\n _ => nonzero!(80u16),\n\n })\n\n .ok_or(\"no URI scheme specified\")\n\n })\n\n .to_lua_err()?;\n\n permissions.check(&Permission::Net {\n\n host: host.into(),\n\n port,\n", "file_path": "hive-core/src/lua/http/mod.rs", "rank": 26, "score": 150689.04665649403 }, { "content": "fn userdata_not_shared_table(fn_name: &'static str, pos: u8) -> mlua::Error {\n\n BadArgument::new(fn_name, pos, \"failed to borrow userdata as shared table\").into()\n\n}\n\n\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 27, "score": 
146293.66092882384 }, { "content": "fn expected_table(fn_name: &'static str, pos: u8, found: &str) -> mlua::Error {\n\n BadArgument::new(\n\n fn_name,\n\n pos,\n\n format!(\"expected table or shared table, found {found}\"),\n\n )\n\n .into()\n\n}\n\n\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 28, "score": 141379.23958911654 }, { "content": "fn get_context_table<'lua>(\n\n sandbox: &'lua Rc<Sandbox>,\n\n context: &Option<RegistryKey>,\n\n) -> mlua::Result<Option<Table<'lua>>> {\n\n context\n\n .as_ref()\n\n .map(|x| sandbox.lua.registry_value(x))\n\n .transpose()\n\n}\n\n\n\nimpl Future for TaskFuture {\n\n type Output = mlua::Result<()>;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut std::task::Context<'_>) -> Poll<Self::Output> {\n\n let this = self.project();\n\n let tx = if let Some(tx) = this.tx.take() {\n\n tx\n\n } else {\n\n return Poll::Ready(Ok(()));\n\n };\n", "file_path": "hive-core/src/task/task_future.rs", "rank": 29, "score": 133746.66304941932 }, { "content": "fn len(x: &SharedTableRepr) -> i64 {\n\n x.int.iter().last().map(|x| 0.max(*x.0)).unwrap_or(0)\n\n}\n", "file_path": "hive-core/src/lua/shared/mod.rs", "rank": 30, "score": 128732.19351568309 }, { "content": "pub trait LuaTableExt<'a> {\n\n fn raw_get_path<T: FromLua<'a>>(&self, base: &str, path: &[&str]) -> Result<T>;\n\n}\n\n\n", "file_path": "hive-core/src/lua/mod.rs", "rank": 31, "score": 123529.20763189746 }, { "content": "fn out_of_bounds(fn_name: &'static str, pos: u8) -> mlua::Error {\n\n BadArgument::new(fn_name, pos, \"out of bounds\").into()\n\n}\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 32, "score": 122938.57884640859 }, { "content": "pub fn remove_service_shared_stores(service_name: &str) {\n\n SHARED_STORE.retain(|k, _| k.as_ref() != service_name);\n\n}\n\n\n\n#[derive(Clone, Default)]\n\npub struct SharedTable(Arc<RwLock<SharedTableRepr>>);\n\n\n\nimpl SharedTable {\n\n pub fn new() -> Self {\n\n Default::default()\n\n }\n\n\n\n pub fn 
from_lua_table(lua: &Lua, table: Table) -> mlua::Result<Self> {\n\n let mut int = BTreeMap::new();\n\n let mut hash = HashMap::new();\n\n for kv in table.clone().pairs::<SharedTableKey, SharedTableValue>() {\n\n let (k, v) = kv?;\n\n if let Some(i) = k.to_i64() {\n\n int.insert(i, v);\n\n } else {\n", "file_path": "hive-core/src/lua/shared/mod.rs", "rank": 33, "score": 121183.30943861161 }, { "content": "fn scheme_not_supported<T>(scheme: &str) -> mlua::Result<T> {\n\n Err(format!(\"scheme currently not supported: {scheme}\").to_lua_err())\n\n}\n\n\n\npub async fn remove_service_local_storage(state: &HiveState, service_name: &str) -> Result<()> {\n\n let path = state.local_storage_path.join(service_name);\n\n Ok(tokio::fs::remove_dir_all(path).await?)\n\n}\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 34, "score": 115508.11709238545 }, { "content": "fn table_insert_shared_3(\n\n lua: &Lua,\n\n table: AnyUserData,\n\n pos: i64,\n\n value: mlua::Value,\n\n) -> mlua::Result<()> {\n\n if pos < 1 {\n\n return Err(out_of_bounds(\"insert\", 2));\n\n }\n\n let (borrowed, owned);\n\n let table = if let Ok(table) = table.borrow::<SharedTable>() {\n\n owned = SharedTableScope::new(table.0.clone());\n\n &owned\n\n } else if let Ok(table) = table.borrow::<SharedTableScope>() {\n\n borrowed = table;\n\n &borrowed\n\n } else {\n\n return Err(userdata_not_shared_table(\"insert\", 1));\n\n };\n\n\n", "file_path": "hive-core/src/lua/shared/patch.rs", "rank": 35, "score": 112695.31885035167 }, { "content": "pub fn json_response_raw(status: StatusCode, body: impl Serialize) -> Response<Body> {\n\n Response::builder()\n\n .status(status)\n\n .header(\"content-type\", \"application/json\")\n\n .body(serde_json::to_string(&body).unwrap().into())\n\n .unwrap()\n\n}\n\n\n\n/// Taken from `tokio::fs`\n\npub async fn asyncify<F, T, E>(f: F) -> Result<T>\n\nwhere\n\n F: FnOnce() -> Result<T, E> + Send + 'static,\n\n T: Send + 'static,\n\n E: Send + 'static,\n\n crate::Error: 
From<E>,\n\n{\n\n match spawn_blocking(f).await {\n\n Ok(res) => res.map_err(From::from),\n\n Err(_) => Err(io::Error::new(io::ErrorKind::Other, \"background task failed\").into()),\n\n }\n", "file_path": "hive-server/src/util.rs", "rank": 36, "score": 112258.71154228893 }, { "content": "fn main() -> anyhow::Result<()> {\n\n tokio::runtime::Builder::new_multi_thread()\n\n .enable_all()\n\n .worker_threads(*HALF_NUM_CPUS)\n\n .build()\n\n .unwrap()\n\n .block_on(run())\n\n}\n\n\n\n#[cfg(unix)]\n\nasync fn shutdown_signal() {\n\n use tokio::select;\n\n use tokio::signal::unix::{signal, SignalKind};\n\n\n\n let mut sigint = signal(SignalKind::interrupt()).unwrap();\n\n let mut sigterm = signal(SignalKind::terminate()).unwrap();\n\n\n\n let signal = select! {\n\n _ = sigint.recv() => \"SIGINT\",\n\n _ = sigterm.recv() => \"SIGTERM\",\n", "file_path": "hive-server/src/main.rs", "rank": 37, "score": 106656.68721645922 }, { "content": "fn parse_path<'a>(path: &'a mlua::String<'a>) -> mlua::Result<(&'a str, &'a str)> {\n\n let path = std::str::from_utf8(path.as_bytes()).to_lua_err()?;\n\n Ok(path.split_once(':').unwrap_or((\"local\", path)))\n\n}\n\n\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 38, "score": 106326.11430391486 }, { "content": "fn raw_get_path<'a, T: FromLua<'a>>(\n\n table: &Table<'a>,\n\n base: &mut String,\n\n path: &[&str],\n\n) -> mlua::Result<T> {\n\n base.extend([\".\", path[0]]);\n\n if path.len() == 1 {\n\n Ok(table.raw_get(path[0])?)\n\n } else {\n\n raw_get_path(&table.raw_get::<_, Table>(path[0])?, base, &path[1..])\n\n }\n\n}\n\n\n\nimpl<'a> LuaTableExt<'a> for Table<'a> {\n\n fn raw_get_path<T: FromLua<'a>>(&self, base: &str, path: &[&str]) -> Result<T> {\n\n let mut base = base.into();\n\n let result = raw_get_path(self, &mut base, path).map_err(|mut error| {\n\n if let mlua::Error::FromLuaConversionError { message, .. 
} = &mut error {\n\n *message = Some(base);\n\n }\n", "file_path": "hive-core/src/lua/mod.rs", "rank": 39, "score": 105640.27396857919 }, { "content": "fn serialize_error<E, S>(error: E, ser: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n E: std::error::Error,\n\n S: Serializer,\n\n{\n\n json!({ \"msg\": error.to_string() }).serialize(ser)\n\n}\n\n\n\nimpl ErrorKind {\n\n pub fn status(&self) -> StatusCode {\n\n match self {\n\n Self::LuaCustom { status, .. } => *status,\n\n _ => self.get_str(\"status\").unwrap().parse().unwrap(),\n\n }\n\n }\n\n\n\n pub fn error(&self) -> &str {\n\n match self {\n\n Self::LuaCustom { error, .. } => error,\n\n _ => self.get_str(\"error\").unwrap(),\n", "file_path": "hive-core/src/error.rs", "rank": 40, "score": 105506.88279217947 }, { "content": "fn serialize_error<E, S>(error: E, ser: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n E: std::error::Error,\n\n S: Serializer,\n\n{\n\n json!({ \"msg\": error.to_string() }).serialize(ser)\n\n}\n\n\n\nimpl ErrorKind {\n\n pub fn status(&self) -> StatusCode {\n\n match self {\n\n ErrorKind::Hive(error) => error.kind().status(),\n\n ErrorKind::Custom { status, .. 
} => *status,\n\n _ => self.get_str(\"status\").unwrap().parse().unwrap(),\n\n }\n\n }\n\n\n\n pub fn internal(&self) -> bool {\n\n match self {\n\n ErrorKind::Hive(error) => error.kind().internal(),\n\n _ => self.status().is_server_error(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "hive-server/src/error.rs", "rank": 41, "score": 105506.88279217947 }, { "content": "/// Similar to `hive_core::path::normalize_path`, but for `str`s instead of\n\n/// `Path`s.\n\n///\n\n/// The returned path is always relative, which is intentional and convenient\n\n/// for concatenating to other paths in usual cases.\n\npub fn normalize_path_str(path: &str) -> String {\n\n let mut result = Vec::new();\n\n let segments = path\n\n .split(['/', '\\\\'])\n\n .filter(|&x| !x.is_empty() && x != \".\");\n\n for s in segments {\n\n if s == \"..\" {\n\n result.pop();\n\n } else {\n\n result.push(s);\n\n }\n\n }\n\n result.join(\"/\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::ffi::OsString;\n\n use test_case::test_case;\n", "file_path": "hive-core/src/path.rs", "rank": 42, "score": 97967.1839097366 }, { "content": "fn list(state: &MainState) -> Result<Response<Body>> {\n\n let services = state.hive.list_services().collect::<Vec<_>>();\n\n let services = (services.iter()).map(Service::upgrade).collect::<Vec<_>>();\n\n json_response(StatusCode::OK, services)\n\n}\n\n\n", "file_path": "hive-server/src/handle/mod.rs", "rank": 43, "score": 92836.56252736879 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\nenum ReadMode {\n\n All,\n\n Exact(u64),\n\n Line,\n\n LineWithDelimiter,\n\n // Numeral,\n\n}\n\n\n\nimpl ReadMode {\n\n fn from_lua(mode: mlua::Value) -> mlua::Result<Self> {\n\n match mode {\n\n mlua::Value::Integer(i) => {\n\n if i > 0 {\n\n return Ok(Self::Exact(i as _));\n\n }\n\n }\n\n mlua::Value::String(s) => match s.as_bytes() {\n\n b\"a\" => return Ok(Self::All),\n\n b\"l\" => return Ok(Self::Line),\n\n b\"L\" => return 
Ok(Self::LineWithDelimiter),\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 44, "score": 92079.89254512389 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\nenum OpenMode {\n\n Read,\n\n Write,\n\n Append,\n\n ReadWrite,\n\n ReadWriteNew,\n\n ReadAppend,\n\n}\n\n\n\nimpl OpenMode {\n\n fn from_lua(mode: Option<mlua::String>) -> mlua::Result<Self> {\n\n use OpenMode::*;\n\n if let Some(mode) = mode {\n\n let result = match mode.as_bytes() {\n\n b\"r\" => Read,\n\n b\"w\" => Write,\n\n b\"a\" => Append,\n\n b\"r+\" => ReadWrite,\n\n b\"w+\" => ReadWriteNew,\n\n b\"a+\" => ReadAppend,\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 45, "score": 92079.89254512389 }, { "content": "/// Taken from [Cargo](https://github.com/rust-lang/cargo/blob/af307a38c20a753ec60f0ad18be5abed3db3c9ac/src/cargo/util/paths.rs#L60-L85),\n\n/// and modified to force absolute path.\n\npub fn normalize_path(path: impl AsRef<Path>) -> PathBuf {\n\n let mut components = path.as_ref().components().peekable();\n\n let mut ret = if let Some(c @ Component::Prefix(..)) = components.peek().cloned() {\n\n components.next();\n\n PathBuf::from(c.as_os_str())\n\n } else {\n\n PathBuf::new()\n\n };\n\n ret.push(\"/\");\n\n\n\n for component in components {\n\n match component {\n\n Component::Prefix(..) 
=> unreachable!(),\n\n Component::RootDir => {\n\n ret.push(component.as_os_str());\n\n }\n\n Component::CurDir => {}\n\n Component::ParentDir => {\n\n ret.pop();\n\n }\n\n Component::Normal(c) => {\n\n ret.push(c);\n\n }\n\n }\n\n }\n\n ret\n\n}\n\n\n", "file_path": "hive-core/src/path.rs", "rank": 46, "score": 91592.11667987864 }, { "content": "pub fn method_not_allowed(expected: &[&'static str], got: &Method) -> Error {\n\n From::from((\n\n 405,\n\n \"method not allowed\",\n\n json!({ \"expected\": expected, \"got\": got.as_str() }),\n\n ))\n\n}\n\n\n\n#[derive(Debug, thiserror::Error)]\n\npub struct ErrorAuthWrapper {\n\n inner: Error,\n\n uuid: Option<Uuid>,\n\n}\n\n\n\nimpl ErrorAuthWrapper {\n\n pub fn new(auth: bool, error: impl Into<Error>) -> Self {\n\n let inner = error.into();\n\n let uuid = if !auth && inner.kind.internal() {\n\n Some(Uuid::new_v4())\n\n } else {\n", "file_path": "hive-server/src/error.rs", "rank": 47, "score": 90019.15465462428 }, { "content": "fn create_fn_fs_mkdir(\n\n lua: &Lua,\n\n local_storage_path: Arc<Path>,\n\n permissions: Arc<PermissionSet>,\n\n) -> mlua::Result<Function> {\n\n lua.create_async_function(move |lua, (path, all): (mlua::String, bool)| {\n\n let local_storage_path = local_storage_path.clone();\n\n let permissions = permissions.clone();\n\n extract_error_async(lua, async move {\n\n let (scheme, path) = parse_path(&path)?;\n\n\n\n let path: Cow<Path> = match scheme {\n\n \"local\" => local_storage_path.join(normalize_path_str(path)).into(),\n\n \"external\" => {\n\n permissions.check(&Permission::Write {\n\n path: Cow::Borrowed(Path::new(path)),\n\n })?;\n\n Path::new(path).into()\n\n }\n\n \"source\" => return Err(\"cannot modify service source\".to_lua_err()),\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 48, "score": 89656.26677038806 }, { "content": "fn create_fn_fs_open(\n\n lua: &Lua,\n\n source: Source,\n\n local_storage_path: Arc<Path>,\n\n permissions: Arc<PermissionSet>,\n\n) -> 
mlua::Result<Function<'_>> {\n\n lua.create_async_function(\n\n move |lua, (path, mode): (mlua::String, Option<mlua::String>)| {\n\n use OpenMode::*;\n\n let source = source.clone();\n\n let local_storage_path = local_storage_path.clone();\n\n let permissions = permissions.clone();\n\n async move {\n\n let (scheme, path) = parse_path(&path)?;\n\n let mode = OpenMode::from_lua(mode)?;\n\n extract_error_async(lua, async {\n\n let file = match scheme {\n\n \"local\" => {\n\n let path = normalize_path_str(path);\n\n GenericFile::File(\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 49, "score": 89656.26677038806 }, { "content": "fn create_fn_fs_remove(\n\n lua: &Lua,\n\n local_storage_path: Arc<Path>,\n\n permissions: Arc<PermissionSet>,\n\n) -> mlua::Result<Function> {\n\n lua.create_async_function(move |lua, (path, all): (mlua::String, bool)| {\n\n let local_storage_path = local_storage_path.clone();\n\n let permissions = permissions.clone();\n\n extract_error_async(lua, async move {\n\n let (scheme, path) = parse_path(&path)?;\n\n\n\n let path: Cow<Path> = match scheme {\n\n \"local\" => local_storage_path.join(normalize_path_str(path)).into(),\n\n \"external\" => {\n\n let path: Cow<_> = Path::new(path).into();\n\n permissions.check(&Permission::Write { path: path.clone() })?;\n\n path\n\n }\n\n \"source\" => return Err(\"cannot modify service source\".to_lua_err()),\n\n _ => return scheme_not_supported(scheme),\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 50, "score": 89656.26677038806 }, { "content": "fn get(state: &MainState, name: &str) -> Result<Response<Body>> {\n\n let service = state.hive.get_service(name)?;\n\n json_response(StatusCode::OK, service.upgrade())\n\n}\n\n\n\nasync fn start_stop(state: &MainState, name: &str, query: &str) -> Result<Response<Body>> {\n\n #[derive(Deserialize)]\n\n struct Query {\n\n op: Operation,\n\n }\n\n\n\n #[derive(Deserialize)]\n\n enum Operation {\n\n #[serde(rename = \"start\")]\n\n Start,\n\n #[serde(rename = 
\"stop\")]\n\n Stop,\n\n }\n\n\n\n let Query { op } = serde_qs::from_str(query)?;\n", "file_path": "hive-server/src/handle/mod.rs", "rank": 51, "score": 87084.23581890587 }, { "content": "fn parse_multipart(headers: &HeaderMap, body: Body) -> Result<Multipart<'static>> {\n\n let allowed_fields = vec![\"single\", \"multi\", \"config\"];\n\n let size_limit = SizeLimit::new()\n\n .for_field(\"single\", 1024u64.pow(2) * 5)\n\n .for_field(\"multi\", 1024u64.pow(2) * 100)\n\n .for_field(\"config\", 1024u64.pow(2) * 5);\n\n\n\n let content_type = headers\n\n .get(\"content-type\")\n\n .ok_or(\"no Content-Type given\")?\n\n .to_str()\n\n .or(Err(\"Content-Type is not valid UTF-8\"))?;\n\n let boundary = multer::parse_boundary(content_type)?;\n\n let constraints = Constraints::new()\n\n .allowed_fields(allowed_fields)\n\n .size_limit(size_limit);\n\n Ok(Multipart::with_constraints(body, boundary, constraints))\n\n}\n\n\n\nasync fn read_single<'a>(\n", "file_path": "hive-server/src/handle/upload.rs", "rank": 52, "score": 85873.6522208157 }, { "content": "fn _create_preload_fs(\n\n lua: &Lua,\n\n local_storage_path: Arc<Path>,\n\n source: Source,\n\n permissions: Arc<PermissionSet>,\n\n) -> mlua::Result<Function<'_>> {\n\n lua.create_function(move |lua, ()| {\n\n let fs_table = lua.create_table()?;\n\n fs_table.raw_set(\n\n \"open\",\n\n create_fn_fs_open(\n\n lua,\n\n source.clone(),\n\n local_storage_path.clone(),\n\n permissions.clone(),\n\n )?,\n\n )?;\n\n fs_table.raw_set(\n\n \"mkdir\",\n\n create_fn_fs_mkdir(lua, local_storage_path.clone(), permissions.clone())?,\n\n )?;\n\n fs_table.raw_set(\n\n \"remove\",\n\n create_fn_fs_remove(lua, local_storage_path.clone(), permissions.clone())?,\n\n )?;\n\n Ok(fs_table)\n\n })\n\n}\n\n\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 53, "score": 82332.06586991044 }, { "content": "type Result<T, E = Error> = std::result::Result<T, E>;\n\n\n\npub(crate) struct MainState {\n\n hive: Hive,\n\n hive_path: PathBuf,\n\n 
auth_token: Option<Uuid>,\n\n}\n\n\n\nasync fn run() -> anyhow::Result<()> {\n\n if option_env!(\"RUST_LOG\").is_none() {\n\n std::env::set_var(\"RUST_LOG\", \"INFO\");\n\n }\n\n pretty_env_logger::init();\n\n info!(\"Starting hive-server v{}\", env!(\"CARGO_PKG_VERSION\"));\n\n\n\n let Args { config, hive_path } = Args::parse();\n\n\n\n info!(\"Hive working path: {}\", hive_path.display().underline());\n\n let local_storage_path = init_paths(&hive_path).await;\n\n\n", "file_path": "hive-server/src/main.rs", "rank": 54, "score": 68525.33698001315 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]\n\nenum UploadMode {\n\n #[serde(rename = \"create\")]\n\n Create,\n\n #[serde(rename = \"hot\")]\n\n Hot,\n\n #[serde(rename = \"cold\")]\n\n Cold,\n\n #[serde(rename = \"load\")]\n\n Load,\n\n}\n\n\n\nimpl Default for UploadMode {\n\n fn default() -> Self {\n\n Self::Hot\n\n }\n\n}\n\n\n", "file_path": "hive-server/src/handle/upload.rs", "rank": 55, "score": 62386.473364677724 }, { "content": "#[derive(Default)]\n\nstruct SharedTableRepr {\n\n int: BTreeMap<i64, SharedTableValue>,\n\n hash: HashMap<SharedTableKey, SharedTableValue>,\n\n array: bool,\n\n}\n\n\n\nimpl SharedTableRepr {\n\n fn get(&self, key: SharedTableKey) -> &SharedTableValue {\n\n const CONST_NIL: SharedTableValue = SharedTableValue::Nil;\n\n\n\n (key.to_i64())\n\n .map(|i| self.int.get(&i))\n\n .unwrap_or_else(|| self.hash.get(&key))\n\n .unwrap_or(&CONST_NIL)\n\n }\n\n\n\n fn set(&mut self, key: SharedTableKey, value: SharedTableValue) -> SharedTableValue {\n\n if let Some(i) = key.to_i64() {\n\n self.int.insert(i, value)\n\n } else {\n", "file_path": "hive-core/src/lua/shared/mod.rs", "rank": 56, "score": 60098.307388780755 }, { "content": "enum StoppedServiceInner<'a> {\n\n Ref(Ref<'a, ServiceName, ServiceState>),\n\n RefMulti(RefMulti<'a, ServiceName, ServiceState>),\n\n}\n\n\n\nimpl<'a> StoppedService<'a> {\n\n pub(super) fn from_ref(x: Ref<'a, ServiceName, 
ServiceState>) -> Self {\n\n assert!(matches!(x.value(), ServiceState::Stopped(_)));\n\n Self(StoppedServiceInner::Ref(x))\n\n }\n\n\n\n pub(super) fn from_ref_multi(x: RefMulti<'a, ServiceName, ServiceState>) -> Self {\n\n assert!(matches!(x.value(), ServiceState::Stopped(_)));\n\n Self(StoppedServiceInner::RefMulti(x))\n\n }\n\n}\n\n\n\nimpl Deref for StoppedService<'_> {\n\n type Target = ServiceImpl;\n\n\n", "file_path": "hive-core/src/service/impls.rs", "rank": 57, "score": 59710.498708259525 }, { "content": "struct SharedTableScope(RefCell<ArcRwLockWriteGuard<RawRwLock, SharedTableRepr>>);\n\n\n\nimpl SharedTableScope {\n\n fn new(x: Arc<RwLock<SharedTableRepr>>) -> Self {\n\n Self(RefCell::new(x.write_arc()))\n\n }\n\n\n\n fn push(&self, value: SharedTableValue) {\n\n let mut wl = self.0.borrow_mut();\n\n let pos = len(&wl) + 1;\n\n wl.set(SharedTableKey(SharedTableValue::Integer(pos)), value);\n\n }\n\n\n\n fn deep_dump<'lua>(&self, lua: &'lua Lua) -> mlua::Result<Table<'lua>> {\n\n let guard = self.0.borrow();\n\n guard._deep_dump(\n\n lua,\n\n Arc::as_ptr(ArcRwLockWriteGuard::rwlock(&guard)) as _,\n\n &mut HashMap::new(),\n\n )\n", "file_path": "hive-core/src/lua/shared/mod.rs", "rank": 58, "score": 55175.349786513994 }, { "content": "type SharedStore = Arc<DashMap<Box<str>, SharedTable>>;\n\n\n\nstatic SHARED_STORE: Lazy<SharedStore> = Lazy::new(|| Arc::new(DashMap::new()));\n\n\n", "file_path": "hive-core/src/lua/shared/mod.rs", "rank": 59, "score": 52269.67989410942 }, { "content": "fn get_default_hive_path() -> PathBuf {\n\n let mut hive_path = home::home_dir().expect(\"no home directory found\");\n\n hive_path.push(\".hive\");\n\n hive_path\n\n}\n\n\n\n#[derive(Debug, Clone, Parser)]\n\n#[clap(author, version, about)]\n\npub struct ConfigArgs {\n\n /// Listening address [overrides config]\n\n #[clap(short, long)]\n\n pub listen: Option<SocketAddr>,\n\n\n\n /// Authentication token [overrides config]\n\n #[clap(long)]\n\n pub auth_token: 
Option<Uuid>,\n\n\n\n /// Hive executor pool size [overrides config]\n\n #[clap(long)]\n\n pub pool_size: Option<usize>,\n", "file_path": "hive-server/src/config.rs", "rank": 60, "score": 50482.897427370684 }, { "content": " },\n\n __call = function(self, req)\n\n local handler = self[\"$\" .. req.method]\n\n local any = self[\"$_\"]\n\n if type(handler) == \"function\" then\n\n return handler(req)\n\n elseif type(any) == \"function\" then\n\n return any(req)\n\n else\n\n local allowed_methods = {}\n\n for k, _ in pairs(self) do\n\n if k:sub(1, 1) == \"$\" then\n\n allowed_methods[#allowed_methods + 1] = k:sub(2)\n\n end\n\n end\n\n\n\n error {\n\n status = 405,\n\n error = \"method not allowed\",\n\n detail = {\n", "file_path": "hive-core/src/lua/routing.lua", "rank": 61, "score": 38698.75902974032 }, { "content": " allowed_methods = allowed_methods\n\n }\n\n }\n\n end\n\n end,\n\n}\n\n\n\nlocal function init_method_route(method, handler)\n\n return setmetatable({\n\n [\"$\" .. method] = handler\n\n }, mt)\n\nend\n\n\n\nlocal routing = {\n\n any = bind_one(init_method_route, \"_\"),\n\n}\n\n\n\nfor _, v in ipairs(methods) do\n\n local method_upper = v:upper()\n\n mt.__index[v] = bind_one(add_method_route, method_upper)\n\n routing[v] = bind_one(init_method_route, method_upper)\n\nend\n\n\n\nreturn routing\n", "file_path": "hive-core/src/lua/routing.lua", "rank": 62, "score": 38698.75902974032 }, { "content": "local methods = {\n\n \"get\", \"post\", \"put\",\n\n \"patch\", \"head\", \"delete\",\n\n \"trace\",\n\n}\n\n\n\nlocal function bind_one(f, arg)\n\n return function(...)\n\n return f(arg, ...)\n\n end\n\nend\n\n\n\nlocal function add_method_route(method, self, handler)\n\n self[\"$\" .. 
method] = handler\n\n return self\n\nend\n\n\n\nlocal mt = {\n\n __index = {\n\n any = bind_one(add_method_route, \"_\"),\n", "file_path": "hive-core/src/lua/routing.lua", "rank": 63, "score": 38698.75902974032 }, { "content": " modname_type == \"string\",\n\n \"bad argument #1 to 'require' (string expected, got \" .. modname_type .. \")\"\n\n )\n\n\n\n local package = internal.package;\n\n local error_msgs = {}\n\n if package.loaded[modname] then\n\n return table.unpack(package.loaded[modname])\n\n else\n\n for _, searcher in ipairs(package.searchers) do\n\n local loader, data = searcher(modname)\n\n if loader then\n\n local result = { loader(modname, data) }\n\n package.loaded[modname] = result\n\n return table.unpack(result)\n\n else\n\n table.insert(error_msgs, data)\n\n end\n\n end\n\n end\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 64, "score": 37425.07664842874 }, { "content": "-- Fields with `nil` should be initialized in Rust\n\n\n\n-- Internal --\n\n\n\nlocal internal = {\n\n paths = {},\n\n sealed = false,\n\n source = nil,\n\n package = {\n\n loaded = {},\n\n preload = {},\n\n searchers = nil,\n\n },\n\n}\n\n\n\n-- Hive table --\n\n\n\nlocal function register(path, handler)\n\n assert(\n\n not internal.sealed,\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 65, "score": 37425.07129452725 }, { "content": " local loader = preload[modname]\n\n if loader then\n\n return loader, \"<preload>\"\n\n else\n\n return nil, \"preload '\" .. modname .. \"' not found\"\n\n end\n\nend\n\n\n\nlocal function source_searcher(modname)\n\n local source = internal.source\n\n local path = \"\"\n\n for str in string.gmatch(modname, \"([^%.]+)\") do\n\n path = path .. \"/\" .. str\n\n end\n\n\n\n local file_exists = source:exists(path .. \".lua\")\n\n local init_exists = source:exists(path .. \"/init.lua\")\n\n\n\n if file_exists and init_exists then\n\n return nil, \"file '@source:\" .. path .. \".lua' and '@source:\" .. path .. 
\"/init.lua' conflicts\"\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 66, "score": 37424.760218191404 }, { "content": " \"cannot call `hive.register` from places other than the top level of `main.lua`\"\n\n )\n\n local type_handler = type(handler)\n\n if type_handler ~= \"function\" then\n\n if type_handler == \"table\" then\n\n local mt = getmetatable(handler)\n\n if type(mt) == \"table\" and type(mt.__call) == \"function\" then\n\n goto ok\n\n end\n\n end\n\n error \"handler must either be a function or a callable table\"\n\n end\n\n\n\n ::ok::\n\n table.insert(internal.paths, { path, handler })\n\nend\n\n\n\nlocal function require(modname)\n\n local modname_type = type(modname)\n\n assert(\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 67, "score": 37424.10985827191 }, { "content": " error(\"module '\" .. modname .. \"' not found:\\n\\t\" .. table.concat(error_msgs, \"\\n\"))\n\nend\n\n\n\n-- Local env --\n\n\n\nlocal local_env = {\n\n hive = {\n\n register = register,\n\n context = nil,\n\n permission = nil,\n\n current_worker = current_worker,\n\n Error = hive_Error,\n\n },\n\n require = require,\n\n}\n\n\n\n-- Searchers --\n\n\n\nlocal preload = internal.package.preload\n\nlocal function preload_searcher(modname)\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 68, "score": 37424.09258577359 }, { "content": " elseif not file_exists and not init_exists then\n\n return nil, \"no file '@source:\" .. path .. \".lua'\\n\\tno file 'source:\" .. path .. \"/init.lua'\"\n\n else\n\n path = path .. 
(file_exists and \".lua\" or \"/init.lua\")\n\n local function source_loader(modname, path)\n\n return source:load(path, local_env)()\n\n end\n\n return source_loader, path\n\n end\n\nend\n\n\n\ninternal.package.searchers = { preload_searcher, source_searcher }\n\n\n\n-- Standard library whitelist --\n\n\n\nlocal whitelist = {\n\n [false] = {\n\n \"assert\", \"ipairs\", \"next\", \"pairs\",\n\n \"pcall\", \"print\", \"rawequal\", \"select\",\n\n \"setmetatable\", \"tonumber\", \"tostring\", \"type\",\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 69, "score": 37423.79746418141 }, { "content": " table = {\n\n \"remove\", \"sort\", \"concat\", \"pack\",\n\n \"unpack\",\n\n },\n\n coroutine = {\n\n \"close\", \"create\", \"isyieldable\", \"resume\",\n\n \"running\", \"status\", \"wrap\", \"yield\",\n\n },\n\n}\n\n\n\nlocal monkey_patch = {\n\n [false] = {\n\n \"error\",\n\n },\n\n table = {\n\n \"insert\", \"dump\", \"scope\"\n\n },\n\n routing = \"*\"\n\n}\n\n\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 70, "score": 37421.526325766106 }, { "content": " \"warn\", \"xpcall\", \"_VERSION\",\n\n },\n\n math = {\n\n \"abs\", \"acos\", \"asin\", \"atan\",\n\n \"atan2\", \"ceil\", \"cos\", \"deg\",\n\n \"exp\", \"floor\", \"fmod\", \"frexp\",\n\n \"huge\", \"ldexp\", \"log\", \"log10\",\n\n \"max\", \"maxinteger\", \"min\", \"mininteger\",\n\n \"modf\", \"pi\", \"pow\", \"rad\", \"random\",\n\n \"sin\", \"sinh\", \"sqrt\", \"tan\",\n\n \"tanh\", \"tointeger\", \"type\", \"ult\",\n\n },\n\n os = {\n\n \"clock\", \"difftime\", \"time\",\n\n },\n\n string = {\n\n \"byte\", \"char\", \"find\", \"format\",\n\n \"gmatch\", \"gsub\", \"len\", \"lower\",\n\n \"match\", \"reverse\", \"sub\", \"upper\",\n\n },\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 71, "score": 37420.07828556613 }, { "content": " end\n\n end\n\nend\n\n\n\napply_whitelist(whitelist)\n\napply_whitelist(monkey_patch)\n\n\n\nlocal_env.getmetatable 
= safe_getmetatable\n\n\n\nreturn local_env, internal\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 72, "score": 37417.65248577154 }, { "content": "local function apply_whitelist(whitelist)\n\n for module, fields in pairs(whitelist) do\n\n local from_module, to_module\n\n if module then\n\n from_module = _G[module]\n\n to_module = {}\n\n local_env[module] = to_module\n\n else\n\n from_module = _G\n\n to_module = local_env\n\n end\n\n\n\n if fields == \"*\" then\n\n for k, v in pairs(from_module) do\n\n to_module[k] = v\n\n end\n\n else\n\n for _, field in ipairs(fields) do\n\n to_module[field] = from_module[field]\n\n end\n", "file_path": "hive-core/src/lua/sandbox/local_env.lua", "rank": 73, "score": 37417.65248577154 }, { "content": "function safe_getmetatable(t)\n\n local type_t = type(t)\n\n assert(\n\n type_t == \"table\",\n\n \"bad argument #1 to 'getmetatable' (table expected, got\" .. type_t .. \")\"\n\n )\n\n getmetatable(t)\n\nend\n", "file_path": "hive-core/src/lua/sandbox/global_env.lua", "rank": 74, "score": 36218.64896808601 }, { "content": "function error(msg, level)\n\n if type(msg) == \"table\" then\n\n local type_detail = type(msg.detail)\n\n assert(\n\n type_detail == \"nil\" or type_detail == \"string\" or type_detail == \"table\",\n\n \"error detail must be nil, string or table\"\n\n )\n\n end\n\n lua_error(msg, level)\n\nend\n\n\n", "file_path": "hive-core/src/lua/sandbox/global_env.lua", "rank": 75, "score": 35647.50827671372 }, { "content": "function hive_Error(obj)\n\n local status = obj.status\n\n local error = obj.error\n\n\n\n local result = {\n\n status = status,\n\n error = error,\n\n }\n\n local result_mt = {\n\n __call = function(detail)\n\n return {\n\n status = status,\n\n error = error,\n\n detail = detail,\n\n }\n\n end\n\n }\n\n\n\n return setmetatable(result, result_mt)\n\nend\n\n\n\nlocal lua_error = error\n\n\n", "file_path": "hive-core/src/lua/sandbox/global_env.lua", "rank": 76, "score": 
35647.50827671372 }, { "content": "struct LuaRng(Box<dyn RngCore>);\n\n\n\nimpl UserData for LuaRng {\n\n fn add_methods<'lua, M: mlua::UserDataMethods<'lua, Self>>(methods: &mut M) {\n\n methods.add_method_mut(\"random\", |_lua, this, ()| Ok(this.0.gen::<f64>()));\n\n\n\n methods.add_method_mut(\"gen_range\", |_lua, this, (low, high): (i64, i64)| {\n\n if low >= high {\n\n Err(\"range is empty\".to_lua_err())\n\n } else {\n\n Ok(this.0.gen_range(low..=high))\n\n }\n\n })\n\n }\n\n}\n\n\n", "file_path": "hive-core/src/lua/crypto.rs", "rank": 77, "score": 35094.10085820657 }, { "content": "#[self_referencing]\n\nstruct LuaHeaderMapIter {\n\n inner: Rc<RefCell<HeaderMap>>,\n\n\n\n #[borrows(inner)]\n\n #[not_covariant]\n\n borrow: RefMut<'this, HeaderMap>,\n\n\n\n #[borrows(borrow)]\n\n #[covariant]\n\n iter: hyper::header::Iter<'this, HeaderValue>,\n\n}\n\n\n\nimpl UserData for LuaHeaderMapIter {}\n", "file_path": "hive-core/src/lua/http/header_map.rs", "rank": 78, "score": 35094.10085820657 }, { "content": "type TaskFn = Box<(dyn FnOnce(Rc<Sandbox>) -> LocalBoxFuture<'static, AnyBox> + Send + 'static)>;\n", "file_path": "hive-core/src/task/mod.rs", "rank": 79, "score": 34495.55326034327 }, { "content": "local json = require \"json\"\n\n\n", "file_path": "examples/hello.lua", "rank": 80, "score": 34188.49657933357 }, { "content": "local http = require \"http\"\n\n\n\nhive.register(\"/\", function(req)\n\n local resp = http.request \"https://httpbin.org/get\"\n\n\n\n return {\n\n status = resp.status,\n\n result = resp.body:parse_json(),\n\n }\n\nend)\n", "file_path": "examples/request/main.lua", "rank": 81, "score": 33187.58510901332 }, { "content": "function hive.start()\n\n names = json.array {}\n\nend\n\n\n\nlocal function hello(req)\n\n local name = req.params.name or \"world\"\n\n table.insert(names, name)\n\n return { greeting = \"Hello, \" .. name .. 
\"!\" }\n\nend\n\n\n\nlocal function list(req)\n\n return names\n\nend\n\n\n\nhive.register(\"/\", hello)\n\nhive.register(\"/list\", list)\n\nhive.register(\"/:name\", hello)\n", "file_path": "examples/hello.lua", "rank": 82, "score": 32238.23472680756 }, { "content": " let msg = msg.into().into();\n\n Self { fn_name, pos, msg }\n\n }\n\n}\n\n\n\nimpl From<BadArgument> for mlua::Error {\n\n fn from(x: BadArgument) -> mlua::Error {\n\n x.to_lua_err()\n\n }\n\n}\n\n\n\npub(super) fn extract_error<'lua, R, F>(lua: &'lua Lua, func: F) -> mlua::Result<MultiValue<'lua>>\n\nwhere\n\n R: ToLuaMulti<'lua>,\n\n F: FnOnce() -> mlua::Result<R>,\n\n{\n\n match func() {\n\n Ok(result) => lua.pack_multi(result),\n\n Err(error) => lua.pack_multi((mlua::Value::Nil, error.to_string())),\n\n }\n", "file_path": "hive-core/src/lua/mod.rs", "rank": 83, "score": 31361.609567859985 }, { "content": "pub(crate) mod context;\n\npub mod http;\n\n\n\nmod byte_stream;\n\nmod crypto;\n\nmod env;\n\nmod fs;\n\nmod json;\n\nmod permission;\n\nmod print;\n\nmod sandbox;\n\nmod shared;\n\n\n\npub use fs::remove_service_local_storage;\n\npub use sandbox::Sandbox;\n\n\n\nuse crate::Result;\n\nuse futures::Future;\n\nuse mlua::{ExternalError, FromLua, Function, Lua, MultiValue, Table, ToLua, ToLuaMulti};\n\nuse std::sync::Arc;\n\n\n", "file_path": "hive-core/src/lua/mod.rs", "rank": 84, "score": 31360.5068428153 }, { "content": "}\n\n\n\npub(super) async fn extract_error_async<'lua, R, Fut>(\n\n lua: &'lua Lua,\n\n future: Fut,\n\n) -> mlua::Result<MultiValue<'lua>>\n\nwhere\n\n R: ToLuaMulti<'lua>,\n\n Fut: Future<Output = mlua::Result<R>>,\n\n{\n\n match future.await {\n\n Ok(result) => lua.pack_multi(result),\n\n Err(error) => lua.pack_multi((mlua::Value::Nil, error.to_string())),\n\n }\n\n}\n\n\n\n/// Temporary solution to https://github.com/khvzak/mlua/issues/161\n\npub(super) fn async_bind_temp<'lua, T: ToLua<'lua>>(\n\n lua: &'lua Lua,\n\n f: Function<'lua>,\n", "file_path": 
"hive-core/src/lua/mod.rs", "rank": 85, "score": 31359.59575466103 }, { "content": "use super::extract_error;\n\nuse super::shared::SharedTable;\n\nuse mlua::{ExternalError, ExternalResult, Function, Lua, LuaSerdeExt};\n\n\n", "file_path": "hive-core/src/lua/json.rs", "rank": 86, "score": 31358.524821926967 }, { "content": " _ => (),\n\n },\n\n _ => (),\n\n }\n\n Err(\"invalid file read mode\".to_lua_err())\n\n }\n\n}\n\n\n\npub struct LuaFile(BufReader<GenericFile>);\n\n\n\nasync fn read_once<'lua>(\n\n this: &mut LuaFile,\n\n lua: &'lua Lua,\n\n mode: ReadMode,\n\n) -> mlua::Result<mlua::Value<'lua>> {\n\n use ReadMode::*;\n\n match mode {\n\n All => {\n\n let file_ref = this.0.get_mut();\n\n let file_len = file_ref.metadata().await?.len();\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 87, "score": 31358.417794429293 }, { "content": " let mut results = Vec::new();\n\n if modes.is_empty() {\n\n results.push(read_once(&mut this, lua, ReadMode::Line).await?);\n\n } else {\n\n for (i, mode) in modes.into_iter().enumerate() {\n\n let mode = ReadMode::from_lua(mode)\n\n .map_err(|error| BadArgument::new(\"read\", i as u8 + 1, error.to_string()))?;\n\n let result = read_once(&mut this, lua, mode).await?;\n\n if let mlua::Value::Nil = result {\n\n results.push(result);\n\n break;\n\n } else {\n\n results.push(result);\n\n }\n\n }\n\n }\n\n Ok(MultiValue::from_vec(results))\n\n })\n\n .await\n\n },\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 88, "score": 31356.84083939145 }, { "content": "use super::async_bind_temp;\n\nuse crate::lua::byte_stream::ByteStream;\n\nuse crate::lua::{context, extract_error_async, BadArgument};\n\nuse crate::path::{normalize_path, normalize_path_str};\n\nuse crate::permission::{Permission, PermissionSet};\n\nuse crate::source::{GenericFile, Source};\n\nuse crate::{HiveState, Result};\n\nuse mlua::{\n\n AnyUserData, ExternalError, ExternalResult, Function, Lua, MultiValue, ToLua, UserData,\n\n UserDataMethods, 
Variadic,\n\n};\n\nuse std::borrow::Cow;\n\nuse std::io::SeekFrom;\n\nuse std::path::Path;\n\nuse std::sync::Arc;\n\nuse tokio::fs::{self, OpenOptions};\n\nuse tokio::io::{AsyncBufReadExt, AsyncReadExt, AsyncSeekExt, AsyncWriteExt, BufReader};\n\n\n\npub async fn create_preload_fs<'lua>(\n\n lua: &'lua Lua,\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 89, "score": 31356.57886351583 }, { "content": "use mlua::{Lua, Table, ToLua};\n\n\n", "file_path": "hive-core/src/lua/context.rs", "rank": 90, "score": 31355.997655408148 }, { "content": "use crate::permission::{Permission, PermissionSet};\n\nuse mlua::{Lua, Table};\n\nuse std::sync::Arc;\n\n\n", "file_path": "hive-core/src/lua/permission.rs", "rank": 91, "score": 31355.360142251033 }, { "content": "use log::info;\n\nuse mlua::{ExternalResult, Function, Lua, MultiValue};\n\n\n", "file_path": "hive-core/src/lua/print.rs", "rank": 92, "score": 31355.07327365507 }, { "content": "use crate::permission::{Permission, PermissionSet};\n\nuse mlua::{ExternalResult, Function, Lua};\n\nuse std::borrow::Cow;\n\nuse std::sync::Arc;\n\n\n", "file_path": "hive-core/src/lua/env.rs", "rank": 93, "score": 31354.770315380538 }, { "content": " error\n\n })?;\n\n Ok(result)\n\n }\n\n}\n\n\n\n#[derive(Debug, thiserror::Error)]\n\n#[error(\"bad argument #{pos} to '{fn_name}' ({msg})\")]\n\npub struct BadArgument {\n\n fn_name: &'static str,\n\n pos: u8,\n\n msg: Arc<dyn std::error::Error + Send + Sync>,\n\n}\n\n\n\nimpl BadArgument {\n\n fn new(\n\n fn_name: &'static str,\n\n pos: u8,\n\n msg: impl Into<Box<dyn std::error::Error + Send + Sync>>,\n\n ) -> Self {\n", "file_path": "hive-core/src/lua/mod.rs", "rank": 94, "score": 31352.476824439764 }, { "content": " _ => return Err(\"invalid open mode\".to_lua_err()),\n\n };\n\n Ok(result)\n\n } else {\n\n Ok(Self::Read)\n\n }\n\n }\n\n\n\n fn to_open_options(self) -> OpenOptions {\n\n use OpenMode::*;\n\n let mut options = OpenOptions::new();\n\n match self {\n\n Read => 
options.read(true),\n\n Write => options.create(true).truncate(true).write(true),\n\n Append => options.create(true).append(true),\n\n ReadWrite => options.read(true).write(true),\n\n ReadWriteNew => options.create(true).truncate(true).read(true).write(true),\n\n ReadAppend => options.create(true).read(true).append(true),\n\n };\n\n options\n\n }\n\n}\n\n\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 95, "score": 31352.42472163804 }, { "content": "use mlua::{ExternalError, Function, Lua, UserData};\n\nuse rand::{thread_rng, Rng, RngCore};\n\n\n", "file_path": "hive-core/src/lua/crypto.rs", "rank": 96, "score": 31351.55310407235 }, { "content": " t: T,\n\n) -> mlua::Result<Function<'lua>> {\n\n lua\n\n .load(mlua::chunk! {\n\n local arg = ...\n\n return function(...)\n\n return $f(arg, ...)\n\n end\n\n })\n\n .call::<_, Function>(t)\n\n}\n", "file_path": "hive-core/src/lua/mod.rs", "rank": 97, "score": 31351.24547732913 }, { "content": " Ok(mlua::Value::Nil)\n\n } else {\n\n Ok(mlua::Value::String(lua.create_string(&buf)?))\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl UserData for LuaFile {\n\n fn add_methods<'lua, M: UserDataMethods<'lua, Self>>(methods: &mut M) {\n\n methods.add_meta_function(\"__close\", |_lua, this: AnyUserData| {\n\n drop(this.take::<Self>());\n\n Ok(())\n\n });\n\n\n\n methods.add_async_function(\n\n \"read\",\n\n |lua, (this, modes): (AnyUserData, MultiValue)| async move {\n\n let mut this = this.borrow_mut::<Self>()?;\n\n extract_error_async(lua, async {\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 98, "score": 31350.70903442739 }, { "content": " extract_error_async(lua, async {\n\n let offset = offset.unwrap_or(0);\n\n let seekfrom = if let Some(whence) = whence {\n\n match whence.as_bytes() {\n\n b\"set\" => SeekFrom::Start(offset.try_into().to_lua_err()?),\n\n b\"cur\" => SeekFrom::Current(offset),\n\n b\"end\" => SeekFrom::End(offset),\n\n x => {\n\n return Err(\n\n format!(\"invalid seek base: {}\", 
String::from_utf8_lossy(x)).to_lua_err(),\n\n )\n\n }\n\n }\n\n } else {\n\n SeekFrom::Current(0)\n\n };\n\n Ok(this.0.seek(seekfrom).await?)\n\n })\n\n .await\n\n },\n", "file_path": "hive-core/src/lua/fs.rs", "rank": 99, "score": 31350.524183462476 } ]
Rust
src/table_properties_rc.rs
hustwp233/rust-rocksdb
bac3ef613692ba783cfe9256cfd2388411c25a6d
use crocksdb_ffi::{DBTablePropertiesCollection, DBTableProperty}; use libc::size_t; use librocksdb_sys as crocksdb_ffi; use std::ops::Deref; use std::slice; use std::str; use crate::table_properties_rc_handles::{ TablePropertiesCollectionHandle, TablePropertiesCollectionIteratorHandle, TablePropertiesHandle, UserCollectedPropertiesHandle, }; pub struct TablePropertiesCollection { handle: TablePropertiesCollectionHandle, } impl TablePropertiesCollection { pub unsafe fn new(ptr: *mut DBTablePropertiesCollection) -> TablePropertiesCollection { assert!(!ptr.is_null()); TablePropertiesCollection { handle: TablePropertiesCollectionHandle::new(ptr), } } pub fn iter(&self) -> TablePropertiesCollectionIter { TablePropertiesCollectionIter::new(self.handle.clone()) } pub fn len(&self) -> usize { unsafe { crocksdb_ffi::crocksdb_table_properties_collection_len(self.handle.ptr()) } } pub fn is_empty(&self) -> bool { self.len() == 0 } } pub struct TablePropertiesCollectionIter { handle: TablePropertiesCollectionIteratorHandle, } impl TablePropertiesCollectionIter { fn new(collection: TablePropertiesCollectionHandle) -> TablePropertiesCollectionIter { TablePropertiesCollectionIter { handle: TablePropertiesCollectionIteratorHandle::new(collection), } } } impl Iterator for TablePropertiesCollectionIter { type Item = (TablePropertiesKey, TableProperties); fn next(&mut self) -> Option<Self::Item> { unsafe { loop { if !crocksdb_ffi::crocksdb_table_properties_collection_iter_valid(self.handle.ptr()) { return None; } let mut keylen: size_t = 0; let key = crocksdb_ffi::crocksdb_table_properties_collection_iter_key( self.handle.ptr(), &mut keylen, ); let props = crocksdb_ffi::crocksdb_table_properties_collection_iter_value( self.handle.ptr(), ); crocksdb_ffi::crocksdb_table_properties_collection_iter_next(self.handle.ptr()); if !props.is_null() { assert!(!key.is_null() && keylen != 0); let key = TablePropertiesKey::new(key, keylen, self.handle.clone()); let props_handle = 
TablePropertiesHandle::new(props, self.handle.clone()); let val = TableProperties::new(props_handle); return Some((key, val)); } } } } } pub struct TablePropertiesKey { key: *const u8, keylen: size_t, _iter_handle: TablePropertiesCollectionIteratorHandle, } impl TablePropertiesKey { fn new( key: *const u8, keylen: size_t, _iter_handle: TablePropertiesCollectionIteratorHandle, ) -> TablePropertiesKey { unsafe { let bytes = slice::from_raw_parts(key, keylen); assert!(str::from_utf8(bytes).is_ok()); } TablePropertiesKey { key, keylen, _iter_handle, } } } impl Deref for TablePropertiesKey { type Target = str; fn deref(&self) -> &str { unsafe { let bytes = slice::from_raw_parts(self.key, self.keylen); str::from_utf8_unchecked(bytes) } } } pub struct TableProperties { handle: TablePropertiesHandle, } impl TableProperties { fn new(handle: TablePropertiesHandle) -> TableProperties { TableProperties { handle } } fn get_u64(&self, prop: DBTableProperty) -> u64 { unsafe { crocksdb_ffi::crocksdb_table_properties_get_u64(self.handle.ptr(), prop) } } pub fn num_entries(&self) -> u64 { self.get_u64(DBTableProperty::NumEntries) } pub fn user_collected_properties(&self) -> UserCollectedProperties { UserCollectedProperties::new(self.handle.clone()) } } pub struct UserCollectedProperties { handle: UserCollectedPropertiesHandle, } impl UserCollectedProperties { fn new(table_props_handle: TablePropertiesHandle) -> UserCollectedProperties { UserCollectedProperties { handle: UserCollectedPropertiesHandle::new(table_props_handle), } } pub fn get<Q: AsRef<[u8]>>(&self, index: Q) -> Option<&[u8]> { let bytes = index.as_ref(); let mut size = 0; unsafe { let ptr = crocksdb_ffi::crocksdb_user_collected_properties_get( self.handle.ptr(), bytes.as_ptr(), bytes.len(), &mut size, ); if ptr.is_null() { return None; } Some(slice::from_raw_parts(ptr, size)) } } pub fn len(&self) -> usize { unsafe { crocksdb_ffi::crocksdb_user_collected_properties_len(self.handle.ptr()) } } pub fn is_empty(&self) -> 
bool { self.len() == 0 } }
use crocksdb_ffi::{DBTablePropertiesCollection, DBTableProperty}; use libc::size_t; use librocksdb_sys as crocksdb_ffi; use std::ops::Deref; use std::slice; use std::str; use crate::table_properties_rc_handles::{ TablePropertiesCollectionHandle, TablePropertiesCollectionIteratorHandle, TablePropertiesHandle, UserCollectedPropertiesHandle, }; pub struct TablePropertiesCollection { handle: TablePropertiesCollectionHandle, } impl TablePropertiesCollection { pub unsafe fn new(ptr: *mut DBTablePropertiesCollection) -> TablePropertiesCollection { assert!(!ptr.is_null()); TablePropertiesCollection { handle: TablePropertiesCollectionHandle::new(ptr), } } pub fn iter(&self) -> TablePropertiesCollectionIter { TablePropertiesCollectionIter::new(self.handle.clone()) } pub fn len(&self) -> usize { unsafe { crocksdb_ffi::crocksdb_table_properties_collection_len(self.handle.ptr()) } } pub fn is_empty(&self) -> bool { self.len() == 0 } } pub struct TablePropertiesCollectionIter { handle: TablePropertiesCollectionIteratorHandle, } impl TablePropertiesCollectionIter { fn new(collection: TablePropertiesCollectionHandle) -> TablePropertiesCollectionIter { TablePropertiesCollectionIter { handle: TablePropertiesCollectionIteratorHandle::new(collection), } } } impl Iterator for TablePropertiesCollectionIter { type Item = (TablePropertiesK
e { let ptr = crocksdb_ffi::crocksdb_user_collected_properties_get( self.handle.ptr(), bytes.as_ptr(), bytes.len(), &mut size, ); if ptr.is_null() { return None; } Some(slice::from_raw_parts(ptr, size)) } } pub fn len(&self) -> usize { unsafe { crocksdb_ffi::crocksdb_user_collected_properties_len(self.handle.ptr()) } } pub fn is_empty(&self) -> bool { self.len() == 0 } }
ey, TableProperties); fn next(&mut self) -> Option<Self::Item> { unsafe { loop { if !crocksdb_ffi::crocksdb_table_properties_collection_iter_valid(self.handle.ptr()) { return None; } let mut keylen: size_t = 0; let key = crocksdb_ffi::crocksdb_table_properties_collection_iter_key( self.handle.ptr(), &mut keylen, ); let props = crocksdb_ffi::crocksdb_table_properties_collection_iter_value( self.handle.ptr(), ); crocksdb_ffi::crocksdb_table_properties_collection_iter_next(self.handle.ptr()); if !props.is_null() { assert!(!key.is_null() && keylen != 0); let key = TablePropertiesKey::new(key, keylen, self.handle.clone()); let props_handle = TablePropertiesHandle::new(props, self.handle.clone()); let val = TableProperties::new(props_handle); return Some((key, val)); } } } } } pub struct TablePropertiesKey { key: *const u8, keylen: size_t, _iter_handle: TablePropertiesCollectionIteratorHandle, } impl TablePropertiesKey { fn new( key: *const u8, keylen: size_t, _iter_handle: TablePropertiesCollectionIteratorHandle, ) -> TablePropertiesKey { unsafe { let bytes = slice::from_raw_parts(key, keylen); assert!(str::from_utf8(bytes).is_ok()); } TablePropertiesKey { key, keylen, _iter_handle, } } } impl Deref for TablePropertiesKey { type Target = str; fn deref(&self) -> &str { unsafe { let bytes = slice::from_raw_parts(self.key, self.keylen); str::from_utf8_unchecked(bytes) } } } pub struct TableProperties { handle: TablePropertiesHandle, } impl TableProperties { fn new(handle: TablePropertiesHandle) -> TableProperties { TableProperties { handle } } fn get_u64(&self, prop: DBTableProperty) -> u64 { unsafe { crocksdb_ffi::crocksdb_table_properties_get_u64(self.handle.ptr(), prop) } } pub fn num_entries(&self) -> u64 { self.get_u64(DBTableProperty::NumEntries) } pub fn user_collected_properties(&self) -> UserCollectedProperties { UserCollectedProperties::new(self.handle.clone()) } } pub struct UserCollectedProperties { handle: UserCollectedPropertiesHandle, } impl 
UserCollectedProperties { fn new(table_props_handle: TablePropertiesHandle) -> UserCollectedProperties { UserCollectedProperties { handle: UserCollectedPropertiesHandle::new(table_props_handle), } } pub fn get<Q: AsRef<[u8]>>(&self, index: Q) -> Option<&[u8]> { let bytes = index.as_ref(); let mut size = 0; unsaf
random
[ { "content": "#[test]\n\npub fn test_iterator() {\n\n let path = tempdir_with_prefix(\"_rust_rocksdb_iteratortest\");\n\n\n\n let k1 = b\"k1\";\n\n let k2 = b\"k2\";\n\n let k3 = b\"k3\";\n\n let k4 = b\"k4\";\n\n let v1 = b\"v1111\";\n\n let v2 = b\"v2222\";\n\n let v3 = b\"v3333\";\n\n let v4 = b\"v4444\";\n\n let db = DB::open_default(path.path().to_str().unwrap()).unwrap();\n\n let p = db.put(k1, v1);\n\n assert!(p.is_ok());\n\n let p = db.put(k2, v2);\n\n assert!(p.is_ok());\n\n let p = db.put(k3, v3);\n\n assert!(p.is_ok());\n\n let expected = vec![\n\n (k1.to_vec(), v1.to_vec()),\n", "file_path": "tests/cases/test_iterator.rs", "rank": 0, "score": 174599.93099087098 }, { "content": "pub fn create_env_logger(fname: &str, mut env: DBEnv) -> *mut DBLogger {\n\n let name = CString::new(fname.as_bytes()).unwrap();\n\n unsafe { crocksdb_ffi::crocksdb_create_env_logger(name.as_ptr(), &mut env) }\n\n}\n", "file_path": "src/logger.rs", "rank": 1, "score": 145576.27045398534 }, { "content": "pub fn supported_compression() -> Vec<DBCompressionType> {\n\n unsafe {\n\n let size = crocksdb_ffi::crocksdb_get_supported_compression_number() as usize;\n\n let mut v: Vec<DBCompressionType> = Vec::with_capacity(size);\n\n let pv = v.as_mut_ptr();\n\n crocksdb_ffi::crocksdb_get_supported_compression(pv, size as size_t);\n\n v.set_len(size);\n\n v\n\n }\n\n}\n\n\n\npub struct Env {\n\n pub inner: *mut DBEnv,\n\n #[allow(dead_code)]\n\n base: Option<Arc<Env>>,\n\n}\n\n\n\nunsafe impl Send for Env {}\n\n\n\nunsafe impl Sync for Env {}\n", "file_path": "src/rocksdb.rs", "rank": 2, "score": 142846.76615643137 }, { "content": "#[test]\n\npub fn test_column_family_option_use_doubly_skiplist() {\n\n let cf_opts = ColumnFamilyOptions::new();\n\n let memtable_name = cf_opts.get_memtable_factory_name();\n\n assert!(memtable_name.is_some());\n\n assert_eq!(\"SkipListFactory\", memtable_name.unwrap());\n\n cf_opts.set_doubly_skiplist();\n\n let memtable_name = 
cf_opts.get_memtable_factory_name();\n\n assert_eq!(\"DoublySkipListFactory\", memtable_name.unwrap());\n\n}\n", "file_path": "tests/cases/test_column_family.rs", "rank": 3, "score": 140985.76841322632 }, { "content": "pub fn new_logger<L: Logger>(l: L) -> *mut DBLogger {\n\n unsafe {\n\n let p: Box<dyn Logger> = Box::new(l);\n\n crocksdb_ffi::crocksdb_logger_create(\n\n Box::into_raw(Box::new(p)) as *mut c_void,\n\n destructor,\n\n logv,\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/logger.rs", "rank": 4, "score": 138625.79847278638 }, { "content": "struct TablePropertiesCollectionIteratorHandleWithDrop {\n\n ptr: *mut DBTablePropertiesCollectionIterator,\n\n}\n\n\n\nimpl Drop for TablePropertiesCollectionIteratorHandleWithDrop {\n\n fn drop(&mut self) {\n\n unsafe {\n\n crocksdb_ffi::crocksdb_table_properties_collection_iter_destroy(self.ptr);\n\n }\n\n }\n\n}\n\n\n\n// # Safety\n\n//\n\n// `ptr` is valid as long as the iterator is\n\n#[derive(Clone)]\n\npub struct TablePropertiesHandle {\n\n ptr: *const DBTableProperties,\n\n iter_handle: TablePropertiesCollectionIteratorHandle,\n\n}\n", "file_path": "src/table_properties_rc_handles.rs", "rank": 5, "score": 138155.86998756367 }, { "content": "pub fn new_bloom_filter(bits: c_int) -> *mut DBFilterPolicy {\n\n unsafe { crocksdb_filterpolicy_create_bloom(bits) }\n\n}\n\n\n\n/// # Safety\n\n///\n\n/// `DBLRUCacheOptions` should pointer to a valid cache options\n\npub unsafe fn new_lru_cache(opt: *mut DBLRUCacheOptions) -> *mut DBCache {\n\n crocksdb_cache_create_lru(opt)\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq)]\n\n#[repr(C)]\n\npub enum DBEntryType {\n\n Put = 0,\n\n Delete = 1,\n\n SingleDelete = 2,\n\n Merge = 3,\n\n RangeDeletion = 4,\n\n BlobIndex = 5,\n", "file_path": "librocksdb_sys/src/lib.rs", "rank": 6, "score": 137549.39229560894 }, { "content": "#[allow(deprecated)]\n\nfn prev_collect<D: Deref<Target = DB>>(iter: &mut DBIterator<D>) -> Vec<Kv> {\n\n let mut buf = vec![];\n\n while 
iter.valid().unwrap() {\n\n let k = iter.key().to_vec();\n\n let v = iter.value().to_vec();\n\n buf.push((k, v));\n\n let _ = iter.prev();\n\n }\n\n buf\n\n}\n\n\n", "file_path": "tests/cases/test_iterator.rs", "rank": 7, "score": 134397.37680136296 }, { "content": "pub fn load_latest_options(\n\n dbpath: &str,\n\n env: &Env,\n\n ignore_unknown_options: bool,\n\n) -> Result<Option<(DBOptions, Vec<CColumnFamilyDescriptor>)>, String> {\n\n const ERR_CONVERT_PATH: &str = \"Failed to convert path to CString when load latest options\";\n\n\n\n let dbpath = CString::new(dbpath.as_bytes()).map_err(|_| ERR_CONVERT_PATH.to_owned())?;\n\n let db_options = DBOptions::new();\n\n unsafe {\n\n let raw_cf_descs: *mut *mut crocksdb_ffi::ColumnFamilyDescriptor = ptr::null_mut();\n\n let mut cf_descs_len: size_t = 0;\n\n\n\n let ok = ffi_try!(crocksdb_load_latest_options(\n\n dbpath.as_ptr(),\n\n env.inner,\n\n db_options.inner,\n\n &raw_cf_descs,\n\n &mut cf_descs_len,\n\n ignore_unknown_options\n", "file_path": "src/rocksdb.rs", "rank": 8, "score": 128503.12545824706 }, { "content": "fn run_bench_wal_recycle_log(b: &mut Bencher, name: &str, recycled: bool) {\n\n let mut opts = DBOptions::new();\n\n if recycled {\n\n opts.set_recycle_log_file_num(10);\n\n }\n\n\n\n let mut wopts = WriteOptions::new();\n\n wopts.set_sync(true);\n\n\n\n run_bench_wal(b, name, opts, wopts);\n\n}\n\n\n", "file_path": "benches/cases/bench_wal.rs", "rank": 9, "score": 127392.7668525301 }, { "content": "pub fn new_event_listener<L: EventListener>(l: L) -> *mut DBEventListener {\n\n let p: Box<dyn EventListener> = Box::new(l);\n\n unsafe {\n\n crocksdb_ffi::crocksdb_eventlistener_create(\n\n Box::into_raw(Box::new(p)) as *mut c_void,\n\n destructor,\n\n on_flush_completed,\n\n on_compaction_completed,\n\n on_external_file_ingested,\n\n on_background_error,\n\n on_stall_conditions_changed,\n\n )\n\n }\n\n}\n", "file_path": "src/event_listener.rs", "rank": 10, "score": 127179.52009521492 }, { "content": 
"#[test]\n\npub fn test_ttl() {\n\n let path = tempdir_with_prefix(\"_rust_rocksdb_ttl_test\");\n\n let path_str = path.path().to_str().unwrap();\n\n\n\n // should be able to open db with ttl\n\n {\n\n let mut opts = DBOptions::new();\n\n let cf_opts = ColumnFamilyOptions::new();\n\n let ttl = 10;\n\n opts.create_if_missing(true);\n\n\n\n let mut db = match DB::open_cf_with_ttl(\n\n opts,\n\n path.path().to_str().unwrap(),\n\n vec![(\"default\", cf_opts)],\n\n &[ttl],\n\n ) {\n\n Ok(db) => {\n\n println!(\"successfully opened db with ttl\");\n\n db\n", "file_path": "tests/cases/test_ttl.rs", "rank": 11, "score": 125467.38869016935 }, { "content": "#[test]\n\npub fn test_multithreaded() {\n\n let path = tempdir_with_prefix(\"_rust_rocksdb_multithreadtest\");\n\n\n\n let db = DB::open_default(path.path().to_str().unwrap()).unwrap();\n\n let db = Arc::new(db);\n\n\n\n db.put(b\"key\", b\"value1\").unwrap();\n\n\n\n let db1 = db.clone();\n\n let j1 = thread::spawn(move || {\n\n for _ in 1..N {\n\n db1.put(b\"key\", b\"value1\").unwrap();\n\n }\n\n });\n\n\n\n let db2 = db.clone();\n\n let j2 = thread::spawn(move || {\n\n for _ in 1..N {\n\n db2.put(b\"key\", b\"value2\").unwrap();\n\n }\n", "file_path": "tests/cases/test_multithreaded.rs", "rank": 12, "score": 125467.38869016935 }, { "content": "fn next_collect<D: Deref<Target = DB>>(iter: &mut DBIterator<D>) -> Vec<(Vec<u8>, Vec<u8>)> {\n\n let mut buf = vec![];\n\n while iter.valid().unwrap() {\n\n let k = iter.key().to_vec();\n\n let v = iter.value().to_vec();\n\n buf.push((k, v));\n\n let _ = iter.next();\n\n }\n\n buf\n\n}\n\n\n", "file_path": "tests/cases/test_iterator.rs", "rank": 13, "score": 122465.43231777952 }, { "content": "pub fn set_external_sst_file_global_seq_no(\n\n db: &DB,\n\n cf: &CFHandle,\n\n file: &str,\n\n seq_no: u64,\n\n) -> Result<u64, String> {\n\n let cfile = CString::new(file).unwrap();\n\n unsafe {\n\n let pre_seq_no = ffi_try!(crocksdb_set_external_sst_file_global_seq_no(\n\n 
db.inner,\n\n cf.inner,\n\n cfile.as_ptr(),\n\n seq_no\n\n ));\n\n Ok(pre_seq_no)\n\n }\n\n}\n\n\n", "file_path": "src/rocksdb.rs", "rank": 14, "score": 120070.77057532081 }, { "content": "pub fn gen_sst(\n\n opt: ColumnFamilyOptions,\n\n cf: Option<&CFHandle>,\n\n path: &str,\n\n data: &[(&[u8], &[u8])],\n\n) {\n\n let _ = fs::remove_file(path);\n\n let env_opt = EnvOptions::new();\n\n let mut writer = if cf.is_some() {\n\n SstFileWriter::new_cf(env_opt, opt, cf.unwrap())\n\n } else {\n\n SstFileWriter::new(env_opt, opt)\n\n };\n\n writer.open(path).unwrap();\n\n for &(k, v) in data {\n\n writer.put(k, v).unwrap();\n\n }\n\n\n\n writer.finish().unwrap();\n\n}\n\n\n", "file_path": "tests/cases/test_ingest_external_file.rs", "rank": 15, "score": 120070.77057532081 }, { "content": "#[test]\n\npub fn test_column_family() {\n\n let path = tempdir_with_prefix(\"_rust_rocksdb_cftest\");\n\n let path_str = path.path().to_str().unwrap();\n\n\n\n // should be able to create column families\n\n {\n\n let mut opts = DBOptions::new();\n\n opts.create_if_missing(true);\n\n let mut cf_opts = ColumnFamilyOptions::new();\n\n cf_opts.add_merge_operator(\"test operator\", test_provided_merge);\n\n let mut db = DB::open_cf(opts, path_str, vec![(\"default\", cf_opts)]).unwrap();\n\n match db.create_cf(\"cf1\") {\n\n Ok(_) => println!(\"cf1 created successfully\"),\n\n Err(e) => {\n\n panic!(\"could not create column family: {}\", e);\n\n }\n\n }\n\n assert_eq!(db.cf_names(), vec![\"cf1\", \"default\"]);\n\n }\n\n\n", "file_path": "tests/cases/test_column_family.rs", "rank": 16, "score": 120070.77057532081 }, { "content": "#[test]\n\nfn test_send_iterator() {\n\n let path = tempdir_with_prefix(\"_rust_rocksdb_iteratortest_send\");\n\n\n\n let db = Arc::new(DB::open_default(path.path().to_str().unwrap()).unwrap());\n\n db.put(b\"k1\", b\"v1\").unwrap();\n\n\n\n let opt = ReadOptions::new();\n\n let iter = DBIterator::new(db.clone(), opt);\n\n\n\n let make_checker = |mut iter: 
DBIterator<Arc<DB>>| {\n\n let (tx, rx) = mpsc::channel();\n\n let j = thread::spawn(move || {\n\n rx.recv().unwrap();\n\n iter.seek(SeekKey::Start).unwrap();\n\n assert_eq!(iter.key(), b\"k1\");\n\n assert_eq!(iter.value(), b\"v1\");\n\n });\n\n (tx, j)\n\n };\n\n\n", "file_path": "tests/cases/test_iterator.rs", "rank": 17, "score": 114449.3672166714 }, { "content": "pub fn get_perf_level() -> PerfLevel {\n\n let v = unsafe { crocksdb_ffi::crocksdb_get_perf_level() };\n\n match v {\n\n 0 => PerfLevel::Uninitialized,\n\n 1 => PerfLevel::Disable,\n\n 2 => PerfLevel::EnableCount,\n\n 3 => PerfLevel::EnableTimeExceptForMutex,\n\n 4 => PerfLevel::EnableTimeAndCPUTimeExceptForMutex,\n\n 5 => PerfLevel::EnableTime,\n\n 6 => PerfLevel::OutOfBounds,\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "src/perf_context.rs", "rank": 18, "score": 113783.99419196235 }, { "content": "pub fn get_cf_handle<'a>(db: &'a DB, cf: &str) -> Result<&'a CFHandle, String> {\n\n db.cf_handle(cf)\n\n .ok_or_else(|| format!(\"cf {} not found.\", cf))\n\n}\n\n\n", "file_path": "tests/cases/test_delete_range.rs", "rank": 19, "score": 113109.10091006175 }, { "content": "fn link_cpp(build: &mut Build) {\n\n let tool = build.get_compiler();\n\n let stdlib = if tool.is_like_gnu() {\n\n \"libstdc++.a\"\n\n } else if tool.is_like_clang() {\n\n \"libc++.a\"\n\n } else {\n\n // Don't link to c++ statically on windows.\n\n return;\n\n };\n\n let output = tool\n\n .to_command()\n\n .arg(\"--print-file-name\")\n\n .arg(stdlib)\n\n .output()\n\n .unwrap();\n\n if !output.status.success() || output.stdout.is_empty() {\n\n // fallback to dynamically\n\n return;\n\n }\n", "file_path": "librocksdb_sys/build.rs", "rank": 20, "score": 111558.56745620506 }, { "content": "struct crocksdb_iterator_t { Iterator* rep; };\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 21, "score": 108985.46801273503 }, { "content": "#[bench]\n\nfn bench_wal_no_sync(b: &mut Bencher) {\n\n let opts = 
DBOptions::new();\n\n let mut wopts = WriteOptions::new();\n\n wopts.set_sync(false);\n\n\n\n run_bench_wal(b, \"_rust_rocksdb_wal_no_sync\", opts, wopts);\n\n}\n\n\n", "file_path": "benches/cases/bench_wal.rs", "rank": 22, "score": 108962.9567217507 }, { "content": "struct TablePropertiesCollectionHandleWithDrop {\n\n ptr: *mut DBTablePropertiesCollection,\n\n}\n\n\n\nimpl Drop for TablePropertiesCollectionHandleWithDrop {\n\n fn drop(&mut self) {\n\n unsafe {\n\n crocksdb_ffi::crocksdb_table_properties_collection_destroy(self.ptr);\n\n }\n\n }\n\n}\n\n\n\n/// This is a shared wrapper around a DBTablePropertiesCollection w/ dtor.\n\n//\n\n// # Safety\n\n//\n\n// The safety of this struct depends on drop order, with the iterator\n\n// needing to drop before the collection.\n\n#[derive(Clone)]\n\npub struct TablePropertiesCollectionIteratorHandle {\n", "file_path": "src/table_properties_rc_handles.rs", "rank": 23, "score": 108780.23177859125 }, { "content": "#[bench]\n\nfn bench_wal_with_recycle_log(b: &mut Bencher) {\n\n run_bench_wal_recycle_log(b, \"_rust_rocksdb_wal_with_recycle_log\", true);\n\n}\n\n\n", "file_path": "benches/cases/bench_wal.rs", "rank": 24, "score": 106552.81227758595 }, { "content": "#[bench]\n\nfn bench_wal_disalbe_wal(b: &mut Bencher) {\n\n let opts = DBOptions::new();\n\n let mut wopts = WriteOptions::new();\n\n wopts.disable_wal(true);\n\n\n\n run_bench_wal(b, \"_rust_rocksdb_wal_disable_wal\", opts, wopts);\n\n}\n", "file_path": "benches/cases/bench_wal.rs", "rank": 25, "score": 106552.81227758595 }, { "content": "pub fn set_perf_level(level: PerfLevel) {\n\n let v = match level {\n\n PerfLevel::Uninitialized => 0,\n\n PerfLevel::Disable => 1,\n\n PerfLevel::EnableCount => 2,\n\n PerfLevel::EnableTimeExceptForMutex => 3,\n\n PerfLevel::EnableTimeAndCPUTimeExceptForMutex => 4,\n\n PerfLevel::EnableTime => 5,\n\n PerfLevel::OutOfBounds => 6,\n\n };\n\n unsafe {\n\n crocksdb_ffi::crocksdb_set_perf_level(v);\n\n }\n\n}\n\n\n\npub struct 
PerfContext {\n\n inner: *mut DBPerfContext,\n\n}\n\n\n\nimpl PerfContext {\n", "file_path": "src/perf_context.rs", "rank": 26, "score": 106429.5679156531 }, { "content": "#[bench]\n\nfn bench_wal_without_recycle_log(b: &mut Bencher) {\n\n run_bench_wal_recycle_log(b, \"_rust_rocksdb_wal_without_recycle_log\", false);\n\n}\n\n\n", "file_path": "benches/cases/bench_wal.rs", "rank": 27, "score": 104308.94140895345 }, { "content": "struct FixedSuffixTransform {\n\n pub suffix_len: usize,\n\n}\n\n\n\nimpl SliceTransform for FixedSuffixTransform {\n\n fn transform<'a>(&mut self, key: &'a [u8]) -> &'a [u8] {\n\n &key[..(key.len() - self.suffix_len)]\n\n }\n\n\n\n fn in_domain(&mut self, key: &[u8]) -> bool {\n\n key.len() >= self.suffix_len\n\n }\n\n}\n\n\n", "file_path": "tests/cases/test_iterator.rs", "rank": 28, "score": 103855.99090597522 }, { "content": "struct FixedPrefixTransform {\n\n pub prefix_len: usize,\n\n}\n\n\n\nimpl SliceTransform for FixedPrefixTransform {\n\n fn transform<'a>(&mut self, key: &'a [u8]) -> &'a [u8] {\n\n &key[..self.prefix_len]\n\n }\n\n\n\n fn in_domain(&mut self, key: &[u8]) -> bool {\n\n key.len() >= self.prefix_len\n\n }\n\n}\n\n\n", "file_path": "tests/cases/test_iterator.rs", "rank": 29, "score": 103855.99090597522 }, { "content": "#[test]\n\nfn read_with_upper_bound() {\n\n let path = tempdir_with_prefix(\"_rust_rocksdb_read_with_upper_bound_test\");\n\n let mut opts = DBOptions::new();\n\n opts.create_if_missing(true);\n\n {\n\n let db = DB::open(opts, path.path().to_str().unwrap()).unwrap();\n\n let writeopts = WriteOptions::new();\n\n db.put_opt(b\"k1-0\", b\"a\", &writeopts).unwrap();\n\n db.put_opt(b\"k1-1\", b\"b\", &writeopts).unwrap();\n\n db.put_opt(b\"k2-0\", b\"c\", &writeopts).unwrap();\n\n\n\n let mut readopts = ReadOptions::new();\n\n let upper_bound = b\"k2\".to_vec();\n\n readopts.set_iterate_upper_bound(upper_bound);\n\n assert_eq!(readopts.iterate_upper_bound(), b\"k2\");\n\n let mut iter = 
db.iter_opt(readopts);\n\n iter.seek(SeekKey::Start).unwrap();\n\n let vec = next_collect(&mut iter);\n\n assert_eq!(vec.len(), 2);\n\n }\n\n}\n\n\n", "file_path": "tests/cases/test_iterator.rs", "rank": 30, "score": 101974.56742216273 }, { "content": "#[test]\n\nfn test_seek_for_prev() {\n\n let path = tempdir_with_prefix(\"_rust_rocksdb_seek_for_prev\");\n\n let mut opts = DBOptions::new();\n\n opts.create_if_missing(true);\n\n {\n\n let db = DB::open(opts, path.path().to_str().unwrap()).unwrap();\n\n let writeopts = WriteOptions::new();\n\n db.put_opt(b\"k1-0\", b\"a\", &writeopts).unwrap();\n\n db.put_opt(b\"k1-1\", b\"b\", &writeopts).unwrap();\n\n db.put_opt(b\"k1-3\", b\"d\", &writeopts).unwrap();\n\n\n\n let mut iter = db.iter();\n\n iter.seek_for_prev(SeekKey::Key(b\"k1-2\")).unwrap();\n\n assert!(iter.valid().unwrap());\n\n assert_eq!(iter.key(), b\"k1-1\");\n\n assert_eq!(iter.value(), b\"b\");\n\n\n\n let mut iter = db.iter();\n\n iter.seek_for_prev(SeekKey::Key(b\"k1-3\")).unwrap();\n\n assert!(iter.valid().unwrap());\n", "file_path": "tests/cases/test_iterator.rs", "rank": 31, "score": 101974.56742216273 }, { "content": "struct TablePropertiesCollectorHandle {\n\n name: CString,\n\n rep: Box<dyn TablePropertiesCollector>,\n\n}\n\n\n\nimpl TablePropertiesCollectorHandle {\n\n fn new(name: &str, rep: Box<dyn TablePropertiesCollector>) -> TablePropertiesCollectorHandle {\n\n TablePropertiesCollectorHandle {\n\n name: CString::new(name).unwrap(),\n\n rep: rep,\n\n }\n\n }\n\n}\n\n\n\nextern \"C\" fn name(handle: *mut c_void) -> *const c_char {\n\n unsafe {\n\n let handle = &mut *(handle as *mut TablePropertiesCollectorHandle);\n\n handle.name.as_ptr()\n\n }\n\n}\n", "file_path": "src/table_properties_collector.rs", "rank": 32, "score": 101054.23825957037 }, { "content": "struct crocksdb_table_properties_collection_iterator_t {\n\n TablePropertiesCollection::const_iterator cur_;\n\n TablePropertiesCollection::const_iterator 
end_;\n\n};\n\n\n\ncrocksdb_table_properties_collection_iterator_t*\n\ncrocksdb_table_properties_collection_iter_create(\n\n const crocksdb_table_properties_collection_t* collection) {\n\n auto it = new crocksdb_table_properties_collection_iterator_t;\n\n it->cur_ = collection->rep_.begin();\n\n it->end_ = collection->rep_.end();\n\n return it;\n\n}\n\n\n\nvoid crocksdb_table_properties_collection_iter_destroy(\n\n crocksdb_table_properties_collection_iterator_t* it) {\n\n delete it;\n\n}\n\n\n\nunsigned char crocksdb_table_properties_collection_iter_valid(\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 33, "score": 101043.21028313405 }, { "content": "struct crocksdb_user_collected_properties_iterator_t {\n\n UserCollectedProperties::const_iterator cur_;\n\n UserCollectedProperties::const_iterator end_;\n\n};\n\n\n\ncrocksdb_user_collected_properties_iterator_t*\n\ncrocksdb_user_collected_properties_iter_create(\n\n const crocksdb_user_collected_properties_t* props) {\n\n auto it = new crocksdb_user_collected_properties_iterator_t;\n\n it->cur_ = props->rep.begin();\n\n it->end_ = props->rep.end();\n\n return it;\n\n}\n\n\n\nvoid crocksdb_user_collected_properties_iter_destroy(\n\n crocksdb_user_collected_properties_iterator_t* it) {\n\n delete it;\n\n}\n\n\n\nunsigned char crocksdb_user_collected_properties_iter_valid(\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 34, "score": 101043.21028313405 }, { "content": "struct crocksdb_column_family_handle_t { ColumnFamilyHandle* rep; };\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 35, "score": 99210.77045389875 }, { "content": "#[test]\n\nfn test_fixed_suffix_seek() {\n\n let path = tempdir_with_prefix(\"_rust_rocksdb_fixed_suffix_seek\");\n\n let mut bbto = BlockBasedOptions::new();\n\n bbto.set_bloom_filter(10, false);\n\n bbto.set_whole_key_filtering(false);\n\n let mut opts = DBOptions::new();\n\n let mut cf_opts = ColumnFamilyOptions::new();\n\n opts.create_if_missing(true);\n\n 
cf_opts.set_block_based_table_factory(&bbto);\n\n cf_opts\n\n .set_prefix_extractor(\n\n \"FixedSuffixTransform\",\n\n Box::new(FixedSuffixTransform { suffix_len: 2 }),\n\n )\n\n .unwrap();\n\n\n\n let db = DB::open_cf(\n\n opts,\n\n path.path().to_str().unwrap(),\n\n vec![(\"default\", cf_opts)],\n", "file_path": "tests/cases/test_iterator.rs", "rank": 36, "score": 99172.32727937699 }, { "content": "#[test]\n\nfn test_total_order_seek() {\n\n let path = tempdir_with_prefix(\"_rust_rocksdb_total_order_seek\");\n\n let mut bbto = BlockBasedOptions::new();\n\n bbto.set_bloom_filter(10, false);\n\n bbto.set_whole_key_filtering(false);\n\n let mut cf_opts = ColumnFamilyOptions::new();\n\n let mut opts = DBOptions::new();\n\n opts.create_if_missing(true);\n\n cf_opts.set_block_based_table_factory(&bbto);\n\n cf_opts\n\n .set_prefix_extractor(\n\n \"FixedPrefixTransform\",\n\n Box::new(FixedPrefixTransform { prefix_len: 2 }),\n\n )\n\n .unwrap();\n\n // also create prefix bloom for memtable\n\n cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64);\n\n\n\n let keys = vec![\n\n b\"k1-1\", b\"k1-2\", b\"k1-3\", b\"k2-1\", b\"k2-2\", b\"k2-3\", b\"k3-1\", b\"k3-2\", b\"k3-3\",\n", "file_path": "tests/cases/test_iterator.rs", "rank": 37, "score": 99172.32727937699 }, { "content": "struct TablePropertiesCollectorFactoryHandle {\n\n name: CString,\n\n rep: Box<dyn TablePropertiesCollectorFactory>,\n\n}\n\n\n\nimpl TablePropertiesCollectorFactoryHandle {\n\n fn new(\n\n name: &str,\n\n rep: Box<dyn TablePropertiesCollectorFactory>,\n\n ) -> TablePropertiesCollectorFactoryHandle {\n\n TablePropertiesCollectorFactoryHandle {\n\n name: CString::new(name).unwrap(),\n\n rep: rep,\n\n }\n\n }\n\n}\n\n\n\nextern \"C\" fn name(handle: *mut c_void) -> *const c_char {\n\n unsafe {\n\n let handle = &mut *(handle as *mut TablePropertiesCollectorFactoryHandle);\n", "file_path": "src/table_properties_collector_factory.rs", "rank": 38, "score": 96030.84074511478 }, { "content": "fn 
run_bench_wal(b: &mut Bencher, name: &str, mut opts: DBOptions, wopts: WriteOptions) {\n\n let path = tempfile::Builder::new().prefix(name).tempdir().expect(\"\");\n\n let path_str = path.path().to_str().unwrap();\n\n opts.create_if_missing(true);\n\n opts.set_max_background_jobs(6);\n\n opts.set_max_subcompactions(2);\n\n\n\n let mut cf_opts = ColumnFamilyOptions::new();\n\n cf_opts.set_write_buffer_size(16 * 1024);\n\n cf_opts.set_max_write_buffer_number(10);\n\n\n\n let db = DB::open_cf(opts, path_str, vec![(\"default\", cf_opts)]).unwrap();\n\n\n\n let value = vec![1; 1024];\n\n\n\n let mut i = 0;\n\n b.iter(|| {\n\n let key = format!(\"key_{}\", i);\n\n db.put_opt(key.as_bytes(), &value, &wopts).unwrap();\n\n i += 1;\n\n });\n\n\n\n drop(db);\n\n}\n\n\n", "file_path": "benches/cases/bench_wal.rs", "rank": 39, "score": 95029.32528589008 }, { "content": "pub fn run_ldb_tool(ldb_args: &[String], opts: &DBOptions) {\n\n unsafe {\n\n let ldb_args_cstrs: Vec<_> = ldb_args\n\n .iter()\n\n .map(|s| CString::new(s.as_bytes()).unwrap())\n\n .collect();\n\n let args: Vec<_> = ldb_args_cstrs.iter().map(|s| s.as_ptr()).collect();\n\n crocksdb_ffi::crocksdb_run_ldb_tool(\n\n args.len() as i32,\n\n args.as_ptr() as *const *const c_char,\n\n opts.inner,\n\n );\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use std::fs;\n\n use std::path::Path;\n\n use std::str;\n", "file_path": "src/rocksdb.rs", "rank": 40, "score": 94811.65565106431 }, { "content": "struct crocksdb_logger_impl_t : public Logger {\n\n void* rep;\n\n\n\n void (*destructor_)(void*);\n\n void (*logv_internal_)(void* logger, int log_level, const char* format,\n\n va_list ap);\n\n\n\n void Logv(const char* format, va_list ap) override {\n\n logv_internal_(rep, InfoLogLevel::INFO_LEVEL, format, ap);\n\n }\n\n\n\n void Logv(const InfoLogLevel log_level, const char* format,\n\n va_list ap) override {\n\n logv_internal_(rep, log_level, format, ap);\n\n }\n\n\n\n virtual ~crocksdb_logger_impl_t() { 
(*destructor_)(rep); }\n\n};\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 41, "score": 93761.23529038797 }, { "content": "fn create_cfs(db: &mut DB, cfs: &[&str]) {\n\n for cf in cfs {\n\n if *cf != \"default\" {\n\n db.create_cf(*cf).unwrap();\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/cases/test_ingest_external_file.rs", "rank": 42, "score": 92957.22207488671 }, { "content": "#[test]\n\nfn test_delete_files_in_range_with_iter() {\n\n let path = tempdir_with_prefix(\"_rust_rocksdb_test_delete_files_in_range_with_iter\");\n\n let path_str = path.path().to_str().unwrap();\n\n let db = initial_data(path_str);\n\n\n\n // construct iterator before DeleteFilesInRange\n\n let mut iter = db.iter();\n\n\n\n // delete sst2\n\n db.delete_files_in_range(b\"key2\", b\"key7\", false).unwrap();\n\n\n\n let mut count = 0;\n\n assert!(iter.seek(SeekKey::Start).unwrap());\n\n while iter.valid().unwrap() {\n\n iter.next().unwrap();\n\n count = count + 1;\n\n }\n\n\n\n // iterator will pin all sst files.\n\n assert_eq!(count, 9);\n\n}\n\n\n", "file_path": "tests/cases/test_delete_files_in_range.rs", "rank": 43, "score": 91925.55016272252 }, { "content": "fn get_files_cf(db: &DB, cf: &CFHandle, max_level: usize) -> Vec<String> {\n\n let mut files = Vec::new();\n\n let cf_meta = db.get_column_family_meta_data(cf);\n\n for (i, level) in cf_meta.get_levels().iter().enumerate() {\n\n if i > max_level {\n\n break;\n\n }\n\n for f in level.get_files() {\n\n files.push(f.get_name());\n\n }\n\n }\n\n files\n\n}\n\n\n", "file_path": "tests/cases/test_metadata.rs", "rank": 44, "score": 91179.3233608292 }, { "content": "struct crocksdb_encryption_key_manager_impl_t : public KeyManager {\n\n void* state;\n\n void (*destructor)(void*);\n\n crocksdb_encryption_key_manager_get_file_cb get_file;\n\n crocksdb_encryption_key_manager_new_file_cb new_file;\n\n crocksdb_encryption_key_manager_delete_file_cb delete_file;\n\n crocksdb_encryption_key_manager_link_file_cb link_file;\n\n 
crocksdb_encryption_key_manager_rename_file_cb rename_file;\n\n\n\n virtual ~crocksdb_encryption_key_manager_impl_t() { destructor(state); }\n\n\n\n Status GetFile(const std::string& fname,\n\n FileEncryptionInfo* file_info) override {\n\n crocksdb_file_encryption_info_t info;\n\n info.rep = file_info;\n\n const char* ret = get_file(state, fname.c_str(), &info);\n\n Status s;\n\n if (ret != nullptr) {\n\n s = Status::Corruption(std::string(ret));\n\n delete ret;\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 45, "score": 86480.64435385415 }, { "content": "fn concat_merge(_: &[u8], existing_val: Option<&[u8]>, operands: &mut MergeOperands) -> Vec<u8> {\n\n let mut result: Vec<u8> = Vec::with_capacity(operands.size_hint().0);\n\n match existing_val {\n\n Some(v) => {\n\n for e in v {\n\n result.push(*e)\n\n }\n\n }\n\n None => (),\n\n }\n\n for op in operands {\n\n for e in op {\n\n result.push(*e);\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 46, "score": 78950.08117169735 }, { "content": "fn test_ctr_encrypted_env_impl(encrypted_env: Arc<Env>) {\n\n let path = tempdir_with_prefix(\"_rust_rocksdb_cryption_env\");\n\n let path_str = path.path().to_str().unwrap();\n\n\n\n let mut opts = DBOptions::new();\n\n opts.create_if_missing(true);\n\n opts.set_env(encrypted_env.clone());\n\n let db = DB::open(opts, path_str).unwrap();\n\n\n\n let samples = vec![\n\n (b\"key1\".to_vec(), b\"value1\".to_vec()),\n\n (b\"key2\".to_vec(), b\"value2\".to_vec()),\n\n (b\"key3\".to_vec(), b\"value3\".to_vec()),\n\n (b\"key4\".to_vec(), b\"value4\".to_vec()),\n\n ];\n\n for &(ref k, ref v) in &samples {\n\n db.put(k, v).unwrap();\n\n\n\n // check value\n\n assert_eq!(v.as_slice(), &*db.get(k).unwrap().unwrap());\n", "file_path": "tests/cases/test_encryption.rs", "rank": 47, "score": 78598.96878713023 }, { "content": "// Generates a file with `range` and put it to the bottommost level.\n\nfn generate_file_bottom_level(db: &DB, handle: &CFHandle, 
range: ops::Range<u32>) {\n\n for i in range {\n\n let k = format!(\"key{}\", i);\n\n let v = format!(\"value{}\", i);\n\n db.put_cf(handle, k.as_bytes(), v.as_bytes()).unwrap();\n\n }\n\n db.flush_cf(handle, true).unwrap();\n\n\n\n let opts = db.get_options_cf(handle);\n\n let mut compact_opts = CompactOptions::new();\n\n compact_opts.set_change_level(true);\n\n compact_opts.set_target_level(opts.get_num_levels() as i32 - 1);\n\n compact_opts.set_bottommost_level_compaction(DBBottommostLevelCompaction::Skip);\n\n db.compact_range_cf_opt(handle, &compact_opts, None, None);\n\n}\n\n\n", "file_path": "tests/cases/test_titan.rs", "rank": 48, "score": 77512.45166336364 }, { "content": "/// Generates a file with `range` and put it to the bottommost level.\n\nfn generate_file_bottom_level(db: &DB, handle: &CFHandle, range: ops::Range<u32>) {\n\n for i in range {\n\n let k = format!(\"key{}\", i);\n\n let v = format!(\"value{}\", i);\n\n db.put_cf(handle, k.as_bytes(), v.as_bytes()).unwrap();\n\n }\n\n db.flush_cf(handle, true).unwrap();\n\n\n\n let opts = db.get_options_cf(handle);\n\n let mut compact_opts = CompactOptions::new();\n\n compact_opts.set_change_level(true);\n\n compact_opts.set_target_level(opts.get_num_levels() as i32 - 1);\n\n compact_opts.set_bottommost_level_compaction(DBBottommostLevelCompaction::Skip);\n\n db.compact_range_cf_opt(handle, &compact_opts, None, None);\n\n}\n\n\n", "file_path": "tests/cases/test_delete_files_in_range.rs", "rank": 49, "score": 74543.07660309947 }, { "content": "fn concat_merge(_: &[u8], existing_val: Option<&[u8]>, operands: &mut MergeOperands) -> Vec<u8> {\n\n let mut result: Vec<u8> = Vec::with_capacity(operands.size_hint().0);\n\n match existing_val {\n\n Some(v) => {\n\n for e in v {\n\n result.push(*e)\n\n }\n\n }\n\n None => (),\n\n }\n\n for op in operands {\n\n for e in op {\n\n result.push(*e);\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "tests/cases/test_ingest_external_file.rs", "rank": 50, "score": 
72981.8873094832 }, { "content": "#[cfg(not(feature = \"valgrind\"))]\n\nfn main() {\n\n let path = \"/tmp/rust-rocksdb\";\n\n let db = DB::open_default(path).unwrap();\n\n assert!(db.put(b\"my key\", b\"my value\").is_ok());\n\n match db.get(b\"my key\") {\n\n Ok(Some(value)) => match value.to_utf8() {\n\n Some(v) => println!(\"retrieved utf8 value: {}\", v),\n\n None => println!(\"did not read valid utf-8 out of the db\"),\n\n },\n\n Ok(None) => panic!(\"value not present!\"),\n\n Err(e) => println!(\"error retrieving value: {}\", e),\n\n }\n\n\n\n assert!(db.delete(b\"my key\").is_ok());\n\n\n\n custom_merge();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 51, "score": 72210.12556819669 }, { "content": "struct crocksdb_env_t {\n\n Env* rep;\n\n bool is_default;\n\n EncryptionProvider* encryption_provider;\n\n BlockCipher* block_cipher;\n\n};\n\n\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 52, "score": 70368.89287742818 }, { "content": "struct crocksdb_keyversions_t {\n\n std::vector<KeyVersion> rep;\n\n};\n\n\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 53, "score": 70368.89287742818 }, { "content": "struct crocksdb_writestallinfo_t {\n\n WriteStallInfo rep;\n\n};\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 54, "score": 70368.89287742818 }, { "content": "struct ctitandb_readoptions_t {\n\n TitanReadOptions rep;\n\n};\n\n\n\nctitandb_readoptions_t* ctitandb_readoptions_create() {\n\n return new ctitandb_readoptions_t;\n\n}\n\n\n\nvoid ctitandb_readoptions_destroy(ctitandb_readoptions_t* opts) {\n\n delete opts;\n\n}\n\n\n\nunsigned char ctitandb_readoptions_key_only(ctitandb_readoptions_t* opts) {\n\n return opts->rep.key_only;\n\n}\n\n\n\nvoid ctitandb_readoptions_set_key_only(ctitandb_readoptions_t* opts,\n\n unsigned char v) {\n\n opts->rep.key_only = v;\n\n}\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 55, "score": 70368.89287742818 }, { "content": "struct crocksdb_compactoptions_t {\n\n CompactRangeOptions 
rep;\n\n};\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 56, "score": 70368.89287742818 }, { "content": "struct crocksdb_compactionfiltercontext_t {\n\n CompactionFilter::Context rep;\n\n};\n\n\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 57, "score": 70368.89287742818 }, { "content": "struct TableFilter {\n\n // After passing TableFilter to ReadOptions, ReadOptions will be copyed\n\n // several times, so we need use shared_ptr to control the ctx_ resource\n\n // destroy ctx_ only when the last ReadOptions out of its life time.\n\n TableFilter(void* ctx,\n\n int (*table_filter)(void*, const crocksdb_table_properties_t*),\n\n void (*destroy)(void*))\n\n : ctx_(std::make_shared<TableFilterCtx>(ctx, destroy)),\n\n table_filter_(table_filter) {}\n\n\n\n TableFilter(const TableFilter& f)\n\n : ctx_(f.ctx_),\n\n table_filter_(f.table_filter_) {}\n\n\n\n bool operator()(const TableProperties& prop) {\n\n return table_filter_(ctx_->ctx_, reinterpret_cast<const crocksdb_table_properties_t*>(&prop));\n\n }\n\n\n\n shared_ptr<TableFilterCtx> ctx_;\n\n int (*table_filter_)(void*, const crocksdb_table_properties_t*);\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 58, "score": 70368.89287742818 }, { "content": "struct ctitandb_options_t {\n\n TitanOptions rep;\n\n};\n\n\n\n// TODO: Simplify the API by merging db_options into tdb_options, and\n\n// column_family_options into titan_column_family_options, since the later\n\n// of the pairs already contain the former.\n\ncrocksdb_t* ctitandb_open_column_families(\n\n const char* name, const crocksdb_options_t* db_options,\n\n const ctitandb_options_t* tdb_options, int num_column_families,\n\n const char** column_family_names,\n\n const crocksdb_options_t** column_family_options,\n\n const ctitandb_options_t** titan_column_family_options,\n\n crocksdb_column_family_handle_t** column_family_handles, char** errptr) {\n\n std::vector<TitanCFDescriptor> column_families;\n\n for (int i = 0; i < 
num_column_families; i++) {\n\n *((ColumnFamilyOptions*)(&titan_column_family_options[i]->rep)) =\n\n column_family_options[i]->rep;\n\n column_families.push_back(\n\n TitanCFDescriptor(std::string(column_family_names[i]),\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 59, "score": 70368.89287742818 }, { "content": "struct crocksdb_writestallcondition_t {\n\n WriteStallCondition rep;\n\n};\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 60, "score": 70368.89287742818 }, { "content": "struct crocksdb_readoptions_t {\n\n ReadOptions rep;\n\n Slice upper_bound; // stack variable to set pointer to in ReadOptions\n\n Slice lower_bound;\n\n};\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 61, "score": 70368.89287742818 }, { "content": "struct crocksdb_compactionjobinfo_t {\n\n CompactionJobInfo rep;\n\n};\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 62, "score": 70368.89287742818 }, { "content": "struct crocksdb_externalfileingestioninfo_t {\n\n ExternalFileIngestionInfo rep;\n\n};\n\n\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 63, "score": 70368.89287742818 }, { "content": "struct crocksdb_flushjobinfo_t {\n\n FlushJobInfo rep;\n\n};\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 64, "score": 70368.89287742818 }, { "content": "fn custom_merge() {\n\n let path = \"_rust_rocksdb_mergetest\";\n\n let mut opts = DBOptions::new();\n\n opts.create_if_missing(true);\n\n let mut cf_opts = ColumnFamilyOptions::new();\n\n cf_opts.add_merge_operator(\"test operator\", concat_merge);\n\n {\n\n let db = DB::open_cf(opts, path, vec![(\"default\", cf_opts)]).unwrap();\n\n db.put(b\"k1\", b\"a\").unwrap();\n\n db.merge(b\"k1\", b\"b\").unwrap();\n\n db.merge(b\"k1\", b\"c\").unwrap();\n\n db.merge(b\"k1\", b\"d\").unwrap();\n\n db.merge(b\"k1\", b\"efg\").unwrap();\n\n db.merge(b\"k1\", b\"h\").unwrap();\n\n match db.get(b\"k1\") {\n\n Ok(Some(value)) => match value.to_utf8() {\n\n Some(v) => println!(\"retrieved utf8 
value: {}\", v),\n\n None => println!(\"did not read valid utf-8 out of the db\"),\n\n },\n\n Ok(None) => panic!(\"value not present!\"),\n", "file_path": "src/main.rs", "rank": 65, "score": 70251.6958061557 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-env-changed=UPDATE_BIND\");\n\n\n\n let mut build = build_rocksdb();\n\n\n\n build.cpp(true).file(\"crocksdb/c.cc\");\n\n if !cfg!(target_os = \"windows\") {\n\n build.flag(\"-std=c++11\");\n\n build.flag(\"-fno-rtti\");\n\n }\n\n link_cpp(&mut build);\n\n build.warnings(false).compile(\"libcrocksdb.a\");\n\n}\n\n\n", "file_path": "librocksdb_sys/build.rs", "rank": 66, "score": 70251.6958061557 }, { "content": "// This is for the DB and write batches to share the same API\n\npub trait Writable {\n\n fn put(&self, key: &[u8], value: &[u8]) -> Result<(), String>;\n\n fn put_cf(&self, cf: &CFHandle, key: &[u8], value: &[u8]) -> Result<(), String>;\n\n fn merge(&self, key: &[u8], value: &[u8]) -> Result<(), String>;\n\n fn merge_cf(&self, cf: &CFHandle, key: &[u8], value: &[u8]) -> Result<(), String>;\n\n fn delete(&self, key: &[u8]) -> Result<(), String>;\n\n fn delete_cf(&self, cf: &CFHandle, key: &[u8]) -> Result<(), String>;\n\n fn single_delete(&self, key: &[u8]) -> Result<(), String>;\n\n fn single_delete_cf(&self, cf: &CFHandle, key: &[u8]) -> Result<(), String>;\n\n fn delete_range(&self, begin_key: &[u8], end_key: &[u8]) -> Result<(), String>;\n\n fn delete_range_cf(\n\n &self,\n\n cf: &CFHandle,\n\n begin_key: &[u8],\n\n end_key: &[u8],\n\n ) -> Result<(), String>;\n\n}\n\n\n\n/// A range of keys, `start_key` is included, but not `end_key`.\n\n///\n", "file_path": "src/rocksdb.rs", "rank": 67, "score": 69103.96300194357 }, { "content": "#[derive(Default)]\n\nstruct TitanCollector {\n\n num_blobs: u32,\n\n num_entries: u32,\n\n}\n\n\n\nimpl TitanCollector {\n\n fn add(&mut self, other: &TitanCollector) {\n\n self.num_blobs += other.num_blobs;\n\n self.num_entries += other.num_entries;\n\n }\n\n\n\n 
fn encode(&self) -> HashMap<Vec<u8>, Vec<u8>> {\n\n let mut props = HashMap::new();\n\n props.insert(vec![0], encode_u32(self.num_blobs));\n\n props.insert(vec![1], encode_u32(self.num_entries));\n\n props\n\n }\n\n\n\n fn decode(props: &UserCollectedProperties) -> TitanCollector {\n\n let mut c = TitanCollector::default();\n", "file_path": "tests/cases/test_titan.rs", "rank": 68, "score": 68717.48830799125 }, { "content": "#[repr(C)]\n\nstruct CompactionFilterProxy {\n\n name: CString,\n\n filter: Box<dyn CompactionFilter>,\n\n}\n\n\n\nextern \"C\" fn name(filter: *mut c_void) -> *const c_char {\n\n unsafe { (*(filter as *mut CompactionFilterProxy)).name.as_ptr() }\n\n}\n\n\n\nextern \"C\" fn destructor(filter: *mut c_void) {\n\n unsafe {\n\n Box::from_raw(filter as *mut CompactionFilterProxy);\n\n }\n\n}\n\n\n\nextern \"C\" fn filter(\n\n filter: *mut c_void,\n\n level: c_int,\n\n key: *const u8,\n\n key_len: size_t,\n", "file_path": "src/compaction_filter.rs", "rank": 69, "score": 68717.48830799125 }, { "content": "struct crocksdb_compaction_options_t {\n\n CompactionOptions rep;\n\n};\n\n\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 70, "score": 68717.48830799125 }, { "content": "struct TableFilterCtx {\n\n TableFilterCtx(void* ctx, void(*destroy)(void*))\n\n : ctx_(ctx), destroy_(destroy) {}\n\n ~TableFilterCtx() { destroy_(ctx_); }\n\n\n\n void* ctx_;\n\n void (*destroy_)(void*);\n\n};\n\n\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 71, "score": 68717.48830799125 }, { "content": "struct crocksdb_perf_context_t {\n\n PerfContext rep;\n\n};\n\n\n\ncrocksdb_perf_context_t* crocksdb_get_perf_context(void) {\n\n return reinterpret_cast<crocksdb_perf_context_t*>(rocksdb::get_perf_context());\n\n}\n\n\n\nvoid crocksdb_perf_context_reset(crocksdb_perf_context_t* ctx) {\n\n ctx->rep.Reset();\n\n}\n\n\n\nuint64_t crocksdb_perf_context_user_key_comparison_count(crocksdb_perf_context_t* ctx) {\n\n return 
ctx->rep.user_key_comparison_count;\n\n}\n\n\n\nuint64_t crocksdb_perf_context_block_cache_hit_count(crocksdb_perf_context_t* ctx) {\n\n return ctx->rep.block_cache_hit_count;\n\n}\n\n\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 72, "score": 68717.48830799125 }, { "content": "struct crocksdb_iostats_context_t {\n\n IOStatsContext rep;\n\n};\n\n\n\ncrocksdb_iostats_context_t* crocksdb_get_iostats_context(void) {\n\n return reinterpret_cast<crocksdb_iostats_context_t*>(rocksdb::get_iostats_context());\n\n}\n\n\n\nvoid crocksdb_iostats_context_reset(crocksdb_iostats_context_t* ctx) {\n\n ctx->rep.Reset();\n\n}\n\n\n\nuint64_t crocksdb_iostats_context_bytes_written(crocksdb_iostats_context_t* ctx) {\n\n return ctx->rep.bytes_written;\n\n}\n\n\n\nuint64_t crocksdb_iostats_context_bytes_read(crocksdb_iostats_context_t* ctx) {\n\n return ctx->rep.bytes_read;\n\n}\n\n\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 73, "score": 68717.48830799125 }, { "content": "struct crocksdb_table_properties_t {\n\n const TableProperties rep;\n\n};\n\n\n\nuint64_t crocksdb_table_properties_get_u64(\n\n const crocksdb_table_properties_t* props, crocksdb_table_property_t prop) {\n\n const TableProperties& rep = props->rep;\n\n switch (prop) {\n\n case kDataSize:\n\n return rep.data_size;\n\n case kIndexSize:\n\n return rep.index_size;\n\n case kFilterSize:\n\n return rep.filter_size;\n\n case kRawKeySize:\n\n return rep.raw_key_size;\n\n case kRawValueSize:\n\n return rep.raw_value_size;\n\n case kNumDataBlocks:\n\n return rep.num_data_blocks;\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 74, "score": 68717.48830799125 }, { "content": "#[derive(Default, Clone)]\n\nstruct TestDrop {\n\n called: Arc<AtomicUsize>,\n\n}\n\n\n\nimpl Drop for TestDrop {\n\n fn drop(&mut self) {\n\n self.called.fetch_add(1, Ordering::SeqCst);\n\n }\n\n}\n\n\n", "file_path": "tests/cases/test_logger.rs", "rank": 75, "score": 68717.48830799125 }, { "content": "#[derive(Default, 
Clone)]\n\nstruct TestLogger {\n\n print: Arc<AtomicUsize>,\n\n drop: Option<TestDrop>,\n\n}\n\n\n\nimpl Logger for TestLogger {\n\n fn logv(&self, _log_level: InfoLogLevel, _format: *const c_char, _ap: VaList) {\n\n self.print.fetch_add(1, Ordering::SeqCst);\n\n }\n\n}\n\n\n", "file_path": "tests/cases/test_logger.rs", "rank": 76, "score": 68717.48830799125 }, { "content": "struct crocksdb_map_property_t {\n\n std::map<std::string, std::string> rep;\n\n};\n\n\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 77, "score": 68717.48830799125 }, { "content": "struct Filter {\n\n drop_called: Arc<AtomicBool>,\n\n filtered_kvs: Arc<RwLock<Vec<(Vec<u8>, Vec<u8>)>>>,\n\n}\n\n\n\nimpl CompactionFilter for Filter {\n\n fn filter(\n\n &mut self,\n\n _: usize,\n\n key: &[u8],\n\n value: &[u8],\n\n _: &mut Vec<u8>,\n\n _: &mut bool,\n\n ) -> bool {\n\n self.filtered_kvs\n\n .write()\n\n .unwrap()\n\n .push((key.to_vec(), value.to_vec()));\n\n true\n\n }\n\n}\n\n\n\nimpl Drop for Filter {\n\n fn drop(&mut self) {\n\n self.drop_called.store(true, Ordering::Relaxed);\n\n }\n\n}\n\n\n", "file_path": "tests/cases/test_compaction_filter.rs", "rank": 78, "score": 68717.48830799125 }, { "content": "struct ExternalSstFileModifier {\n\n ExternalSstFileModifier(Env *env, const EnvOptions& env_options,\n\n ColumnFamilyHandle* handle)\n\n : env_(env), env_options_(env_options), handle_(handle) {}\n\n\n\n Status Open(std::string file) {\n\n file_ = file;\n\n // Get External Sst File Size\n\n uint64_t file_size;\n\n auto status = env_->GetFileSize(file_, &file_size);\n\n if (!status.ok()) {\n\n return status;\n\n }\n\n\n\n // Open External Sst File\n\n std::unique_ptr<RandomAccessFile> sst_file;\n\n std::unique_ptr<RandomAccessFileReader> sst_file_reader;\n\n status = env_->NewRandomAccessFile(file_, &sst_file, env_options_);\n\n if (!status.ok()) {\n\n return status;\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 79, "score": 67198.24839978224 }, { "content": 
"#[derive(Default)]\n\nstruct TitanCollectorFactory {}\n\n\n\nimpl TablePropertiesCollectorFactory for TitanCollectorFactory {\n\n fn create_table_properties_collector(&mut self, _: u32) -> Box<dyn TablePropertiesCollector> {\n\n Box::new(TitanCollector::default())\n\n }\n\n}\n\n\n", "file_path": "tests/cases/test_titan.rs", "rank": 80, "score": 67198.24839978224 }, { "content": "#ifdef OPENSSL\n\nstruct crocksdb_file_encryption_info_t {\n\n FileEncryptionInfo* rep;\n\n};\n\n\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 81, "score": 67198.24839978224 }, { "content": "struct crocksdb_encryption_key_manager_t {\n\n std::shared_ptr<KeyManager> rep;\n\n};\n\n#endif\n\n\n\nstatic bool SaveError(char** errptr, const Status& s) {\n\n assert(errptr != nullptr);\n\n if (s.ok()) {\n\n return false;\n\n } else if (*errptr == nullptr) {\n\n *errptr = strdup(s.ToString().c_str());\n\n } else {\n\n // TODO(sanjay): Merge with existing error?\n\n // This is a bug if *errptr is not created by malloc()\n\n free(*errptr);\n\n *errptr = strdup(s.ToString().c_str());\n\n }\n\n return true;\n\n}\n\n\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 82, "score": 67198.24839978224 }, { "content": "struct ExampleCollector {\n\n num_keys: u32,\n\n num_puts: u32,\n\n num_merges: u32,\n\n num_deletes: u32,\n\n last_key: Vec<u8>,\n\n}\n\n\n\nimpl ExampleCollector {\n\n fn new() -> ExampleCollector {\n\n ExampleCollector {\n\n num_keys: 0,\n\n num_puts: 0,\n\n num_merges: 0,\n\n num_deletes: 0,\n\n last_key: Vec::new(),\n\n }\n\n }\n\n\n\n fn add(&mut self, other: &ExampleCollector) {\n", "file_path": "tests/cases/test_table_properties.rs", "rank": 83, "score": 67198.24839978224 }, { "content": "struct crocksdb_universal_compaction_options_t {\n\n rocksdb::CompactionOptionsUniversal *rep;\n\n};\n\n\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 84, "score": 67198.24839978224 }, { "content": "struct crocksdb_table_properties_collection_t {\n\n 
TablePropertiesCollection rep_;\n\n};\n\n\n\nsize_t crocksdb_table_properties_collection_len(\n\n const crocksdb_table_properties_collection_t* props) {\n\n return props->rep_.size();\n\n}\n\n\n\nvoid crocksdb_table_properties_collection_destroy(\n\n crocksdb_table_properties_collection_t* t) {\n\n delete t;\n\n}\n\n\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 85, "score": 67198.24839978224 }, { "content": "struct crocksdb_column_family_descriptor {\n\n ColumnFamilyDescriptor rep;\n\n};\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 86, "score": 67198.24839978224 }, { "content": "#[repr(C)]\n\nstruct CompactionFilterFactoryProxy {\n\n name: CString,\n\n factory: Box<dyn CompactionFilterFactory>,\n\n}\n\n\n\nmod factory {\n\n use super::{CompactionFilterContext, CompactionFilterFactoryProxy};\n\n use crocksdb_ffi::{DBCompactionFilter, DBCompactionFilterContext};\n\n use libc::{c_char, c_void};\n\n\n\n pub(super) extern \"C\" fn name(factory: *mut c_void) -> *const c_char {\n\n unsafe {\n\n let proxy = &*(factory as *mut CompactionFilterFactoryProxy);\n\n proxy.name.as_ptr()\n\n }\n\n }\n\n\n\n pub(super) extern \"C\" fn destructor(factory: *mut c_void) {\n\n unsafe {\n\n Box::from_raw(factory as *mut CompactionFilterFactoryProxy);\n", "file_path": "src/compaction_filter.rs", "rank": 87, "score": 67198.24839978224 }, { "content": "struct crocksdb_user_collected_properties_t {\n\n UserCollectedProperties rep;\n\n};\n\n\n\nvoid crocksdb_user_collected_properties_add(\n\n crocksdb_user_collected_properties_t* props,\n\n const char* k, size_t klen,\n\n const char* v, size_t vlen) {\n\n props->rep.emplace(\n\n std::make_pair(std::string(k, klen), std::string(v, vlen)));\n\n}\n\n\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 88, "score": 67198.24839978224 }, { "content": "struct crocksdb_lru_cache_options_t {\n\n LRUCacheOptions rep;\n\n};\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 89, "score": 67198.24839978224 }, { 
"content": "struct crocksdb_level_meta_data_t {\n\n LevelMetaData rep;\n\n};\n", "file_path": "librocksdb_sys/crocksdb/c.cc", "rank": 90, "score": 67198.24839978224 }, { "content": "#[derive(Default, Clone)]\n\nstruct EventCounter {\n\n flush: Arc<AtomicUsize>,\n\n compaction: Arc<AtomicUsize>,\n\n ingestion: Arc<AtomicUsize>,\n\n drop_count: Arc<AtomicUsize>,\n\n input_records: Arc<AtomicUsize>,\n\n output_records: Arc<AtomicUsize>,\n\n input_bytes: Arc<AtomicUsize>,\n\n output_bytes: Arc<AtomicUsize>,\n\n manual_compaction: Arc<AtomicUsize>,\n\n}\n\n\n\nimpl Drop for EventCounter {\n\n fn drop(&mut self) {\n\n self.drop_count.fetch_add(1, Ordering::SeqCst);\n\n }\n\n}\n\n\n\nimpl EventListener for EventCounter {\n\n fn on_flush_completed(&self, info: &FlushJobInfo) {\n", "file_path": "tests/cases/test_event_listener.rs", "rank": 91, "score": 67198.24839978224 }, { "content": "struct ExampleFactory {}\n\n\n\nimpl ExampleFactory {\n\n fn new() -> ExampleFactory {\n\n ExampleFactory {}\n\n }\n\n}\n\n\n\nimpl TablePropertiesCollectorFactory for ExampleFactory {\n\n fn create_table_properties_collector(&mut self, _: u32) -> Box<dyn TablePropertiesCollector> {\n\n Box::new(ExampleCollector::new())\n\n }\n\n}\n\n\n", "file_path": "tests/cases/test_table_properties.rs", "rank": 92, "score": 67198.24839978224 }, { "content": "// Make sure all compression types are supported.\n\nfn test_compression() {\n\n let path = tempdir_with_prefix(\"_rust_rocksdb_test_metadata\");\n\n let compression_types = [\n\n DBCompressionType::Snappy,\n\n DBCompressionType::Zlib,\n\n DBCompressionType::Bz2,\n\n DBCompressionType::Lz4,\n\n DBCompressionType::Lz4hc,\n\n DBCompressionType::Zstd,\n\n ];\n\n for compression_type in compression_types.iter() {\n\n let mut opts = DBOptions::new();\n\n opts.create_if_missing(true);\n\n let mut cf_opts = ColumnFamilyOptions::new();\n\n cf_opts.compression(*compression_type);\n\n // DB open will fail if compression type is not supported.\n\n 
DB::open_cf(\n\n opts,\n\n path.path().to_str().unwrap(),\n\n vec![(\"default\", cf_opts)],\n\n )\n\n .unwrap();\n\n }\n\n}\n", "file_path": "tests/cases/test_compression.rs", "rank": 93, "score": 66829.95492028927 }, { "content": "// Determine if need to update bindings. Supported platforms do not\n\n// need to be updated by default unless the UPDATE_BIND is specified.\n\n// Other platforms use bindgen to generate the bindings every time.\n\nfn config_binding_path() {\n\n let file_path: PathBuf;\n\n\n\n let target = env::var(\"TARGET\").unwrap_or_else(|_| \"\".to_owned());\n\n match target.as_str() {\n\n \"x86_64-unknown-linux-gnu\" | \"aarch64-unknown-linux-gnu\" => {\n\n file_path = PathBuf::from(env::var(\"CARGO_MANIFEST_DIR\").unwrap())\n\n .join(\"bindings\")\n\n .join(format!(\"{}-bindings.rs\", target));\n\n if env::var(\"UPDATE_BIND\")\n\n .map(|s| s.as_str() == \"1\")\n\n .unwrap_or(false)\n\n {\n\n bindgen_rocksdb(&file_path);\n\n }\n\n }\n\n _ => {\n\n file_path = PathBuf::from(env::var(\"OUT_DIR\").unwrap()).join(\"rocksdb-bindings.rs\");\n\n bindgen_rocksdb(&file_path);\n\n }\n\n };\n\n println!(\n\n \"cargo:rustc-env=BINDING_PATH={}\",\n\n file_path.to_str().unwrap()\n\n );\n\n}\n\n\n", "file_path": "librocksdb_sys/build.rs", "rank": 94, "score": 66829.1338400598 }, { "content": "fn main() {\n\n // RocksDB cmake script expect libz.a being under ${DEP_Z_ROOT}/lib, but libz-sys crate put it\n\n // under ${DEP_Z_ROOT}/build. 
Append the path to CMAKE_PREFIX_PATH to get around it.\n\n env::set_var(\"CMAKE_PREFIX_PATH\", {\n\n let zlib_path = format!(\"{}/build\", env::var(\"DEP_Z_ROOT\").unwrap());\n\n if let Ok(prefix_path) = env::var(\"CMAKE_PREFIX_PATH\") {\n\n format!(\"{};{}\", prefix_path, zlib_path)\n\n } else {\n\n zlib_path\n\n }\n\n });\n\n let cur_dir = std::env::current_dir().unwrap();\n\n let mut cfg = cmake::Config::new(\"titan\");\n\n if cfg!(feature = \"portable\") {\n\n cfg.define(\"PORTABLE\", \"ON\");\n\n }\n\n if cfg!(feature = \"sse\") {\n\n cfg.define(\"FORCE_SSE42\", \"ON\");\n\n }\n\n let dst = cfg\n", "file_path": "librocksdb_sys/libtitan_sys/build.rs", "rank": 95, "score": 66824.64560859461 }, { "content": "#[test]\n\nfn test_titandb() {\n\n let max_value_size = 10;\n\n\n\n let path = tempdir_with_prefix(\"test_titandb\");\n\n let tdb_path = path.path().join(\"titandb\");\n\n let mut tdb_opts = TitanDBOptions::new();\n\n tdb_opts.set_dirname(tdb_path.to_str().unwrap());\n\n tdb_opts.set_min_blob_size(max_value_size / 2 + 1);\n\n tdb_opts.set_blob_file_compression(DBCompressionType::No);\n\n tdb_opts.set_disable_background_gc(true);\n\n tdb_opts.set_purge_obsolete_files_period(10);\n\n tdb_opts.set_level_merge(false);\n\n tdb_opts.set_range_merge(false);\n\n tdb_opts.set_max_sorted_runs(20);\n\n\n\n let mut opts = DBOptions::new();\n\n opts.create_if_missing(true);\n\n opts.set_titandb_options(&tdb_opts);\n\n let mut cf_opts = ColumnFamilyOptions::new();\n\n let f = TitanCollectorFactory::default();\n", "file_path": "tests/cases/test_titan.rs", "rank": 96, "score": 66824.64560859461 }, { "content": "#[test]\n\nfn test_metadata() {\n\n let path = tempdir_with_prefix(\"_rust_rocksdb_test_metadata\");\n\n let mut opts = DBOptions::new();\n\n opts.create_if_missing(true);\n\n let mut cf_opts = ColumnFamilyOptions::new();\n\n cf_opts.set_disable_auto_compactions(true);\n\n let db = DB::open_cf(\n\n opts,\n\n path.path().to_str().unwrap(),\n\n vec![(\"default\", 
cf_opts)],\n\n )\n\n .unwrap();\n\n let cf_handle = db.cf_handle(\"default\").unwrap();\n\n\n\n let num_files = 5;\n\n for i in 0..num_files {\n\n db.put(&[i], &[i]).unwrap();\n\n db.flush(true).unwrap();\n\n }\n\n\n", "file_path": "tests/cases/test_metadata.rs", "rank": 97, "score": 66824.64560859461 }, { "content": "#[test]\n\nfn test_logger() {\n\n let drop_called = Arc::new(AtomicUsize::new(0));\n\n let path = tempdir_with_prefix(\"_rust_rocksdb_test_create_info_rust_log_opt\");\n\n let mut opts = DBOptions::new();\n\n let logger = TestLogger {\n\n drop: Some(TestDrop {\n\n called: drop_called.clone(),\n\n }),\n\n print: Default::default(),\n\n };\n\n opts.set_info_log(logger.clone());\n\n opts.create_if_missing(true);\n\n opts.set_info_log_level(InfoLogLevel::Debug);\n\n let db = DB::open(opts.clone(), path.path().to_str().unwrap()).unwrap();\n\n thread::sleep(Duration::from_secs(2));\n\n assert_ne!(logger.print.load(Ordering::SeqCst), 0);\n\n drop(db);\n\n drop(opts);\n\n assert_eq!(0, drop_called.load(Ordering::SeqCst));\n\n drop(logger);\n\n assert_eq!(1, drop_called.load(Ordering::SeqCst));\n\n}\n", "file_path": "tests/cases/test_logger.rs", "rank": 98, "score": 66824.64560859461 }, { "content": "fn split_descriptors<'a>(\n\n list: Vec<ColumnFamilyDescriptor<'a>>,\n\n) -> (Vec<&'a str>, Vec<ColumnFamilyOptions>) {\n\n let mut v1 = Vec::with_capacity(list.len());\n\n let mut v2 = Vec::with_capacity(list.len());\n\n for d in list {\n\n v1.push(d.name);\n\n v2.push(d.options);\n\n }\n\n (v1, v2)\n\n}\n\n\n", "file_path": "src/rocksdb.rs", "rank": 99, "score": 66298.93664143763 } ]
Rust
src/mesh/pointcloud.rs
elrnv/meshx
8119edab6eb744bc2a7d21577a472de8927e81e5
use crate::attrib::*; use crate::mesh::topology::*; use crate::mesh::{VertexMesh, VertexPositions}; use crate::Real; #[derive(Clone, Debug, PartialEq, Attrib, Intrinsic)] pub struct PointCloud<T: Real> { #[intrinsic(VertexPositions)] pub vertex_positions: IntrinsicAttribute<[T; 3], VertexIndex>, pub vertex_attributes: AttribDict<VertexIndex>, } impl<T: Real> PointCloud<T> { #[inline] pub fn new(verts: Vec<[T; 3]>) -> PointCloud<T> { PointCloud { vertex_positions: IntrinsicAttribute::from_vec(verts), vertex_attributes: AttribDict::new(), } } } impl<T: Real> Default for PointCloud<T> { fn default() -> Self { PointCloud::new(vec![]) } } impl<T: Real> NumVertices for PointCloud<T> { #[inline] fn num_vertices(&self) -> usize { self.vertex_positions.len() } } impl<T: Real, M: VertexMesh<T>> From<&M> for PointCloud<T> { fn from(mesh: &M) -> PointCloud<T> { let vertex_attributes = mesh.attrib_dict::<VertexIndex>().clone(); PointCloud { vertex_positions: IntrinsicAttribute::from_slice(mesh.vertex_positions()), vertex_attributes, } } } impl<T: Real> From<super::PolyMesh<T>> for PointCloud<T> { fn from(polymesh: super::PolyMesh<T>) -> PointCloud<T> { let super::PolyMesh { vertex_positions, vertex_attributes, .. } = polymesh; PointCloud { vertex_positions, vertex_attributes, } } } impl<T: Real> From<super::TriMesh<T>> for PointCloud<T> { fn from(mesh: super::TriMesh<T>) -> PointCloud<T> { let super::TriMesh { vertex_positions, vertex_attributes, .. } = mesh; PointCloud { vertex_positions, vertex_attributes, } } } impl<T: Real> From<super::QuadMesh<T>> for PointCloud<T> { fn from(mesh: super::QuadMesh<T>) -> PointCloud<T> { let super::QuadMesh { vertex_positions, vertex_attributes, .. } = mesh; PointCloud { vertex_positions, vertex_attributes, } } } impl<T: Real> From<super::TetMesh<T>> for PointCloud<T> { fn from(mesh: super::TetMesh<T>) -> PointCloud<T> { let super::TetMesh { vertex_positions, vertex_attributes, .. 
} = mesh; PointCloud { vertex_positions, vertex_attributes, } } } #[cfg(test)] mod tests { use super::*; #[test] fn pointcloud_test() { let pts = vec![ [0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [1.0, 1.0, 0.0], ]; let mut ptcloud = PointCloud::new(pts.clone()); assert_eq!(ptcloud.num_vertices(), 4); for (pt1, pt2) in ptcloud.vertex_position_iter().zip(pts.iter()) { assert_eq!(*pt1, *pt2); } for (pt1, pt2) in ptcloud.vertex_position_iter_mut().zip(pts.iter()) { assert_eq!(*pt1, *pt2); } for (pt1, pt2) in ptcloud.vertex_positions().iter().zip(pts.iter()) { assert_eq!(*pt1, *pt2); } for (pt1, pt2) in ptcloud.vertex_positions_mut().iter().zip(pts.iter()) { assert_eq!(*pt1, *pt2); } } #[test] fn convert_test() { use crate::mesh::{PolyMesh, QuadMesh, TetMesh, TriMesh}; let pts = vec![ [0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [1.0, 1.0, 0.0], ]; let ptcloud = PointCloud::new(pts.clone()); let polymesh = PolyMesh::new(pts.clone(), &vec![]); let trimesh = TriMesh::new(pts.clone(), vec![]); let quadmesh = QuadMesh::new(pts.clone(), vec![]); let tetmesh = TetMesh::new(pts.clone(), vec![]); assert_eq!(PointCloud::from(&polymesh), ptcloud); assert_eq!(PointCloud::from(&trimesh), ptcloud); assert_eq!(PointCloud::from(&quadmesh), ptcloud); assert_eq!(PointCloud::from(&tetmesh), ptcloud); assert_eq!(PointCloud::from(polymesh), ptcloud); assert_eq!(PointCloud::from(trimesh), ptcloud); assert_eq!(PointCloud::from(quadmesh), ptcloud); assert_eq!(PointCloud::from(tetmesh), ptcloud); } }
use crate::attrib::*; use crate::mesh::topology::*; use crate::mesh::{VertexMesh, VertexPositions}; use crate::Real; #[derive(Clone, Debug, PartialEq, Attrib, Intrinsic)] pub struct PointCloud<T: Real> { #[intrinsic(VertexPositions)] pub vertex_positions: IntrinsicAttribute<[T; 3], VertexIndex>, pub vertex_attributes: AttribDict<VertexIndex>, } impl<T: Real> PointCloud<T> { #[inline] pub fn new(verts: Vec<[T; 3]>) -> PointCloud<T> { PointCloud { vertex_positions: IntrinsicAttribute::from_vec(verts), vertex_attributes: AttribDict::new(), } } } impl<T: Real> Default for PointCloud<T> { fn default() -> Self { PointCloud::new(vec![]) } } impl<T: Real> NumVertices for PointCloud<T> { #[inline] fn num_vertices(&self) -> usize { self.vertex_positions.len() } } impl<T: Real, M: VertexMesh<T>> From<&M> for PointCloud<T> { fn from(mesh: &M) -> PointCloud<T> { let vertex_attributes = mesh.attrib_dict::<VertexIndex>().clone(); PointCloud { vertex_positions: IntrinsicAttribute::from_slice(mesh.vertex_positions()), vertex_attributes, } } } impl<T: Real> From<super::PolyMesh<T>> for PointCloud<T> { fn from(polymesh: super::PolyMesh<T>) -> PointCloud<T> { let super::PolyMesh { vertex_positions, vertex_attributes, .. } = polymesh; PointCloud { vertex_positions, vertex_attributes, } } } impl<T: Real> From<super::TriMesh<T>> for PointCloud<T> { fn from(mesh: super::TriMesh<T>) -> PointCloud<T> { let super::TriMesh { vertex_positions, vertex_attributes, .. } = mesh; PointCloud { vertex_positions, vertex_attributes, } } } impl<T: Real> From<super::QuadMesh<T>> for PointCloud<T> { fn from(mesh: super::QuadMesh<T>) -> PointCloud<T> { let super::QuadMesh { vertex_positions, vertex_attributes, .. } = mesh; PointCloud { vertex_positions, vertex_attributes, } } } impl<T: Real> From<super::TetMesh<T>> for PointCloud<T> { fn from(mesh: super::TetMesh<T>) -> PointCloud<T> { let super::TetMesh { vertex_positions, vertex_attributes, .. 
} = mesh; PointCloud { vertex_positions, vertex_attributes, } } } #[cfg(test)] mod tests { use super::*; #[test] fn pointcloud_test() { let pts = vec![ [0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [1.0, 1.0, 0.0], ]; let mut ptcloud = PointCloud::new(pts.clone()); assert_eq!(ptcloud.num_vertices(), 4); for (pt1, pt2) in ptcloud.vertex_positio
#[test] fn convert_test() { use crate::mesh::{PolyMesh, QuadMesh, TetMesh, TriMesh}; let pts = vec![ [0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [1.0, 1.0, 0.0], ]; let ptcloud = PointCloud::new(pts.clone()); let polymesh = PolyMesh::new(pts.clone(), &vec![]); let trimesh = TriMesh::new(pts.clone(), vec![]); let quadmesh = QuadMesh::new(pts.clone(), vec![]); let tetmesh = TetMesh::new(pts.clone(), vec![]); assert_eq!(PointCloud::from(&polymesh), ptcloud); assert_eq!(PointCloud::from(&trimesh), ptcloud); assert_eq!(PointCloud::from(&quadmesh), ptcloud); assert_eq!(PointCloud::from(&tetmesh), ptcloud); assert_eq!(PointCloud::from(polymesh), ptcloud); assert_eq!(PointCloud::from(trimesh), ptcloud); assert_eq!(PointCloud::from(quadmesh), ptcloud); assert_eq!(PointCloud::from(tetmesh), ptcloud); } }
n_iter().zip(pts.iter()) { assert_eq!(*pt1, *pt2); } for (pt1, pt2) in ptcloud.vertex_position_iter_mut().zip(pts.iter()) { assert_eq!(*pt1, *pt2); } for (pt1, pt2) in ptcloud.vertex_positions().iter().zip(pts.iter()) { assert_eq!(*pt1, *pt2); } for (pt1, pt2) in ptcloud.vertex_positions_mut().iter().zip(pts.iter()) { assert_eq!(*pt1, *pt2); } }
function_block-function_prefixed
[ { "content": "pub fn convert_polymesh_to_obj_format<T: Real>(mesh: &PolyMesh<T>) -> Result<ObjData, Error> {\n\n let position: Vec<[f32; 3]> = mesh\n\n .vertex_position_iter()\n\n .cloned()\n\n .map(|[x, y, z]| {\n\n [\n\n x.to_f32().unwrap(),\n\n y.to_f32().unwrap(),\n\n z.to_f32().unwrap(),\n\n ]\n\n })\n\n .collect();\n\n\n\n // Find UVs\n\n let uvs: Option<(Vec<_>, TopologyType)> = if let Ok(uvs) =\n\n mesh.direct_attrib_clone_into_vec::<[f32; 2], VertexIndex>(UV_ATTRIB_NAME)\n\n {\n\n Some((uvs, TopologyType::Vertex))\n\n } else if let Ok(uvs) = mesh.attrib_iter::<[f32; 3], VertexIndex>(UV_ATTRIB_NAME) {\n\n Some((uvs.map(|&[u, v, _]| [u, v]).collect(), TopologyType::Vertex))\n", "file_path": "src/io/obj.rs", "rank": 0, "score": 208231.3483323838 }, { "content": "pub fn save_pointcloud_impl<T: Real>(ptcloud: &PointCloud<T>, file: &Path) -> Result<(), Error> {\n\n match file.extension().and_then(|ext| ext.to_str()) {\n\n Some(\"vtk\") | Some(\"vtu\") | Some(\"vtp\") | Some(\"pvtu\") | Some(\"pvtp\") => {\n\n let vtk =\n\n vtk::convert_pointcloud_to_vtk_format(ptcloud, vtk::VTKPolyExportStyle::PolyData)?;\n\n vtk.export_be(file)?;\n\n Ok(())\n\n }\n\n Some(\"obj\") => {\n\n let obj = obj::convert_pointcloud_to_obj_format(ptcloud)?;\n\n obj.save(file)?;\n\n Ok(())\n\n }\n\n _ => Err(Error::UnsupportedFileFormat),\n\n }\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 1, "score": 202505.6711936099 }, { "content": "pub fn convert_pointcloud_to_obj_format<T: Real>(mesh: &PointCloud<T>) -> Result<ObjData, Error> {\n\n let position: Vec<[f32; 3]> = mesh\n\n .vertex_position_iter()\n\n .cloned()\n\n .map(|[x, y, z]| {\n\n [\n\n x.to_f32().unwrap(),\n\n y.to_f32().unwrap(),\n\n z.to_f32().unwrap(),\n\n ]\n\n })\n\n .collect();\n\n\n\n // Find UVs\n\n let uvs: Vec<_> = if let Ok(uvs) =\n\n mesh.direct_attrib_clone_into_vec::<[f32; 2], VertexIndex>(UV_ATTRIB_NAME)\n\n {\n\n uvs\n\n } else if let Ok(uvs) = mesh.attrib_iter::<[f32; 3], VertexIndex>(UV_ATTRIB_NAME) {\n\n 
uvs.map(|&[u, v, _]| [u, v]).collect()\n", "file_path": "src/io/obj.rs", "rank": 2, "score": 197506.86591522105 }, { "content": "pub fn convert_pointcloud_to_vtk_format<T: Real>(\n\n ptcloud: &PointCloud<T>,\n\n style: VTKPolyExportStyle,\n\n) -> Result<model::Vtk, Error> {\n\n let num_verts = ptcloud.num_vertices() as u32;\n\n let points: Vec<T> = ptcloud\n\n .vertex_positions()\n\n .iter()\n\n .flat_map(|x| x.iter().cloned())\n\n .collect();\n\n\n\n let point_attribs = ptcloud\n\n .attrib_dict::<VertexIndex>()\n\n .iter()\n\n .filter_map(|(name, attrib)| mesh_to_vtk_named_attrib(name, attrib))\n\n .collect();\n\n\n\n Ok(model::Vtk {\n\n version: model::Version::new((0, 1)),\n\n title: String::from(\"Point Cloud\"),\n", "file_path": "src/io/vtk.rs", "rank": 3, "score": 182152.17826095957 }, { "content": "pub fn convert_polymesh_to_vtk_format<T: Real>(\n\n mesh: &PolyMesh<T>,\n\n style: VTKPolyExportStyle,\n\n) -> Result<model::Vtk, Error> {\n\n let points: Vec<T> = mesh\n\n .vertex_positions()\n\n .iter()\n\n .flat_map(|x| x.iter().cloned())\n\n .collect();\n\n let mut vertices = Vec::new();\n\n for face in mesh.face_iter() {\n\n vertices.push(face.len() as u32);\n\n for &vtx in face.iter() {\n\n vertices.push(vtx as u32);\n\n }\n\n }\n\n\n\n let point_attribs = mesh\n\n .attrib_dict::<VertexIndex>()\n\n .iter()\n", "file_path": "src/io/vtk.rs", "rank": 4, "score": 182091.81412495932 }, { "content": "pub fn convert_mesh_to_vtk_format<T: Real>(mesh: &Mesh<T>) -> Result<model::Vtk, Error> {\n\n let points: Vec<T> = mesh\n\n .vertex_positions()\n\n .iter()\n\n .flat_map(|x| x.iter().cloned())\n\n .collect();\n\n let mut vertices = Vec::new();\n\n for cell in mesh.cell_iter() {\n\n vertices.push(cell.len() as u32);\n\n for &vtx in cell.iter() {\n\n vertices.push(vtx as u32);\n\n }\n\n }\n\n\n\n let cell_types: Vec<_> = mesh\n\n .cell_type_iter()\n\n .map(|cell_type| match cell_type {\n\n CellType::Tetrahedron => model::CellType::Tetra,\n\n CellType::Triangle => 
model::CellType::Triangle,\n\n })\n", "file_path": "src/io/vtk.rs", "rank": 5, "score": 181114.45285081136 }, { "content": "/// Save a mesh to a file in ASCII format.\n\npub fn save_mesh_ascii<T: Real>(mesh: &Mesh<T>, file: impl AsRef<Path>) -> Result<(), Error> {\n\n save_mesh_ascii_impl(mesh, file.as_ref())\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 6, "score": 176078.54575138132 }, { "content": "/// Load a polygonal mesh from a given file.\n\npub fn load_polymesh<T: Real, P: AsRef<Path>>(file: P) -> Result<PolyMesh<T>, Error> {\n\n load_polymesh_impl(file.as_ref())\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 7, "score": 173888.6727149459 }, { "content": "/// Save a mesh to a file.\n\npub fn save_mesh<T: Real, P: AsRef<Path>>(mesh: &Mesh<T>, file: P) -> Result<(), Error> {\n\n save_mesh_impl(mesh, file.as_ref())\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 8, "score": 173814.45952162892 }, { "content": "/// Save a point cloud to a file.\n\npub fn save_pointcloud<T: Real, P: AsRef<Path>>(\n\n ptcloud: &PointCloud<T>,\n\n file: P,\n\n) -> Result<(), Error> {\n\n save_pointcloud_impl(ptcloud, file.as_ref())\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 9, "score": 172418.69074435407 }, { "content": "/// Save a polygonal mesh to a file.\n\npub fn save_polymesh<T: Real, P: AsRef<Path>>(\n\n polymesh: &PolyMesh<T>,\n\n file: P,\n\n) -> Result<(), Error> {\n\n save_polymesh_impl(polymesh, file.as_ref())\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 10, "score": 172362.85665434066 }, { "content": "fn save_polymesh_impl<T: Real>(polymesh: &PolyMesh<T>, file: &Path) -> Result<(), Error> {\n\n match file.extension().and_then(|ext| ext.to_str()) {\n\n Some(\"vtk\") | Some(\"vtu\") | Some(\"vtp\") | Some(\"pvtu\") | Some(\"pvtp\") => {\n\n let vtk =\n\n vtk::convert_polymesh_to_vtk_format(polymesh, vtk::VTKPolyExportStyle::PolyData)?;\n\n vtk.export_be(file)?;\n\n Ok(())\n\n }\n\n Some(\"obj\") => {\n\n let obj = obj::convert_polymesh_to_obj_format(polymesh)?;\n\n 
obj.save(file)?;\n\n Ok(())\n\n }\n\n _ => Err(Error::UnsupportedFileFormat),\n\n }\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 11, "score": 169407.1022071184 }, { "content": "/// Save a point cloud to a file.\n\npub fn save_pointcloud_ascii<T: Real, P: AsRef<Path>>(\n\n ptcloud: &PointCloud<T>,\n\n file: P,\n\n) -> Result<(), Error> {\n\n save_pointcloud_ascii_impl(ptcloud, file.as_ref())\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 12, "score": 168176.4673353981 }, { "content": "/// Save a polygonal mesh to a file.\n\npub fn save_polymesh_ascii<T: Real, P: AsRef<Path>>(\n\n polymesh: &PolyMesh<T>,\n\n file: P,\n\n) -> Result<(), Error> {\n\n save_polymesh_ascii_impl(polymesh, file.as_ref())\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 13, "score": 168122.83622235802 }, { "content": "fn save_pointcloud_ascii_impl<T: Real>(ptcloud: &PointCloud<T>, file: &Path) -> Result<(), Error> {\n\n match file.extension().and_then(|ext| ext.to_str()) {\n\n Some(\"vtk\") => {\n\n let vtk =\n\n vtk::convert_pointcloud_to_vtk_format(ptcloud, vtk::VTKPolyExportStyle::PolyData)?;\n\n vtk.export_ascii(file)?;\n\n Ok(())\n\n }\n\n Some(\"obj\") => {\n\n let obj = obj::convert_pointcloud_to_obj_format(ptcloud)?;\n\n obj.save(file)?;\n\n Ok(())\n\n }\n\n _ => Err(Error::UnsupportedFileFormat),\n\n }\n\n}\n\n\n\n/*\n\n * Low-level IO operations\n\n */\n", "file_path": "src/io.rs", "rank": 14, "score": 167534.86536577783 }, { "content": "fn save_polymesh_ascii_impl<T: Real>(polymesh: &PolyMesh<T>, file: &Path) -> Result<(), Error> {\n\n match file.extension().and_then(|ext| ext.to_str()) {\n\n Some(\"vtk\") => {\n\n // NOTE: Currently writing to ascii is supported only for Legacy VTK files.\n\n let vtk =\n\n vtk::convert_polymesh_to_vtk_format(polymesh, vtk::VTKPolyExportStyle::PolyData)?;\n\n vtk.export_ascii(file)?;\n\n Ok(())\n\n }\n\n Some(\"obj\") => {\n\n let obj = obj::convert_polymesh_to_obj_format(polymesh)?;\n\n obj.save(file)?;\n\n Ok(())\n\n }\n\n _ => 
Err(Error::UnsupportedFileFormat),\n\n }\n\n}\n\n\n\n/*\n\n * IO calls for TriMeshes\n\n *\n\n * NOTE: These functions simply call into PolyMesh IO API, since it allows for\n\n * more flexibility (e.g. can load polymesh as a trimesh by triangulating) and\n\n * code reuse.\n\n */\n\n\n", "file_path": "src/io.rs", "rank": 15, "score": 166356.31339456 }, { "content": "/// Partition a given slice by unique values.\n\n///\n\n/// This version of `partition` is useful when `T` doesn't implement `Eq` and `Hash` or\n\n/// `PartitionHashEq` but has `PartialOrd`. For the majority of use cases it is better to use\n\n/// `partition`.\n\npub fn partition_slice<T: PartialOrd>(slice: &[T]) -> (Vec<usize>, usize) {\n\n use std::cmp::Ordering;\n\n\n\n // Sort attrib via an explicit permutation.\n\n // The permutation then acts as a map from sorted back to unsorted attribs.\n\n let mut permutation: Vec<_> = (0..slice.len()).collect();\n\n\n\n // SAFETY: permutation indices are guaranteed to be below slice.len();\n\n permutation.sort_by(|&i, &j| unsafe {\n\n slice\n\n .get_unchecked(i)\n\n .partial_cmp(slice.get_unchecked(j))\n\n .unwrap_or(Ordering::Less)\n\n });\n\n\n\n let mut permutation_iter = permutation\n\n .iter()\n\n .map(|&i| (i, unsafe { slice.get_unchecked(i) }))\n\n .peekable();\n\n let mut partition = vec![0; slice.len()];\n", "file_path": "src/algo/partition.rs", "rank": 16, "score": 165437.6743244772 }, { "content": "/// Load a tetrahedral mesh from a given file.\n\npub fn load_mesh<T: Real, P: AsRef<Path>>(file: P) -> Result<Mesh<T>, Error> {\n\n load_mesh_impl(file.as_ref())\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 17, "score": 164458.4672998141 }, { "content": "/// Attribute interfaces for meshes. 
In order to derive this trait the mesh must have a field\n\n/// called `attributes` with type `AttribDict`.\n\npub trait Attrib\n\nwhere\n\n Self: ::std::marker::Sized,\n\n{\n\n /// Get the size of the attribute at the appropriate mesh location determined by `I`.\n\n fn attrib_size<I: AttribIndex<Self>>(&self) -> usize;\n\n\n\n /// Read only access to the attribute dictionary.\n\n fn attrib_dict<I: AttribIndex<Self>>(&self) -> &AttribDict<I>;\n\n\n\n /// Read and write access to the attribute dictionary.\n\n fn attrib_dict_mut<I: AttribIndex<Self>>(&mut self) -> &mut AttribDict<I>;\n\n\n\n /// Add an attribute at the appropriate location with a given default.\n\n fn add_attrib<'a, T, I: AttribIndex<Self>>(\n\n &mut self,\n\n name: &'a str,\n\n def: T,\n\n ) -> Result<&mut Attribute<I>, Error>\n\n where\n", "file_path": "src/mesh/attrib_dynamic.rs", "rank": 18, "score": 163190.2515829787 }, { "content": "/// Save a tetrahedral mesh to a file.\n\npub fn save_tetmesh_ascii<T: Real>(\n\n tetmesh: &TetMesh<T>,\n\n file: impl AsRef<Path>,\n\n) -> Result<(), Error> {\n\n save_tetmesh_ascii_impl(tetmesh, file.as_ref())\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 19, "score": 160098.00299298618 }, { "content": "#[inline]\n\npub fn apply_permutation<A: Swap + ?Sized>(permutation: &[usize], array: &mut A) {\n\n let mut seen = vec![false; array.len()];\n\n apply_permutation_with_stride_and_seen(permutation, array, 1, &mut seen);\n\n}\n\n\n\n/// Apply a given `permutation` of indices to the given `array` of values in place.\n\n///\n\n/// This version of `apply_permutation` accepts a workspace `seen` vector of `bool`s.\n\n/// This is useful when the allocation of `seen` affects performance.\n", "file_path": "src/utils/slice.rs", "rank": 20, "score": 158682.23186235916 }, { "content": "fn load_polymesh_impl<T: Real>(file: &Path) -> Result<PolyMesh<T>, Error> {\n\n match file.extension().and_then(|ext| ext.to_str()) {\n\n Some(\"vtk\") | Some(\"vtu\") | Some(\"vtp\") | 
Some(\"pvtu\") | Some(\"pvtp\") => {\n\n let vtk = Vtk::import(file)?;\n\n vtk.extract_polymesh()\n\n }\n\n Some(\"obj\") => {\n\n let obj = obj::Obj::load_with_config(file, obj::LoadConfig { strict: false })?;\n\n obj.data.extract_polymesh()\n\n }\n\n _ => Err(Error::UnsupportedFileFormat),\n\n }\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 21, "score": 155116.83846658934 }, { "content": "fn save_mesh_impl<T: Real>(mesh: &Mesh<T>, file: &Path) -> Result<(), Error> {\n\n match file.extension().and_then(|ext| ext.to_str()) {\n\n Some(\"vtk\") | Some(\"vtu\") | Some(\"pvtu\") => {\n\n let vtk = vtk::convert_mesh_to_vtk_format(mesh)?;\n\n vtk.export_be(file)?;\n\n Ok(())\n\n }\n\n // NOTE: wavefront obj files don't support unstructured meshes.\n\n _ => Err(Error::UnsupportedFileFormat),\n\n }\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 22, "score": 152825.26449057384 }, { "content": "/// Load a point cloud from a given file.\n\npub fn load_pointcloud<T: Real, P: AsRef<Path>>(file: P) -> Result<PointCloud<T>, Error> {\n\n load_pointcloud_impl(file.as_ref())\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 23, "score": 152300.76450248863 }, { "content": "fn save_mesh_ascii_impl<T: Real>(mesh: &Mesh<T>, file: &Path) -> Result<(), Error> {\n\n match file.extension() {\n\n Some(ext) if ext.to_str() == Some(\"vtk\") => {\n\n // NOTE: Currently writing to ascii is supported only for Legacy VTK files.\n\n let vtk = vtk::convert_mesh_to_vtk_format(mesh)?;\n\n vtk.export_ascii(file)?;\n\n Ok(())\n\n }\n\n // NOTE: wavefront obj files don't support unstructured meshes.\n\n _ => Err(Error::UnsupportedFileFormat),\n\n }\n\n}\n\n\n\n/*\n\n * IO calls for TetMeshes\n\n */\n\n\n", "file_path": "src/io.rs", "rank": 24, "score": 150448.24657642114 }, { "content": "fn flatten33<T: Clone>(vec: Vec<[[T; 3]; 3]>) -> Vec<T> {\n\n vec.iter()\n\n .flat_map(|x| x.iter().flat_map(|y| y.iter().cloned()))\n\n .collect()\n\n}\n\n\n", "file_path": "src/io/vtk.rs", "rank": 25, "score": 
149986.7194017631 }, { "content": "/// Partition a given slice by unique values.\n\n///\n\n/// It may be more efficient to implement this function by hand, especially when the number of\n\n/// partitions is known ahead of time.\n\npub fn partition<'a, T: Hash + Eq + 'a>(iter: impl Iterator<Item = &'a T>) -> (Vec<usize>, usize) {\n\n let mut partition = Vec::new();\n\n let mut map: HashMap<&'a T, usize> = HashMap::default();\n\n\n\n let mut part_counter = 0;\n\n for val in iter {\n\n let part = map.entry(val).or_insert_with(|| {\n\n let part = part_counter;\n\n part_counter += 1;\n\n part\n\n });\n\n partition.push(*part);\n\n }\n\n (partition, part_counter)\n\n}\n\n\n", "file_path": "src/algo/partition.rs", "rank": 26, "score": 146509.53979749908 }, { "content": "pub trait Contains<RHS = Self> {\n\n /// Check if this object contains another.\n\n fn contains(&self, obj: RHS) -> bool;\n\n}\n\n\n", "file_path": "src/ops/mod.rs", "rank": 27, "score": 144743.29477520767 }, { "content": "pub trait Absorb<RHS = Self> {\n\n type Output;\n\n\n\n /// Absorb another object.\n\n /// For example if a = [-1, 2] and b = [3, 4] is are closed intervals,\n\n /// then a.absorb(b) == [-1 4].\n\n fn absorb(self, rhs: RHS) -> Self::Output;\n\n}\n\n\n", "file_path": "src/ops/mod.rs", "rank": 28, "score": 144743.29477520767 }, { "content": "/// Intersection trait, describes the intersection operation between two objects.\n\npub trait Intersect<RHS = Self> {\n\n type Output;\n\n\n\n /// Intersect on one object with another, producing the resulting intersection.\n\n /// For example if [-1, 2] and [0, 4] are two closed intervals, then their\n\n /// intersection is a closed interval [0, 2].\n\n /// Note that the intersection object can be of a different type\n\n fn intersect(self, rhs: RHS) -> Self::Output;\n\n\n\n /// Check if this object intersects another.\n\n fn intersects(self, rhs: RHS) -> bool;\n\n}\n\n\n", "file_path": "src/ops/mod.rs", "rank": 29, "score": 144743.29477520767 }, { 
"content": "fn flatten4<T: Clone>(vec: Vec<[T; 4]>) -> Vec<T> {\n\n vec.iter().flat_map(|x| x.iter().cloned()).collect()\n\n}\n", "file_path": "src/io/vtk.rs", "rank": 30, "score": 144265.97726880305 }, { "content": "fn flatten3<T: Clone>(vec: Vec<[T; 3]>) -> Vec<T> {\n\n vec.iter().flat_map(|x| x.iter().cloned()).collect()\n\n}\n", "file_path": "src/io/vtk.rs", "rank": 31, "score": 144186.6598261636 }, { "content": "fn load_mesh_impl<T: Real>(file: &Path) -> Result<Mesh<T>, Error> {\n\n match file.extension().and_then(|ext| ext.to_str()) {\n\n Some(\"vtk\") | Some(\"vtu\") | Some(\"pvtu\") => {\n\n let vtk = Vtk::import(file)?;\n\n vtk.extract_mesh()\n\n }\n\n #[cfg(feature = \"mshio\")]\n\n Some(\"msh\") => {\n\n let msh_bytes = std::fs::read(file)?;\n\n let msh = mshio::parse_msh_bytes(msh_bytes.as_slice()).map_err(msh::MshError::from)?;\n\n msh.extract_mesh()\n\n }\n\n // NOTE: wavefront obj files don't support unstructured meshes.\n\n _ => Err(Error::UnsupportedFileFormat),\n\n }\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 32, "score": 143312.27961022107 }, { "content": "/// Saves a triangle mesh to a file.\n\npub fn save_trimesh_ascii<T: Real, P: AsRef<Path>>(\n\n trimesh: &TriMesh<T>,\n\n file: P,\n\n) -> Result<(), Error> {\n\n save_polymesh_ascii_impl(&PolyMesh::from(trimesh.clone()), file.as_ref())\n\n}\n\n\n\n/*\n\n * IO calls for Point clouds\n\n */\n\n\n", "file_path": "src/io.rs", "rank": 33, "score": 140052.92925905553 }, { "content": "/// Attribute interfaces for meshes. 
In order to derive this trait the mesh must have a field\n\n/// called `attributes` with type `AttribDict`.\n\npub trait Attrib\n\nwhere\n\n Self: std::marker::Sized,\n\n{\n\n /// Get the size of the attribute at the appropriate mesh location determined by `I`.\n\n fn attrib_size<I: AttribIndex<Self>>(&self) -> usize;\n\n\n\n /// Read only access to the attribute dictionary.\n\n fn attrib_dict<I: AttribIndex<Self>>(&self) -> &AttribDict<I>;\n\n\n\n /// Read and write access to the attribute dictionary.\n\n fn attrib_dict_mut<I: AttribIndex<Self>>(&mut self) -> &mut AttribDict<I>;\n\n\n\n /// Read and write access to the attribute dictionary along with a cache for indirect attribute\n\n /// values.\n\n fn attrib_dict_and_cache_mut<I: AttribIndex<Self>>(\n\n &mut self,\n\n ) -> (&mut AttribDict<I>, Option<&mut AttribValueCache>);\n\n\n\n /// Insert an attribute at the appropriate location with a given default.\n", "file_path": "src/attrib.rs", "rank": 34, "score": 137124.61940255045 }, { "content": "pub fn convert_tetmesh_to_vtk_format<T: Real>(tetmesh: &TetMesh<T>) -> Result<model::Vtk, Error> {\n\n let points: Vec<T> = tetmesh\n\n .vertex_positions()\n\n .iter()\n\n .flat_map(|x| x.iter().cloned())\n\n .collect();\n\n let mut vertices = Vec::new();\n\n for cell in tetmesh.cell_iter() {\n\n vertices.push(cell.len() as u32);\n\n for &vtx in cell.iter() {\n\n vertices.push(vtx as u32);\n\n }\n\n }\n\n\n\n let point_attribs = tetmesh\n\n .attrib_dict::<VertexIndex>()\n\n .iter()\n\n .filter_map(|(name, attrib)| mesh_to_vtk_named_attrib(name, attrib))\n\n .collect();\n\n\n", "file_path": "src/io/vtk.rs", "rank": 35, "score": 134742.75441119424 }, { "content": "/// A trait for specific scene, object or mesh models to extract mesh data from.\n\n///\n\n/// All methods are optional and default implementations simply return an `UnsupportedDataFormat` error.\n\n/// This trait defines an API for converting file specific object models to `meshx` mesh formats.\n\npub trait 
MeshExtractor<T: crate::Real> {\n\n /// Constructs an unstructured Mesh from this VTK model.\n\n ///\n\n /// This function may clone the given model as necessary.\n\n fn extract_mesh(&self) -> Result<Mesh<T>, Error> {\n\n Err(Error::UnsupportedDataFormat)\n\n }\n\n /// Constructs a PolyMesh from this VTK model.\n\n ///\n\n /// This function may clone the given model as necessary.\n\n fn extract_polymesh(&self) -> Result<PolyMesh<T>, Error> {\n\n Err(Error::UnsupportedDataFormat)\n\n }\n\n /// Constructs a TetMesh from this VTK model.\n\n ///\n\n /// This function may clone the given model as necessary.\n\n fn extract_tetmesh(&self) -> Result<TetMesh<T>, Error> {\n\n Err(Error::UnsupportedDataFormat)\n\n }\n\n /// Constructs a PointCloud from this VTK model.\n", "file_path": "src/io.rs", "rank": 36, "score": 134305.59570098954 }, { "content": "/// Load a tetrahedral mesh from a given file.\n\npub fn load_tetmesh<T: Real, P: AsRef<Path>>(file: P) -> Result<TetMesh<T>, Error> {\n\n load_tetmesh_impl(file.as_ref())\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 37, "score": 133808.85324559652 }, { "content": "/// Loads a triangle mesh from a given file.\n\npub fn load_trimesh<T: Real, P: AsRef<Path>>(file: P) -> Result<TriMesh<T>, Error> {\n\n load_polymesh_impl(file.as_ref()).map(TriMesh::from)\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 38, "score": 133808.85324559652 }, { "content": "fn load_pointcloud_impl<T: Real>(file: &Path) -> Result<PointCloud<T>, Error> {\n\n match file.extension().and_then(|ext| ext.to_str()) {\n\n Some(\"vtk\") | Some(\"vtu\") | Some(\"vtp\") | Some(\"pvtu\") | Some(\"pvtp\") => {\n\n let vtk = Vtk::import(file)?;\n\n vtk.extract_pointcloud()\n\n }\n\n Some(\"obj\") => {\n\n let obj = obj::Obj::load_with_config(file, obj::LoadConfig { strict: false })?;\n\n obj.data.extract_pointcloud()\n\n }\n\n _ => Err(Error::UnsupportedFileFormat),\n\n }\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 39, "score": 131403.530723195 }, { "content": 
"#[proc_macro_derive(Attrib, attributes(attributes))]\n\npub fn attrib(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let input: DeriveInput = syn::parse(input).unwrap();\n\n\n\n let gen = attrib::impl_attrib(&input);\n\n\n\n gen.into()\n\n}\n\n\n", "file_path": "meshx-derive/src/lib.rs", "rank": 40, "score": 130247.39698600268 }, { "content": "/// Saves a triangle mesh to a file.\n\npub fn save_trimesh<T: Real, P: AsRef<Path>>(trimesh: &TriMesh<T>, file: P) -> Result<(), Error> {\n\n save_polymesh_impl(&PolyMesh::from(trimesh.clone()), file.as_ref())\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 41, "score": 129592.31635678388 }, { "content": "/// Save a tetrahedral mesh to a file.\n\npub fn save_tetmesh<T: Real, P: AsRef<Path>>(tetmesh: &TetMesh<T>, file: P) -> Result<(), Error> {\n\n save_tetmesh_impl(tetmesh, file.as_ref())\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 42, "score": 129592.3163567839 }, { "content": "/// VertexMesh is a marker trait to allow user code to be generic over vertex centric meshes with\n\n/// intrinsic vertex positions attributes.\n\npub trait VertexMesh<T>:\n\n Attrib + VertexAttrib + NumVertices + VertexPositions<Element = [T; 3]>\n\n{\n\n}\n\nimpl<M, T> VertexMesh<T> for M where\n\n M: Attrib + VertexAttrib + NumVertices + VertexPositions<Element = [T; 3]>\n\n{\n\n}\n", "file_path": "src/mesh/mod.rs", "rank": 43, "score": 128878.04366351705 }, { "content": " /// A basic value that can be stored as an attribute in a mesh type.\n\n pub trait AttributeValue: Clone + PartialEq + std::fmt::Debug + Send + Sync + 'static {}\n\n impl<T> AttributeValue for T where T: Clone + PartialEq + std::fmt::Debug + Send + Sync + 'static {}\n\n\n", "file_path": "src/attrib/attribute.rs", "rank": 44, "score": 126576.90675444534 }, { "content": "pub trait Real:\n\n math::ComplexField + num_traits::Float + ::std::fmt::Debug + std::iter::Sum + Pod\n\n{\n\n}\n\nimpl<T> Real for T where\n\n T: math::ComplexField + num_traits::Float + 
::std::fmt::Debug + std::iter::Sum + Pod\n\n{\n\n}\n", "file_path": "src/lib.rs", "rank": 45, "score": 125206.25527128359 }, { "content": "/// This trait provides an interface for the implementer of `Attrib` to access attributes\n\n/// associated with a specific topology within a mesh.\n\npub trait AttribIndex<M>\n\nwhere\n\n Self: ::std::marker::Sized + Clone,\n\n{\n\n /// Get the size of the attribute at the appropriate mesh location determined by `I`.\n\n fn attrib_size(mesh: &M) -> usize;\n\n\n\n /// Read only access to the attribute dictionary.\n\n fn attrib_dict(mesh: &M) -> &AttribDict<Self>;\n\n\n\n /// Read and write access to the attribute dictionary.\n\n fn attrib_dict_mut(mesh: &mut M) -> &mut AttribDict<Self>;\n\n}\n\n\n\nmacro_rules! impl_attrib_index {\n\n ($topo_attrib:ident, $type:ty, $topo_num:ident) => {\n\n impl Attribute<$type> {\n\n /// Get `i`'th attribute value.\n\n #[inline]\n\n pub fn get<T: Any + Copy, I: Into<$type>>(&self, i: I) -> Result<T, Error> {\n", "file_path": "src/mesh/attrib_dynamic.rs", "rank": 46, "score": 124583.49445271643 }, { "content": "fn flatten2<T: Clone>(vec: Vec<[T; 2]>) -> Vec<T> {\n\n vec.iter().flat_map(|x| x.iter().cloned()).collect()\n\n}\n", "file_path": "src/io/vtk.rs", "rank": 47, "score": 124002.0455707182 }, { "content": "/// A utility function to index a slice using three indices, creating a new array of 3\n\n/// corresponding entries of the slice.\n\nfn tri_at<T: Copy>(slice: &[T], tri: &[usize; 3]) -> [T; 3] {\n\n [slice[tri[0]], slice[tri[1]], slice[tri[2]]]\n\n}\n\n\n\n/// Consider any permutation of the triangle to be equivalent to the original.\n\nimpl PartialEq for TetFace {\n\n fn eq(&self, other: &TetFace) -> bool {\n\n for p in Self::PERMUTATIONS.iter() {\n\n if tri_at(&other.tri, p) == self.tri {\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n }\n\n}\n\n\n\nimpl PartialOrd for TetFace {\n\n fn partial_cmp(&self, other: &TetFace) -> Option<std::cmp::Ordering> {\n\n Some(self.cmp(other))\n", 
"file_path": "src/mesh/tetmesh/surface.rs", "rank": 48, "score": 123106.32300033298 }, { "content": "fn mesh_to_vtk_attrib_impl<I>(name: &str, attrib: &Attribute<I>) -> Option<model::Attribute> {\n\n // Try to match a scalar field.\n\n if let Ok(data) = try_interpret_attrib!(attrib, IOBuffer::from) {\n\n return Some(model::Attribute::scalars(name, 1).with_data(data));\n\n }\n\n\n\n // Try to match a vector field.\n\n if let Ok(data) = try_interpret_attrib!(attrib, 2, |x| IOBuffer::from(flatten2(x))) {\n\n return Some(model::Attribute::scalars(name, 2).with_data(data));\n\n }\n\n\n\n // Try to match a vector field.\n\n if let Ok(data) = try_interpret_attrib!(attrib, 3, |x| IOBuffer::from(flatten3(x))) {\n\n return Some(model::Attribute::vectors(name).with_data(data));\n\n }\n\n\n\n // Try to match a vector field.\n\n if let Ok(data) = try_interpret_attrib!(attrib, 4, |x| IOBuffer::from(flatten4(x))) {\n\n return Some(model::Attribute::scalars(name, 4).with_data(data));\n\n }\n", "file_path": "src/io/vtk.rs", "rank": 49, "score": 120331.27942559749 }, { "content": "fn mesh_to_vtk_named_attrib<I>(name: &str, attrib: &Attribute<I>) -> Option<model::Attribute> {\n\n // Try to match special attributes\n\n if name == UV_ATTRIB_NAME {\n\n let attrib = into_vtk_attrib_uv(name, attrib);\n\n if attrib.is_some() {\n\n return attrib;\n\n }\n\n } else if name == NORMAL_ATTRIB_NAME {\n\n let mut maybe_iobuf: Result<IOBuffer, _> = attrib\n\n .direct_clone_into_vec::<[f32; 3]>()\n\n .map(|y| flatten3(y).into());\n\n if maybe_iobuf.is_err() {\n\n // try with f64\n\n maybe_iobuf = attrib\n\n .direct_clone_into_vec::<[f64; 3]>()\n\n .map(|y| flatten3(y).into());\n\n }\n\n\n\n if let Ok(data) = maybe_iobuf {\n\n return Some(model::Attribute::normals(name).with_data(data));\n\n }\n\n }\n\n\n\n // Match with other vtk attributes.\n\n mesh_to_vtk_attrib_impl(name, attrib)\n\n}\n\n\n", "file_path": "src/io/vtk.rs", "rank": 50, "score": 120331.27942559749 }, { "content": 
"#[allow(clippy::cognitive_complexity)]\n\nfn vtk_to_mesh_attrib<M, I>(\n\n attribs: Vec<model::Attribute>,\n\n mesh: &mut M,\n\n orig_map: Option<&[usize]>,\n\n) -> Vec<model::Attribute>\n\nwhere\n\n M: Attrib,\n\n I: AttribIndex<M>,\n\n{\n\n // We populate another vector instead of using filter_map to allow for errors to propagate.\n\n let mut remainder = Vec::with_capacity(attribs.len());\n\n\n\n for attrib in attribs {\n\n match attrib {\n\n model::Attribute::DataArray(model::DataArray { name, elem, data }) => {\n\n let name = name.as_str();\n\n match elem {\n\n model::ElementType::Scalars { num_comp: dim, .. } | model::ElementType::TCoords(dim) => {\n\n match dim {\n\n // Note that only the first found attribute with the same name and location\n", "file_path": "src/io/vtk.rs", "rank": 51, "score": 118541.6003662517 }, { "content": "#[cfg(not(feature = \"vtkio\"))]\n\npub trait Real: crate::Real + std::str::FromStr {}\n\n#[cfg(not(feature = \"vtkio\"))]\n\nimpl<T> Real for T where T: crate::Real + std::str::FromStr {}\n\n\n\n// These names are chosen to be rather short to reduce the const of comparisons.\n\n// Although code that relies on this is not idiomatic, it can sometimes be simpler.\n\nconst UV_ATTRIB_NAME: &str = \"uv\";\n\nconst NORMAL_ATTRIB_NAME: &str = \"N\";\n\n\n", "file_path": "src/io.rs", "rank": 52, "score": 117452.81929298231 }, { "content": "/// Transfer attribute data from `attrib_dict` to a vtk FIELD attribute. 
This is useful for storing\n\n/// attributes for topologies that Vtk doesn't directly support like `FaceVertex` or `CellVertex`\n\n/// attributes which are important for passing through texture coordinates with seams.\n\nfn mesh_to_vtk_named_field_attribs<I>(\n\n field_data_name: &str,\n\n attrib_dict: &AttribDict<I>,\n\n) -> Option<model::Attribute> {\n\n let data_array: Vec<_> = attrib_dict\n\n .iter()\n\n .filter_map(|(name, attrib)| {\n\n // Try to match a scalar field.\n\n if let Ok(data) = try_interpret_attrib!(attrib, IOBuffer::from) {\n\n return Some(model::FieldArray::new(name, 1).with_data(data));\n\n }\n\n\n\n // Try to match a 2D vector field.\n\n if let Ok(data) = try_interpret_attrib!(attrib, 2, |x| IOBuffer::from(flatten2(x))) {\n\n return Some(model::FieldArray::new(name, 2).with_data(data));\n\n }\n\n\n\n // Try to match a 3D vector field.\n\n if let Ok(data) = try_interpret_attrib!(attrib, 3, |x| IOBuffer::from(flatten3(x))) {\n\n return Some(model::FieldArray::new(name, 3).with_data(data));\n", "file_path": "src/io/vtk.rs", "rank": 53, "score": 116129.0165556588 }, { "content": "#[allow(clippy::cognitive_complexity)]\n\nfn vtk_field_to_mesh_attrib<M>(\n\n attribs: Vec<model::Attribute>,\n\n mesh: &mut M,\n\n orig_map: Option<&[usize]>,\n\n) where\n\n M: Attrib + FaceVertex,\n\n FaceVertexIndex: AttribIndex<M>,\n\n{\n\n for attrib in attribs {\n\n if !special_field_attributes().contains(&attrib.name()) {\n\n continue;\n\n }\n\n if let model::Attribute::Field { data_array, .. 
} = attrib {\n\n for model::FieldArray { name, elem, data } in data_array {\n\n let name = name.as_str();\n\n match elem {\n\n // Note that only the first found attribute with the same name and location\n\n // will be inserted.\n\n 1 => match_buf!( &data, v => insert_array_attrib::<_, _, FaceVertexIndex>(v, name, mesh, orig_map) ),\n\n 2 => match_buf!( &data, v => insert_array_attrib_n::<_, _, FaceVertexIndex,U2>(v, name, mesh, orig_map) ),\n", "file_path": "src/io/vtk.rs", "rank": 54, "score": 116121.92868274217 }, { "content": "fn load_tetmesh_impl<T: Real>(file: &Path) -> Result<TetMesh<T>, Error> {\n\n match file.extension().and_then(|ext| ext.to_str()) {\n\n Some(\"vtk\") | Some(\"vtu\") | Some(\"pvtu\") => {\n\n let vtk = Vtk::import(file)?;\n\n vtk.extract_tetmesh()\n\n }\n\n // NOTE: wavefront obj files don't support tetrahedral meshes.\n\n _ => Err(Error::UnsupportedFileFormat),\n\n }\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 55, "score": 109707.93792813156 }, { "content": "#[cfg(feature = \"vtkio\")]\n\npub trait Real: vtkio::model::Scalar + std::str::FromStr + crate::Real {}\n\n#[cfg(feature = \"vtkio\")]\n\nimpl<T> Real for T where T: vtkio::model::Scalar + std::str::FromStr + crate::Real {}\n\n\n", "file_path": "src/io.rs", "rank": 56, "score": 106294.97746409292 }, { "content": "fn save_tetmesh_impl<T: Real>(tetmesh: &TetMesh<T>, file: &Path) -> Result<(), Error> {\n\n match file.extension().and_then(|ext| ext.to_str()) {\n\n Some(\"vtk\") | Some(\"vtu\") | Some(\"pvtu\") => {\n\n let vtk = vtk::convert_tetmesh_to_vtk_format(tetmesh)?;\n\n vtk.export_be(file)?;\n\n Ok(())\n\n }\n\n // NOTE: wavefront obj files don't support tetrahedral meshes.\n\n _ => Err(Error::UnsupportedFileFormat),\n\n }\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 57, "score": 105822.2617874176 }, { "content": "fn save_tetmesh_ascii_impl<T: Real>(tetmesh: &TetMesh<T>, file: &Path) -> Result<(), Error> {\n\n match file.extension() {\n\n Some(ext) if ext.to_str() == 
Some(\"vtk\") => {\n\n // NOTE: Currently writing to ascii is supported only for Legacy VTK files.\n\n let vtk = vtk::convert_tetmesh_to_vtk_format(tetmesh)?;\n\n vtk.export_ascii(file)?;\n\n Ok(())\n\n }\n\n // NOTE: wavefront obj files don't support tetrahedral meshes.\n\n _ => Err(Error::UnsupportedFileFormat),\n\n }\n\n}\n\n\n\n/*\n\n * IO calls for PolyMeshes\n\n */\n\n\n", "file_path": "src/io.rs", "rank": 58, "score": 103604.36073502932 }, { "content": "// Simple quantifiers\n\npub trait NumMeshes {\n\n fn num_meshes(&self) -> usize;\n\n}\n", "file_path": "src/mesh/topology.rs", "rank": 59, "score": 100090.61817697181 }, { "content": "#[inline]\n\npub fn apply_permutation_with_seen<A: Swap + ?Sized>(\n\n permutation: &[usize],\n\n array: &mut A,\n\n seen: &mut [bool],\n\n) {\n\n apply_permutation_with_stride_and_seen(permutation, array, 1, seen);\n\n}\n\n\n", "file_path": "src/utils/slice.rs", "rank": 60, "score": 99739.54705895747 }, { "content": "#[derive(Copy, Clone, PartialOrd, Ord, PartialEq, Eq, Hash, Debug)]\n\nstruct SortedTri {\n\n pub sorted_indices: [usize; 3],\n\n}\n\n\n\nimpl SortedTri {\n\n fn new([a, b, c]: [usize; 3]) -> Self {\n\n SortedTri {\n\n sorted_indices: {\n\n if a <= b {\n\n if b <= c {\n\n [a, b, c]\n\n } else if a <= c {\n\n [a, c, b]\n\n } else {\n\n [c, a, b]\n\n }\n\n } else if a <= c {\n\n [b, a, c]\n\n } else if b <= c {\n\n [b, c, a]\n", "file_path": "src/mesh/tetmesh/surface.rs", "rank": 61, "score": 98333.54805597725 }, { "content": "/// Transfer a `uv` attribute from this attribute to the `vtk` model.\n\nfn into_vtk_attrib_uv<I>(name: &str, attrib: &Attribute<I>) -> Option<model::Attribute> {\n\n // Try 2d texture coordinates\n\n let mut maybe_iobuf = attrib\n\n .direct_clone_into_vec::<[f32; 2]>()\n\n .map(|y| IOBuffer::from(flatten2(y)));\n\n if maybe_iobuf.is_err() {\n\n // try with f64\n\n maybe_iobuf = attrib\n\n .direct_clone_into_vec::<[f64; 2]>()\n\n .map(|y| IOBuffer::from(flatten2(y)));\n\n }\n\n\n\n if let 
Ok(data) = maybe_iobuf {\n\n return Some(model::Attribute::tcoords(name, 2).with_data(data));\n\n }\n\n\n\n // Try 3d texture coordinates\n\n maybe_iobuf = attrib\n\n .direct_clone_into_vec::<[f32; 3]>()\n\n .map(|y| flatten3(y).into());\n", "file_path": "src/io/vtk.rs", "rank": 62, "score": 98314.29977984267 }, { "content": "pub fn compute_vertex_area_weighted_normals<T, V3>(\n\n vertex_positions: &[V3],\n\n topo: &[[usize; 3]],\n\n normals: &mut [[T; 3]],\n\n) where\n\n T: Copy + math::SimdRealField,\n\n V3: Into<[T; 3]> + Clone,\n\n{\n\n // Clear the normals.\n\n for nml in normals.iter_mut() {\n\n *nml = [T::zero(); 3];\n\n }\n\n\n\n for tri_indices in topo.iter() {\n\n let tri = Triangle::from_indexed_slice(tri_indices, vertex_positions);\n\n let area_nml = Vector3::from(tri.area_normal());\n\n normals[tri_indices[0]] = (Vector3::from(normals[tri_indices[0]]) + area_nml).into();\n\n normals[tri_indices[1]] = (Vector3::from(normals[tri_indices[1]]) + area_nml).into();\n\n normals[tri_indices[2]] = (Vector3::from(normals[tri_indices[2]]) + area_nml).into();\n\n }\n\n}\n", "file_path": "src/algo/normals.rs", "rank": 63, "score": 97621.6747816611 }, { "content": "/// This trait provides an interface for the implementer of `Attrib` to access attributes\n\n/// associated with a specific topology within a mesh.\n\npub trait AttribIndex<M>\n\nwhere\n\n Self: std::marker::Sized + Clone,\n\n{\n\n /// Get the size of the attribute at the appropriate mesh location determined by `I`.\n\n fn attrib_size(mesh: &M) -> usize;\n\n\n\n /// Read only access to the attribute dictionary.\n\n fn attrib_dict(mesh: &M) -> &AttribDict<Self>;\n\n\n\n /// Read and write access to the attribute dictionary.\n\n fn attrib_dict_mut(mesh: &mut M) -> &mut AttribDict<Self>;\n\n\n\n /// Read and write access to the attribute dictionary as well as an optional cache for indirect\n\n /// attribute values.\n\n fn attrib_dict_and_cache_mut(\n\n mesh: &mut M,\n\n ) -> (&mut AttribDict<Self>, 
Option<&mut AttribValueCache>);\n\n}\n\n\n", "file_path": "src/attrib/index.rs", "rank": 64, "score": 97537.02612514354 }, { "content": "/// Apply a given `permutation` of indices to the given `array` of values in place.\n\n///\n\n/// This version of `apply_permutation_with_seen` accepts a stride which interprets\n\n/// `array` as an array of chunks with size `stride`.\n\npub fn apply_permutation_with_stride_and_seen<A: Swap + ?Sized>(\n\n permutation: &[usize],\n\n array: &mut A,\n\n stride: usize,\n\n seen: &mut [bool],\n\n) {\n\n // Total number of elements being tracked.\n\n let nelem = seen.len();\n\n\n\n assert!(permutation.iter().all(|&i| i < nelem));\n\n assert_eq!(permutation.len(), nelem);\n\n debug_assert_eq!(nelem * stride, array.len());\n\n\n\n for unseen_i in 0..nelem {\n\n // SAFETY: unseen_i is explicitly between 0 and seen.len()\n\n if unsafe { *seen.get_unchecked(unseen_i) } {\n\n continue;\n\n }\n\n\n\n let mut i = unseen_i;\n", "file_path": "src/utils/slice.rs", "rank": 65, "score": 97453.65344220912 }, { "content": "/// Promote attributes from one topology to another.\n\npub trait AttribPromote<SI, TI>\n\nwhere\n\n Self: Sized,\n\n SI: AttribIndex<Self>,\n\n TI: AttribIndex<Self>,\n\n{\n\n /// Promote the given attribute from source topology `SI` to target topology `TI`.\n\n ///\n\n /// A mutable reference to the resulting attribute is returned upon success.\n\n ///\n\n /// Collisions are handled using the given `combine` closure which takes two attribute\n\n /// components of type `T` and combines them into a single `T` to be written to the target\n\n /// topology attribute.\n\n ///\n\n /// If an attribute with the same name already exists at the target topology, it will be\n\n /// combined with the promoted attribute. 
If that attribute has the wrong type, an error is\n\n /// returned.\n\n fn attrib_promote<'a, U, F>(\n\n &mut self,\n\n name: &'a str,\n", "file_path": "src/attrib.rs", "rank": 66, "score": 96291.69953485965 }, { "content": "/// A trait for building meshes representing various objects.\n\n///\n\n/// This interface is optional. All builders have standalone public build functions for specific\n\n/// mesh types.\n\n///\n\n/// To use this interface one must specify the desired output mesh type beside the output binding as follows:\n\n/// ```\n\n/// use meshx::{builder::MeshBuilder, mesh::PolyMesh, builder::BoxBuilder};\n\n/// let mesh: PolyMesh<f64> = BoxBuilder { divisions: [0; 3] }.build();\n\n/// ```\n\n/// in contrast to\n\n/// ```\n\n/// use meshx::{builder::MeshBuilder, mesh::PolyMesh, builder::BoxBuilder};\n\n/// let mesh = BoxBuilder { divisions: [0; 3] }.build_polymesh::<f64>();\n\n/// ```\n\n///\n\n/// NOTE: `PlatonicSolidBuilder` does not implement this interface since it has specialized\n\n/// output types based on the type of polyhedron being built. This may change in the future.\n\npub trait MeshBuilder<M> {\n\n /// Builds a mesh of the given type `M`.\n\n fn build(self) -> M;\n\n}\n\n\n\n/// Axis plane orientation.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub enum AxisPlaneOrientation {\n\n XY,\n\n YZ,\n\n ZX,\n\n}\n\n\n\n/// Parameters that define a grid that lies in one of the 3 axis planes in 3D space.\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub struct GridBuilder {\n\n /// Number of grid cells in each column.\n\n pub rows: usize,\n\n /// Number of grid cells in each row.\n\n pub cols: usize,\n", "file_path": "src/mesh/builder.rs", "rank": 67, "score": 95997.43508152204 }, { "content": "pub trait Skew {\n\n type Output;\n\n /// Produce a skew form of self. 
For instance a 3D vector can be rearranged in a skew symmetric\n\n /// matrix, that corresponds to the cross product operator.\n\n fn skew(&self) -> Self::Output;\n\n}\n", "file_path": "src/ops/mod.rs", "rank": 68, "score": 94487.20733349294 }, { "content": "pub trait Empty {\n\n /// Construct an empty object.\n\n fn empty() -> Self;\n\n /// Check if this object is empty.\n\n fn is_empty(&self) -> bool;\n\n}\n\n\n", "file_path": "src/ops/mod.rs", "rank": 69, "score": 94487.20733349294 }, { "content": "/// Helper trait to interpret a slice of bytes representing a sized type.\n\n///\n\n/// Note: this trait is meant to be used transiently on the same platform, and so doesn't care\n\n/// about endianness. In other words, the slice of bytes generated by this trait should not be\n\n/// stored anywhere that outlives the lifetime of the program.\n\npub trait Bytes\n\nwhere\n\n Self: Sized,\n\n{\n\n /// Get a slice of bytes representing `Self`.\n\n #[inline]\n\n fn as_bytes(&self) -> &[u8] {\n\n let byte_ptr = self as *const Self as *const u8;\n\n unsafe { std::slice::from_raw_parts(byte_ptr, std::mem::size_of::<Self>()) }\n\n }\n\n\n\n /// Panics if the size of the given bytes slice is not equal to the size of `Self`.\n\n #[inline]\n\n fn interpret_bytes(bytes: &[u8]) -> &Self {\n\n assert_eq!(bytes.len(), std::mem::size_of::<Self>());\n\n let ptr = bytes.as_ptr() as *const Self;\n\n unsafe { &*ptr }\n\n }\n\n}\n\n\n", "file_path": "src/attrib/bytes.rs", "rank": 70, "score": 93532.04882106784 }, { "content": "pub trait Centroid<T> {\n\n /// Compute the centroid of the object.\n\n fn centroid(self) -> T;\n\n}\n\n\n", "file_path": "src/ops/mod.rs", "rank": 71, "score": 89889.21727593924 }, { "content": "pub trait Normal<T> {\n\n /// Compute the unit normal of this object.\n\n fn normal(self) -> T;\n\n}\n\n\n", "file_path": "src/ops/mod.rs", "rank": 72, "score": 89889.21727593924 }, { "content": "pub trait Area<T> {\n\n /// Compute the area of the object.\n\n fn area(self) -> 
T;\n\n\n\n /// Compute the signed area of the object. The area is negative when\n\n /// the object is inverted.\n\n fn signed_area(self) -> T;\n\n}\n\n\n", "file_path": "src/ops/mod.rs", "rank": 73, "score": 89889.21727593924 }, { "content": "pub trait Volume<T> {\n\n /// Compute the volume of the object.\n\n fn volume(self) -> T;\n\n\n\n /// Compute the signed volume of the object. The volume is negative when\n\n /// the object is inverted.\n\n fn signed_volume(self) -> T;\n\n}\n\n\n", "file_path": "src/ops/mod.rs", "rank": 74, "score": 89889.21727593924 }, { "content": "pub trait NumCells {\n\n fn num_cells(&self) -> usize;\n\n}\n\n\n\n// Topology interfaces\n\n\n\nmacro_rules! def_topo_trait {\n\n (__impl,\n\n $type:ident :\n\n $topo_fn:ident,\n\n $dst_fn:ident,\n\n $transport_fn:ident,\n\n $num_fn:ident,\n\n $num_at_fn:ident;\n\n $from_index:ident,\n\n $to_index:ident,\n\n $topo_index:ident\n\n ) => {\n\n /// Index of the destination element from the source index.\n\n fn $transport_fn<I>(&self, i: I, k: usize) -> Option<$to_index>\n", "file_path": "src/mesh/topology.rs", "rank": 75, "score": 89181.91150159587 }, { "content": "pub trait NumEdges {\n\n fn num_edges(&self) -> usize;\n\n}\n", "file_path": "src/mesh/topology.rs", "rank": 76, "score": 89181.91150159587 }, { "content": "pub trait NumFaces {\n\n fn num_faces(&self) -> usize;\n\n}\n", "file_path": "src/mesh/topology.rs", "rank": 77, "score": 89181.91150159587 }, { "content": "pub trait NumVertices {\n\n fn num_vertices(&self) -> usize;\n\n}\n", "file_path": "src/mesh/topology.rs", "rank": 78, "score": 89181.91150159587 }, { "content": "fn impl_new_vec_collection(\n\n generics: &Generics,\n\n name: &Ident,\n\n vec_type: &Type,\n\n parm: &GenericArgument,\n\n field_idx: Index,\n\n) -> TokenStream {\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n quote! 
{\n\n //impl #impl_generics ::std::ops::Deref for #name #ty_generics #where_clause {\n\n // type Target = #vec_type;\n\n // #[inline]\n\n // fn deref(&self) -> &#vec_type {\n\n // &self.#field_idx\n\n // }\n\n //}\n\n //impl #impl_generics ::std::ops::DerefMut for #name #ty_generics #where_clause {\n\n // #[inline]\n\n // fn deref_mut(&mut self) -> &mut #vec_type {\n\n // &mut self.#field_idx\n", "file_path": "meshx-derive/src/lib.rs", "rank": 79, "score": 89151.64210320605 }, { "content": "/// Shape matrices are useful for finite element analysis.\n\npub trait ShapeMatrix<M> {\n\n /// Return a shape matrix of the given type `M`.\n\n fn shape_matrix(self) -> M;\n\n}\n\n\n", "file_path": "src/ops/mod.rs", "rank": 80, "score": 86967.9980427881 }, { "content": "pub trait BoundingBox<T> {\n\n /// Compute the bounding box of this object.\n\n fn bounding_box(&self) -> BBox<T>;\n\n}\n\n\n", "file_path": "src/ops/mod.rs", "rank": 81, "score": 86963.3136646756 }, { "content": "/// An \"intrinsic\" trait for accessing vertex positions on a mesh.\n\n/// This trait can be implemented automatically by deriving the virtual \"Intrinsic\" trait and\n\n/// taggin the field with the vertex positions field with the `#[intrinsic(VertexPositions)]`\n\n/// attribute. 
Make sure that `VertexPositions` is in scope, or specify the path in the argument to\n\n/// the `intrinsic` attribute directly.\n\npub trait VertexPositions {\n\n type Element;\n\n\n\n /// Vertex positions as a slice of triplets.\n\n fn vertex_positions(&self) -> &[Self::Element];\n\n\n\n /// Vertex positions as a mutable slice of triplets.\n\n fn vertex_positions_mut(&mut self) -> &mut [Self::Element];\n\n\n\n /// Vertex iterator.\n\n #[inline]\n\n fn vertex_position_iter(&self) -> Iter<Self::Element> {\n\n self.vertex_positions().iter()\n\n }\n\n\n\n /// Mutable vertex iterator.\n\n #[inline]\n\n fn vertex_position_iter_mut(&mut self) -> IterMut<Self::Element> {\n\n self.vertex_positions_mut().iter_mut()\n\n }\n", "file_path": "src/mesh/vertex_positions.rs", "rank": 82, "score": 86349.13888633662 }, { "content": "/// This trait identifies all indices that identify a particular element in a mesh, whether it is a\n\n/// vertex or a triangle, or even a connectivity between triangles and vertices like a\n\n/// triangle-vertex.\n\npub trait ElementIndex<T>:\n\n Copy\n\n + Clone\n\n + PartialEq\n\n + PartialOrd\n\n + Eq\n\n + Ord\n\n + From<T>\n\n + Into<T>\n\n + Add<Output = Self>\n\n + Add<T, Output = Self>\n\n + Sub<Output = Self>\n\n + Sub<T, Output = Self>\n\n + Mul<T, Output = Self>\n\n + Div<T, Output = Self>\n\n + Rem<T, Output = Self>\n\n{\n\n}\n\n\n\nmacro_rules! 
impl_index_type {\n", "file_path": "src/mesh/topology.rs", "rank": 83, "score": 84951.14839429861 }, { "content": "#[proc_macro_derive(Intrinsic, attributes(intrinsics, intrinsic))]\n\npub fn intrinsic(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let input: DeriveInput = syn::parse(input).unwrap();\n\n\n\n let gen = intrinsic::impl_intrinsic(&input);\n\n\n\n gen.into()\n\n}\n\n\n\n/// Derive macro for implementing the `Attrib` trait for given struct.\n", "file_path": "meshx-derive/src/lib.rs", "rank": 84, "score": 84337.95791964488 }, { "content": "/// Rotate a given object by a certain amount. All functions rotate the object using the\n\n/// right-hand-rule.\n\npub trait Rotate<T: RealField> {\n\n /// Rotate the object using the given column-major rotation matrix.\n\n fn rotate_by_matrix(&mut self, mtx: [[T; 3]; 3]);\n\n\n\n /// Rotate the object around the given unit vector `u` by the given angle `theta` (in radians).\n\n ///\n\n /// Note that it is assumed that `u` is indeed a unit vector, no further normalization should\n\n /// be performed.\n\n fn rotate(&mut self, axis: [T; 3], theta: T) {\n\n let u = Vector3::from(axis.clone());\n\n let [x, y, z] = axis;\n\n let id = Matrix3::identity();\n\n let u_skew = u.skew();\n\n let cos_theta = theta.clone().cos();\n\n\n\n // Compute rotation matrix\n\n // R = cos(theta) * I + sin(theta)*[u]_X + (1 - cos(theta))(uu^T)\n\n // Compute outer product\n\n let u_v_t = {\n\n let [a, b, c]: [T; 3] = (u * (T::one() - cos_theta.clone())).into();\n", "file_path": "src/ops/transform.rs", "rank": 85, "score": 83000.60451622929 }, { "content": "fn insert_2d_array_attrib<'a, T, M, I>(\n\n buf: &[T],\n\n name: &'a str,\n\n mesh: &mut M,\n\n remap: Option<&[usize]>,\n\n) -> Result<(), Error>\n\nwhere\n\n T: AttributeValue + Copy + Default,\n\n I: AttribIndex<M>,\n\n M: Attrib,\n\n{\n\n let n = 9;\n\n let mut vecs = Vec::with_capacity(buf.len() / n);\n\n let mut count_comp = 0;\n\n let mut cur = [[T::default(); 3]; 
3];\n\n let mut push_val = |val| {\n\n cur[count_comp / 3][count_comp % 3] = val; // row-major -> col-major\n\n count_comp += 1;\n\n if count_comp == n {\n\n vecs.push(cur);\n", "file_path": "src/io/vtk.rs", "rank": 86, "score": 82508.88715784022 }, { "content": "fn insert_array_attrib<'a, T, M, I>(\n\n buf: &[T],\n\n name: &'a str,\n\n mesh: &mut M,\n\n remap: Option<&[usize]>,\n\n) -> Result<(), Error>\n\nwhere\n\n T: AttributeValue + Default,\n\n I: AttribIndex<M>,\n\n M: Attrib,\n\n{\n\n let remapped_buf = if let Some(remap) = remap {\n\n remap.iter().map(|&i| buf[i].clone()).collect()\n\n } else {\n\n buf.to_vec()\n\n };\n\n mesh.insert_attrib_data::<_, I>(name, remapped_buf)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/io/vtk.rs", "rank": 87, "score": 82508.88715784022 }, { "content": "fn insert_array_attrib_n<'a, T, M, I: AttribIndex<M>, N>(\n\n buf: &[T],\n\n name: &'a str,\n\n mesh: &mut M,\n\n remap: Option<&[usize]>,\n\n) -> Result<(), Error>\n\nwhere\n\n T: bytemuck::Pod + AttributeValue + Default,\n\n M: Attrib,\n\n N: flatk::Unsigned + Default + flatk::Array<T>,\n\n <N as flatk::Array<T>>::Array: Default + PartialEq + std::fmt::Debug + Send + Sync,\n\n{\n\n let remapped_buf = if let Some(remap) = remap {\n\n remap\n\n .iter()\n\n .flat_map(|&i| (0..N::to_usize()).map(move |j| buf[N::to_usize() * i + j]))\n\n .collect()\n\n } else {\n\n buf.to_vec()\n\n };\n\n let chunked = flatk::UniChunked::<_, U<N>>::from_flat(remapped_buf);\n\n mesh.insert_attrib_data::<_, I>(name, chunked.into_arrays())?;\n\n Ok(())\n\n}\n\n\n\n/// Adds VTK attributes to the given mesh, and returns any unprocessed attributes that can be\n\n/// processed further.\n\n///\n\n/// If the reason an attribute is not processed is because it\n\n/// has an unsupported type, we leave it out of the remainder.\n", "file_path": "src/io/vtk.rs", "rank": 88, "score": 82464.83763435925 }, { "content": "/// Helper to split attributes based on the given connectivity info.\n\nfn split_attributes<A: 
Clone, I: Into<Option<usize>>>(\n\n src_dict: &AttribDict<A>,\n\n num_components: usize,\n\n connectivity: impl Iterator<Item = I> + Clone,\n\n caches: &mut [AttribValueCache],\n\n) -> Vec<AttribDict<A>> {\n\n split_attributes_with(src_dict, num_components, |attrib, num_components| {\n\n let mut new_attribs = vec![attrib.duplicate_empty(); num_components];\n\n // Get an iterator of typeless values for this attribute.\n\n match &attrib.data {\n\n AttributeData::Direct(d) => {\n\n connectivity\n\n .clone()\n\n .zip(d.data().iter())\n\n .filter_map(|(comp_id, val_ref)| {\n\n comp_id.into().map(|comp_id| (comp_id, val_ref))\n\n })\n\n .for_each(|(valid_idx, val_ref)| {\n\n new_attribs[valid_idx]\n\n .data\n", "file_path": "src/algo/split.rs", "rank": 89, "score": 81432.85558944577 }, { "content": "#[proc_macro_derive(NewCollectionType)]\n\npub fn new_collection_type(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let input: DeriveInput = syn::parse(input).unwrap();\n\n\n\n let gen = impl_new_collection_type(&input);\n\n\n\n gen.into()\n\n}\n\n\n", "file_path": "meshx-derive/src/lib.rs", "rank": 90, "score": 81421.26259927492 }, { "content": "/// A trait defining the primary method for determining connectivity in a mesh.\n\n///\n\n/// `Src` specifies the element index for which to determine connectivity.\n\n/// `Via` specifies a secondary element index which identifies elements through which the\n\n/// connectivity is determined.\n\npub trait Connectivity<Src: ElementIndex<usize>, Via: ElementIndex<usize>> {\n\n /// Additional topology that may aid in computing connectivity.\n\n ///\n\n /// This is computed with `precompute_topo` and used in `push_neighbours`.\n\n type Topo: Default;\n\n\n\n /// Precompute additional topology information prior to determining connectivity.\n\n ///\n\n /// An optional function that allows implementers to precompute topology information to help\n\n /// with the implementation of `push_neighbours` when the mesh doesn't 
already support a\n\n /// certain type of topology.\n\n fn precompute_topo(&self) -> Self::Topo {\n\n Default::default()\n\n }\n\n\n\n /// Get the number of elements which are considered for connectivity\n\n ///\n\n /// E.g. triangles in triangle meshes or tets in a tetmesh.\n\n fn num_elements(&self) -> usize;\n\n\n", "file_path": "src/algo/connectivity.rs", "rank": 91, "score": 80531.36175676844 }, { "content": "/// TopoIndex is an index for data at a point of connectivity. For instance a `FaceVertexIndex`\n\n/// identifies a specific vertex pointed to by a face. This means it has a source and a\n\n/// destination. This trait defines the indices for this source and destination indices.\n\npub trait TopoIndex<I>: ElementIndex<I> {\n\n type SrcIndex: ElementIndex<I>;\n\n type DstIndex: ElementIndex<I>;\n\n}\n\n\n\nmacro_rules! impl_topo_index {\n\n ($topo_index:ident, $from_index:ident, $to_index:ident) => {\n\n impl TopoIndex<usize> for $topo_index {\n\n type SrcIndex = $from_index;\n\n type DstIndex = $to_index;\n\n }\n\n };\n\n}\n\n\n\nimpl_topo_index!(EdgeVertexIndex, EdgeIndex, VertexIndex);\n\nimpl_topo_index!(FaceVertexIndex, FaceIndex, VertexIndex);\n\nimpl_topo_index!(FaceEdgeIndex, FaceIndex, EdgeIndex);\n\nimpl_topo_index!(CellVertexIndex, CellIndex, VertexIndex);\n\nimpl_topo_index!(CellEdgeIndex, CellIndex, EdgeIndex);\n\nimpl_topo_index!(CellFaceIndex, CellIndex, FaceIndex);\n\n\n\nimpl_topo_index!(VertexEdgeIndex, VertexIndex, EdgeIndex);\n\nimpl_topo_index!(VertexFaceIndex, VertexIndex, FaceIndex);\n\nimpl_topo_index!(VertexCellIndex, VertexIndex, CellIndex);\n\nimpl_topo_index!(EdgeFaceIndex, EdgeIndex, FaceIndex);\n\nimpl_topo_index!(EdgeCellIndex, EdgeIndex, CellIndex);\n\nimpl_topo_index!(FaceCellIndex, FaceIndex, CellIndex);\n\n\n", "file_path": "src/mesh/topology.rs", "rank": 92, "score": 80004.78654406166 } ]
Rust
rust/rsmgp-sys/src/testing.rs
memgraph/mage
8c389146dfce35c436e941b04655d9f758351e46
#[cfg(test)] pub mod alloc { use libc::malloc; use std::mem::size_of; use crate::mgp::*; pub(crate) unsafe fn alloc_mgp_type() -> *mut mgp_type { malloc(size_of::<mgp_type>()) as *mut mgp_type } pub(crate) unsafe fn alloc_mgp_value() -> *mut mgp_value { malloc(size_of::<mgp_value>()) as *mut mgp_value } pub(crate) unsafe fn alloc_mgp_list() -> *mut mgp_list { malloc(size_of::<mgp_list>()) as *mut mgp_list } pub(crate) unsafe fn alloc_mgp_map() -> *mut mgp_map { malloc(size_of::<mgp_map>()) as *mut mgp_map } pub(crate) unsafe fn alloc_mgp_map_items_iterator() -> *mut mgp_map_items_iterator { malloc(size_of::<mgp_map_items_iterator>()) as *mut mgp_map_items_iterator } pub(crate) unsafe fn alloc_mgp_vertex() -> *mut mgp_vertex { malloc(size_of::<mgp_vertex>()) as *mut mgp_vertex } pub(crate) unsafe fn alloc_mgp_edge() -> *mut mgp_edge { malloc(size_of::<mgp_edge>()) as *mut mgp_edge } pub(crate) unsafe fn alloc_mgp_path() -> *mut mgp_path { malloc(size_of::<mgp_path>()) as *mut mgp_path } pub(crate) unsafe fn alloc_mgp_date() -> *mut mgp_date { malloc(size_of::<mgp_date>()) as *mut mgp_date } pub(crate) unsafe fn alloc_mgp_local_time() -> *mut mgp_local_time { malloc(size_of::<mgp_local_time>()) as *mut mgp_local_time } pub(crate) unsafe fn alloc_mgp_local_date_time() -> *mut mgp_local_date_time { malloc(size_of::<mgp_local_date_time>()) as *mut mgp_local_date_time } pub(crate) unsafe fn alloc_mgp_duration() -> *mut mgp_duration { malloc(size_of::<mgp_duration>()) as *mut mgp_duration } pub(crate) unsafe fn alloc_mgp_proc() -> *mut mgp_proc { malloc(size_of::<mgp_proc>()) as *mut mgp_proc } pub(crate) unsafe fn alloc_mgp_result_record() -> *mut mgp_result_record { malloc(size_of::<mgp_result_record>()) as *mut mgp_result_record } #[macro_export] macro_rules! mock_mgp_once { ($c_func_name:ident, $rs_return_func:expr) => { let $c_func_name = $c_func_name(); $c_func_name.expect().times(1).returning($rs_return_func); }; } #[macro_export] macro_rules! 
with_dummy { ($rs_test_func:expr) => { let memgraph = Memgraph::new_default(); $rs_test_func(&memgraph); }; (List, $rs_test_func:expr) => { let memgraph = Memgraph::new_default(); let list = List::new(std::ptr::null_mut(), &memgraph); $rs_test_func(&list); }; (Map, $rs_test_func:expr) => { let memgraph = Memgraph::new_default(); let map = Map::new(std::ptr::null_mut(), &memgraph); $rs_test_func(&map); }; (Vertex, $rs_test_func:expr) => { let memgraph = Memgraph::new_default(); let vertex = Vertex::new(std::ptr::null_mut(), &memgraph); $rs_test_func(&vertex); }; (Edge, $rs_test_func:expr) => { let memgraph = Memgraph::new_default(); let edge = Edge::new(std::ptr::null_mut(), &memgraph); $rs_test_func(&edge); }; (Path, $rs_test_func:expr) => { let memgraph = Memgraph::new_default(); let path = Path::new(std::ptr::null_mut(), &memgraph); $rs_test_func(&path); }; (Date, $rs_test_func:expr) => { let date = Date::new(std::ptr::null_mut()); $rs_test_func(&date); }; (LocalTime, $rs_test_func:expr) => { let local_time = LocalTime::new(std::ptr::null_mut()); $rs_test_func(&local_time); }; (LocalDateTime, $rs_test_func:expr) => { let local_date_time = LocalDateTime::new(std::ptr::null_mut()); $rs_test_func(&local_date_time); }; (Duration, $rs_test_func:expr) => { let duration = Duration::new(std::ptr::null_mut()); $rs_test_func(&duration); }; } }
#[cfg(test)] pub mod alloc { use libc::malloc; use std::mem::size_of; use crate::mgp::*; pub(crate) unsafe fn alloc_mgp_type() -> *mut mgp_type { malloc(size_of::<mgp_type>()) as *mut mgp_type } pub(crate) unsafe fn alloc_mgp_value() -> *mut mgp_value { malloc(size_of::<mgp_value>()) as *mut mgp_value } pub(crate) unsafe fn alloc_mgp_list() -> *mut mgp_list { malloc(size_of::<mgp_list>()) as *mut mgp_list } pub(crate) unsafe fn alloc_mgp_map() -> *mut mgp_map { malloc(size_of::<mgp_map>()) as *mut mgp_map } pub(crate) unsafe fn alloc_mgp_map_items_iterator() -> *mut mgp_map_items_iterator { malloc(size_of::<mgp_map_items_iterator>()) as *mut mgp_map_items_iterator } pub(crate) unsafe fn alloc_mgp_vertex() -> *mut mgp_vertex { malloc(size_of::<mgp_vertex>()) as *mut mgp_vertex } pub(crate) unsafe fn alloc_mgp_edge() -> *mut mgp_edge { malloc(size_of::<mgp_edge>()) as *mut mgp_edge } pub(crate) unsafe fn alloc_mgp_path() -> *mut mgp_path {
$c_func_name.expect().times(1).returning($rs_return_func); }; } #[macro_export] macro_rules! with_dummy { ($rs_test_func:expr) => { let memgraph = Memgraph::new_default(); $rs_test_func(&memgraph); }; (List, $rs_test_func:expr) => { let memgraph = Memgraph::new_default(); let list = List::new(std::ptr::null_mut(), &memgraph); $rs_test_func(&list); }; (Map, $rs_test_func:expr) => { let memgraph = Memgraph::new_default(); let map = Map::new(std::ptr::null_mut(), &memgraph); $rs_test_func(&map); }; (Vertex, $rs_test_func:expr) => { let memgraph = Memgraph::new_default(); let vertex = Vertex::new(std::ptr::null_mut(), &memgraph); $rs_test_func(&vertex); }; (Edge, $rs_test_func:expr) => { let memgraph = Memgraph::new_default(); let edge = Edge::new(std::ptr::null_mut(), &memgraph); $rs_test_func(&edge); }; (Path, $rs_test_func:expr) => { let memgraph = Memgraph::new_default(); let path = Path::new(std::ptr::null_mut(), &memgraph); $rs_test_func(&path); }; (Date, $rs_test_func:expr) => { let date = Date::new(std::ptr::null_mut()); $rs_test_func(&date); }; (LocalTime, $rs_test_func:expr) => { let local_time = LocalTime::new(std::ptr::null_mut()); $rs_test_func(&local_time); }; (LocalDateTime, $rs_test_func:expr) => { let local_date_time = LocalDateTime::new(std::ptr::null_mut()); $rs_test_func(&local_date_time); }; (Duration, $rs_test_func:expr) => { let duration = Duration::new(std::ptr::null_mut()); $rs_test_func(&duration); }; } }
malloc(size_of::<mgp_path>()) as *mut mgp_path } pub(crate) unsafe fn alloc_mgp_date() -> *mut mgp_date { malloc(size_of::<mgp_date>()) as *mut mgp_date } pub(crate) unsafe fn alloc_mgp_local_time() -> *mut mgp_local_time { malloc(size_of::<mgp_local_time>()) as *mut mgp_local_time } pub(crate) unsafe fn alloc_mgp_local_date_time() -> *mut mgp_local_date_time { malloc(size_of::<mgp_local_date_time>()) as *mut mgp_local_date_time } pub(crate) unsafe fn alloc_mgp_duration() -> *mut mgp_duration { malloc(size_of::<mgp_duration>()) as *mut mgp_duration } pub(crate) unsafe fn alloc_mgp_proc() -> *mut mgp_proc { malloc(size_of::<mgp_proc>()) as *mut mgp_proc } pub(crate) unsafe fn alloc_mgp_result_record() -> *mut mgp_result_record { malloc(size_of::<mgp_result_record>()) as *mut mgp_result_record } #[macro_export] macro_rules! mock_mgp_once { ($c_func_name:ident, $rs_return_func:expr) => { let $c_func_name = $c_func_name();
random
[ { "content": "/// Combines the given array of types from left to right to construct [mgp_type]. E.g., if the\n\n/// input is [Type::List, Type::Int], the constructed [mgp_type] is going to be list of integers.\n\nfn resolve_mgp_type(types: &[Type]) -> *mut mgp_type {\n\n unsafe {\n\n let mut mgp_type_ptr: *mut mgp_type = std::ptr::null_mut();\n\n for field_type in types.iter().rev() {\n\n mgp_type_ptr = match field_type {\n\n Type::Any => invoke_mgp_func!(*mut mgp_type, ffi::mgp_type_any).unwrap(),\n\n Type::Bool => invoke_mgp_func!(*mut mgp_type, ffi::mgp_type_bool).unwrap(),\n\n Type::Number => invoke_mgp_func!(*mut mgp_type, ffi::mgp_type_number).unwrap(),\n\n Type::Int => invoke_mgp_func!(*mut mgp_type, ffi::mgp_type_int).unwrap(),\n\n Type::Double => invoke_mgp_func!(*mut mgp_type, ffi::mgp_type_float).unwrap(),\n\n Type::String => invoke_mgp_func!(*mut mgp_type, ffi::mgp_type_string).unwrap(),\n\n Type::Map => invoke_mgp_func!(*mut mgp_type, ffi::mgp_type_map).unwrap(),\n\n Type::Vertex => invoke_mgp_func!(*mut mgp_type, ffi::mgp_type_node).unwrap(),\n\n Type::Edge => invoke_mgp_func!(*mut mgp_type, ffi::mgp_type_relationship).unwrap(),\n\n Type::Path => invoke_mgp_func!(*mut mgp_type, ffi::mgp_type_path).unwrap(),\n\n Type::Nullable => {\n\n invoke_mgp_func!(*mut mgp_type, ffi::mgp_type_nullable, mgp_type_ptr).unwrap()\n\n }\n\n Type::List => {\n\n invoke_mgp_func!(*mut mgp_type, ffi::mgp_type_list, mgp_type_ptr).unwrap()\n", "file_path": "rust/rsmgp-sys/src/memgraph/mod.rs", "rank": 0, "score": 193648.50716798106 }, { "content": "fn create_naive_time(\n\n hour: u32,\n\n minute: u32,\n\n second: u32,\n\n millisecond: u32,\n\n microsecond: u32,\n\n) -> NaiveTime {\n\n NaiveTime::from_hms_micro(\n\n hour,\n\n minute,\n\n second,\n\n millisecond * MICROS_PER_MILLIS + microsecond,\n\n )\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/temporal/mod.rs", "rank": 1, "score": 125052.14176498681 }, { "content": "#[test]\n\n#[serial]\n\nfn 
test_date_unable_to_allocate() {\n\n mock_mgp_once!(mgp_date_from_parameters_context, move |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let error = Date::from_naive_date(&NaiveDate::from_num_days_from_ce(0), &memgraph);\n\n assert!(error.is_err());\n\n assert_eq!(error.err().unwrap(), Error::UnableToCreateDateFromNaiveDate);\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 2, "score": 124021.40319609246 }, { "content": "#[test]\n\n#[serial]\n\nfn test_duration_unable_to_allocate() {\n\n mock_mgp_once!(mgp_duration_from_microseconds_context, move |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let error = Duration::from_chrono_duration(&chrono::Duration::microseconds(0), &memgraph);\n\n assert!(error.is_err());\n\n assert_eq!(\n\n error.err().unwrap(),\n\n Error::UnableToCreateDurationFromChronoDuration\n\n );\n\n });\n\n}\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 3, "score": 124021.40319609246 }, { "content": "/// Sets error that will be returned to the caller.\n\npub fn set_memgraph_error_msg(msg: &CStr, memgraph: &Memgraph) {\n\n unsafe {\n\n invoke_void_mgp_func!(\n\n ffi::mgp_result_set_error_msg,\n\n memgraph.result_ptr(),\n\n msg.as_ptr()\n\n )\n\n .expect(\"Unable to pass error message to the Memgraph engine.\");\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use c_str_macro::c_str;\n\n use serial_test::serial;\n\n\n\n use super::*;\n\n use crate::mgp::mock_ffi::*;\n\n use crate::{mock_mgp_once, with_dummy};\n\n\n", "file_path": "rust/rsmgp-sys/src/rsmgp.rs", "rank": 4, "score": 123938.46159979876 }, { "content": "#[test]\n\n#[serial]\n\nfn test_local_time_unable_to_allocate() {\n\n mock_mgp_once!(mgp_local_time_from_parameters_context, move |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let error =\n\n 
LocalTime::from_naive_time(&NaiveTime::from_num_seconds_from_midnight(0, 0), &memgraph);\n\n assert!(error.is_err());\n\n assert_eq!(\n\n error.err().unwrap(),\n\n Error::UnableToCreateLocalTimeFromNaiveTime\n\n );\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 5, "score": 122422.23764063348 }, { "content": "#[test]\n\n#[serial]\n\nfn test_local_date_time_unable_to_allocate() {\n\n mock_mgp_once!(\n\n mgp_local_date_time_from_parameters_context,\n\n move |_, _, _| { mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE }\n\n );\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let error =\n\n LocalDateTime::from_naive_date_time(&NaiveDateTime::from_timestamp(0, 0), &memgraph);\n\n assert!(error.is_err());\n\n assert_eq!(\n\n error.err().unwrap(),\n\n Error::UnableToCreateLocalDateTimeFromNaiveDateTime\n\n );\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 6, "score": 120881.2626916116 }, { "content": "fn create_mgp_local_time_parameters(from: &NaiveTime) -> mgp_local_time_parameters {\n\n let eliminate_leap_seconds = |nanos: u32| {\n\n const NANOS_PER_SECONDS: u32 = 1_000_000_000;\n\n nanos % NANOS_PER_SECONDS\n\n };\n\n\n\n let nanoseconds = eliminate_leap_seconds(from.nanosecond());\n\n mgp_local_time_parameters {\n\n hour: from.hour() as i32,\n\n minute: from.minute() as i32,\n\n second: from.second() as i32,\n\n millisecond: (nanoseconds / NANOS_PER_MILLIS) as i32,\n\n microsecond: (nanoseconds % NANOS_PER_MILLIS / NANOS_PER_MICROS) as i32,\n\n }\n\n}\n\n\n\npub(crate) struct LocalTime {\n\n ptr: *mut mgp_local_time,\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/temporal/mod.rs", "rank": 7, "score": 102411.06996483446 }, { "content": "fn main() {\n\n let mg_procedure_path = \"mgp/mg_procedure.h\";\n\n\n\n // Tell cargo to invalidate the built crate whenever the wrapper changes\n\n println!(\n\n \"{}\",\n\n format!(\"{}{}\", \"cargo:rerun-if-changed=\", mg_procedure_path)\n\n );\n\n\n\n // The bindgen::Builder 
is the main entry point\n\n // to bindgen, and lets you build up options for\n\n // the resulting bindings.\n\n let bindings = bindgen::Builder::default()\n\n .header(mg_procedure_path)\n\n .blacklist_function(\"mgp_*\")\n\n .rustified_enum(\"mgp_error\")\n\n .rustified_enum(\"mgp_value_type\")\n\n .parse_callbacks(Box::new(bindgen::CargoCallbacks))\n\n .generate()\n\n .expect(\"Unable to generate bindings\")\n", "file_path": "rust/rsmgp-sys/build.rs", "rank": 8, "score": 85194.50760718147 }, { "content": "#[test]\n\n#[serial]\n\nfn test_at() {\n\n mock_mgp_once!(mgp_map_at_context, |_, _, value_ptr_ptr| unsafe {\n\n (*value_ptr_ptr) = null_mut();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n\n\n with_dummy!(Map, |map: &Map| {\n\n let value = map.at(c_str!(\"key\"));\n\n assert!(value.is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/map/tests.rs", "rank": 9, "score": 83294.34611536594 }, { "content": "#[test]\n\n#[serial]\n\nfn test_value_at() {\n\n mock_mgp_once!(mgp_list_at_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_OUT_OF_RANGE\n\n });\n\n\n\n with_dummy!(List, |list: &List| {\n\n assert!(list.value_at(0).is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/list/tests.rs", "rank": 10, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_label_at() {\n\n let test_label = CString::new(\"test\");\n\n mock_mgp_once!(\n\n mgp_vertex_label_at_context,\n\n move |vertex, _, result| unsafe {\n\n assert_eq!(vertex, null_mut());\n\n (*result).name = test_label.as_ref().unwrap().as_ptr();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n }\n\n );\n\n\n\n with_dummy!(Vertex, |vertex: &Vertex| {\n\n assert_eq!(vertex.label_at(5).unwrap(), CString::new(\"test\").unwrap());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/vertex/tests.rs", "rank": 11, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_vertex_at() {\n\n mock_mgp_once!(mgp_path_vertex_at_context, |_, _, _| {\n\n 
mgp_error::MGP_ERROR_OUT_OF_RANGE\n\n });\n\n\n\n with_dummy!(Path, |path: &Path| {\n\n assert!(path.vertex_at(0).is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/path/tests.rs", "rank": 12, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_capacity() {\n\n mock_mgp_once!(mgp_list_capacity_context, |_, capacity_ptr| unsafe {\n\n (*capacity_ptr) = 42;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n\n\n with_dummy!(List, |list: &List| {\n\n assert_eq!(list.capacity(), 42);\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/list/tests.rs", "rank": 13, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_to_vertex() {\n\n mock_mgp_once!(mgp_edge_get_to_context, |_, _| {\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_vertex_copy_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(Edge, |edge: &Edge| {\n\n assert!(edge.to_vertex().is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/edge/tests.rs", "rank": 14, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_size() {\n\n mock_mgp_once!(mgp_path_size_context, |_, size_ptr| unsafe {\n\n (*size_ptr) = 2;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n\n\n with_dummy!(Path, |path: &Path| {\n\n assert_eq!(path.size(), 2);\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/path/tests.rs", "rank": 15, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_id() {\n\n mock_mgp_once!(mgp_edge_get_id_context, |_, edge_id_ptr| unsafe {\n\n (*edge_id_ptr).as_int = 1;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n\n\n with_dummy!(Edge, |edge: &Edge| {\n\n assert_eq!(edge.id(), 1);\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/edge/tests.rs", "rank": 16, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_from_vertex() {\n\n mock_mgp_once!(mgp_edge_get_from_context, |_, _| {\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n 
mock_mgp_once!(mgp_vertex_copy_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(Edge, |edge: &Edge| {\n\n assert!(edge.from_vertex().is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/edge/tests.rs", "rank": 17, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_expand() {\n\n mock_mgp_once!(mgp_path_expand_context, |_, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let edge = Edge::new(std::ptr::null_mut(), &memgraph);\n\n let path = Path::new(std::ptr::null_mut(), &memgraph);\n\n assert!(path.expand(&edge).is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/path/tests.rs", "rank": 18, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_id() {\n\n mock_mgp_once!(mgp_vertex_get_id_context, |_, vertex_id_ptr| unsafe {\n\n (*vertex_id_ptr).as_int = 72;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n\n\n with_dummy!(Vertex, |vertex: &Vertex| {\n\n assert_eq!(vertex.id(), 72);\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/vertex/tests.rs", "rank": 19, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_size() {\n\n mock_mgp_once!(mgp_list_size_context, |_, size_ptr| unsafe {\n\n (*size_ptr) = 3;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n\n\n with_dummy!(List, |list: &List| {\n\n assert_eq!(list.size(), 3);\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/list/tests.rs", "rank": 20, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_in_edges() {\n\n mock_mgp_once!(mgp_vertex_iter_in_edges_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(Vertex, |vertex: &Vertex| {\n\n let iter = vertex.in_edges();\n\n assert!(iter.is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/vertex/tests.rs", "rank": 21, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_size() {\n\n 
mock_mgp_once!(mgp_map_size_context, |_, size_ptr| unsafe {\n\n (*size_ptr) = 3;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n\n\n with_dummy!(Map, |map: &Map| {\n\n let value = map.size();\n\n assert_eq!(value, 3);\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/map/tests.rs", "rank": 22, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_property() {\n\n mock_mgp_once!(mgp_edge_get_property_context, |_, _, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(Edge, |edge: &Edge| {\n\n assert!(edge.property(c_str!(\"prop\")).is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/edge/tests.rs", "rank": 23, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_insert() {\n\n mock_mgp_once!(mgp_value_make_null_context, |_, value_ptr_ptr| unsafe {\n\n (*value_ptr_ptr) = alloc_mgp_value();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_map_insert_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_KEY_ALREADY_EXISTS\n\n });\n\n mock_mgp_once!(mgp_value_destroy_context, |ptr| unsafe {\n\n free(ptr as *mut c_void);\n\n });\n\n\n\n with_dummy!(Map, |map: &Map| {\n\n let value = Value::Null;\n\n assert!(map.insert(c_str!(\"key\"), &value).is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/map/tests.rs", "rank": 24, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_append() {\n\n mock_mgp_once!(mgp_value_make_null_context, |_, value_ptr_ptr| unsafe {\n\n (*value_ptr_ptr) = alloc_mgp_value();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_list_append_context, |_, _| {\n\n mgp_error::MGP_ERROR_INSUFFICIENT_BUFFER\n\n });\n\n mock_mgp_once!(mgp_value_destroy_context, |ptr| unsafe {\n\n free(ptr as *mut c_void);\n\n });\n\n\n\n with_dummy!(List, |list: &List| {\n\n assert!(list.append(&Value::Null).is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/list/tests.rs", "rank": 25, "score": 82400.1930318776 }, { 
"content": "#[test]\n\n#[serial]\n\nfn test_properties() {\n\n mock_mgp_once!(mgp_vertex_iter_properties_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(Vertex, |vertex: &Vertex| {\n\n let iter = vertex.properties();\n\n assert!(iter.is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/vertex/tests.rs", "rank": 26, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_out_edges() {\n\n mock_mgp_once!(mgp_vertex_iter_out_edges_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(Vertex, |vertex: &Vertex| {\n\n let iter = vertex.out_edges();\n\n assert!(iter.is_err());\n\n });\n\n}\n", "file_path": "rust/rsmgp-sys/src/vertex/tests.rs", "rank": 27, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_has_label() {\n\n mock_mgp_once!(\n\n mgp_vertex_has_label_context,\n\n |vertex, label, result| unsafe {\n\n assert_eq!(vertex, null_mut());\n\n assert_eq!(CStr::from_ptr(label.name), c_str!(\"labela\"));\n\n (*result) = 1;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n }\n\n );\n\n\n\n with_dummy!(Vertex, |vertex: &Vertex| {\n\n assert_eq!(vertex.has_label(c_str!(\"labela\")).unwrap(), true);\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/vertex/tests.rs", "rank": 28, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_edge_at() {\n\n mock_mgp_once!(mgp_path_edge_at_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_OUT_OF_RANGE\n\n });\n\n\n\n with_dummy!(Path, |path: &Path| {\n\n assert!(path.edge_at(0).is_err());\n\n });\n\n}\n", "file_path": "rust/rsmgp-sys/src/path/tests.rs", "rank": 29, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_property() {\n\n mock_mgp_once!(\n\n mgp_vertex_get_property_context,\n\n move |vertex, prop_name, memory, _| {\n\n assert_eq!(vertex, null_mut());\n\n assert_eq!(prop_name, c_str!(\"test\").as_ptr());\n\n assert_eq!(memory, null_mut());\n\n 
mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n }\n\n );\n\n\n\n with_dummy!(Vertex, |vertex: &Vertex| {\n\n assert_eq!(\n\n vertex.property(c_str!(\"test\")).err().unwrap(),\n\n Error::UnableToGetVertexProperty\n\n );\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/vertex/tests.rs", "rank": 30, "score": 82400.1930318776 }, { "content": "#[test]\n\n#[serial]\n\nfn test_properties_iterator() {\n\n let property_getter = |_, prop_ptr_ptr: *mut *mut mgp_property| unsafe {\n\n (*prop_ptr_ptr) = null_mut();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n };\n\n mock_mgp_once!(mgp_properties_iterator_get_context, property_getter);\n\n mock_mgp_once!(mgp_properties_iterator_next_context, property_getter);\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let mut iterator = PropertiesIterator::new(null_mut(), &memgraph);\n\n\n\n let value_1 = iterator.next();\n\n assert!(value_1.is_none());\n\n\n\n let value_2 = iterator.next();\n\n assert!(value_2.is_none());\n\n });\n\n}\n", "file_path": "rust/rsmgp-sys/src/property/tests.rs", "rank": 31, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_from_naive_date() {\n\n let test_date = |date: NaiveDate| {\n\n mock_mgp_once!(\n\n mgp_date_from_parameters_context,\n\n move |date_params_ptr, _, date_ptr_ptr| unsafe {\n\n check_date_params(&*date_params_ptr, &date);\n\n (*date_ptr_ptr) = alloc_mgp_date();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n }\n\n );\n\n mock_mgp_once!(mgp_date_destroy_context, |ptr| unsafe {\n\n free(ptr as *mut c_void);\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let _mgp_date = Date::from_naive_date(&date, &memgraph);\n\n });\n\n };\n\n test_date(NaiveDate::from_ymd(0, 1, 1));\n\n test_date(NaiveDate::from_ymd(1834, 1, 1));\n\n test_date(NaiveDate::from_ymd(1996, 12, 7));\n\n test_date(NaiveDate::from_ymd(9999, 12, 31));\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 32, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_edge_type() 
{\n\n let edge_type = CString::new(\"type\").unwrap();\n\n mock_mgp_once!(mgp_edge_get_type_context, move |_, edge_type_ptr| unsafe {\n\n (*edge_type_ptr).name = edge_type.as_ptr();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n\n\n with_dummy!(Edge, |edge: &Edge| {\n\n let value = edge.edge_type().unwrap();\n\n assert_eq!(value, CString::new(\"type\").unwrap());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/edge/tests.rs", "rank": 33, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_date_accessors() {\n\n let year = 1934;\n\n let month = 2;\n\n let day = 31;\n\n mock_mgp_once!(mgp_date_get_year_context, move |_, year_ptr| unsafe {\n\n (*year_ptr) = year;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_date_get_month_context, move |_, month_ptr| unsafe {\n\n (*month_ptr) = month;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_date_get_day_context, move |_, day_ptr| unsafe {\n\n (*day_ptr) = day;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n\n\n with_dummy!(Date, |date: &Date| {\n\n assert_eq!(date.year(), year);\n\n assert_eq!(date.month() as i32, month);\n\n assert_eq!(date.day() as i32, day);\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 34, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_from_chrono_duration() {\n\n let test_duration = |duration: chrono::Duration| {\n\n mock_mgp_once!(\n\n mgp_duration_from_microseconds_context,\n\n move |micros, _, duration_ptr_ptr| unsafe {\n\n assert_eq!(micros, duration.num_microseconds().unwrap());\n\n (*duration_ptr_ptr) = alloc_mgp_duration();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n }\n\n );\n\n mock_mgp_once!(mgp_duration_destroy_context, |ptr| unsafe {\n\n free(ptr as *mut c_void);\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let _duration = Duration::from_chrono_duration(&duration, &memgraph);\n\n });\n\n };\n\n test_duration(chrono::Duration::microseconds(0));\n\n 
test_duration(chrono::Duration::microseconds(-1));\n\n test_duration(chrono::Duration::microseconds(1));\n\n test_duration(chrono::Duration::microseconds(20_000));\n\n test_duration(chrono::Duration::microseconds(-23_456));\n\n test_duration(chrono::Duration::microseconds(2i64.pow(31)));\n\n test_duration(chrono::Duration::microseconds(-(2i64.pow(31))));\n\n test_duration(chrono::Duration::microseconds(std::i64::MAX));\n\n test_duration(chrono::Duration::microseconds(std::i64::MIN));\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 35, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_to_naive_date() {\n\n let test_date = |date_to_test: NaiveDate| {\n\n mock_mgp_once!(mgp_date_get_year_context, move |_, year_ptr| unsafe {\n\n (*year_ptr) = date_to_test.year() as i32;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_date_get_month_context, move |_, month_ptr| unsafe {\n\n (*month_ptr) = date_to_test.month() as i32;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_date_get_day_context, move |_, day_ptr| unsafe {\n\n (*day_ptr) = date_to_test.day() as i32;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n with_dummy!(Date, |date: &Date| {\n\n assert_eq!(date.to_naive_date(), date_to_test)\n\n });\n\n };\n\n test_date(NaiveDate::from_ymd(0, 1, 1));\n\n test_date(NaiveDate::from_ymd(1834, 1, 1));\n\n test_date(NaiveDate::from_ymd(1996, 12, 7));\n\n test_date(NaiveDate::from_ymd(9999, 12, 31));\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 36, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_edges_iterator() {\n\n let edge_getter = |_, edge_ptr: *mut *mut mgp_edge| unsafe {\n\n (*edge_ptr) = null_mut();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n };\n\n mock_mgp_once!(mgp_edges_iterator_get_context, edge_getter);\n\n mock_mgp_once!(mgp_edges_iterator_next_context, edge_getter);\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let mut iterator = 
EdgesIterator::new(null_mut(), &memgraph);\n\n assert!(iterator.next().is_none());\n\n assert!(iterator.next().is_none());\n\n });\n\n}\n", "file_path": "rust/rsmgp-sys/src/edge/tests.rs", "rank": 37, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_from_naive_time() {\n\n let test_time = |time: NaiveTime, millis: i32, micros: i32| {\n\n mock_mgp_once!(\n\n mgp_local_time_from_parameters_context,\n\n move |local_time_params_ptr, _, local_time_ptr_ptr| unsafe {\n\n check_local_time_params(&*local_time_params_ptr, &time, millis, micros);\n\n (*local_time_ptr_ptr) = alloc_mgp_local_time();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n }\n\n );\n\n mock_mgp_once!(mgp_local_time_destroy_context, |ptr| unsafe {\n\n free(ptr as *mut c_void);\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let _mgp_local_time = LocalTime::from_naive_time(&time, &memgraph);\n\n });\n\n };\n\n test_time(NaiveTime::from_hms_micro(0, 0, 0, 0), 0, 0);\n\n test_time(NaiveTime::from_hms_micro(23, 59, 59, 999_999), 999, 999);\n\n test_time(NaiveTime::from_hms_micro(1, 2, 3, 444_555), 444, 555);\n\n // Leaps seconds handling\n\n test_time(NaiveTime::from_hms_micro(23, 59, 59, 1_999_999), 999, 999);\n\n test_time(NaiveTime::from_hms_micro(1, 2, 3, 1_444_555), 444, 555);\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 38, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_to_chrono_duration() {\n\n let test_duration = |duration_to_test: chrono::Duration| {\n\n mock_mgp_once!(\n\n mgp_duration_get_microseconds_context,\n\n move |_, microseconds_ptr| unsafe {\n\n (*microseconds_ptr) = duration_to_test.num_microseconds().unwrap();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n }\n\n );\n\n\n\n with_dummy!(Duration, |duration: &Duration| {\n\n assert_eq!(duration.to_chrono_duration(), duration_to_test);\n\n });\n\n };\n\n test_duration(chrono::Duration::microseconds(0));\n\n test_duration(chrono::Duration::microseconds(-1));\n\n 
test_duration(chrono::Duration::microseconds(1));\n\n test_duration(chrono::Duration::microseconds(20_000));\n\n test_duration(chrono::Duration::microseconds(-23_456));\n\n test_duration(chrono::Duration::microseconds(2i64.pow(31)));\n\n test_duration(chrono::Duration::microseconds(-(2i64.pow(31))));\n\n test_duration(chrono::Duration::microseconds(std::i64::MAX));\n\n test_duration(chrono::Duration::microseconds(std::i64::MIN));\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 39, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_mgp_copy() {\n\n mock_mgp_once!(mgp_map_make_empty_context, |_, map_ptr_ptr| unsafe {\n\n (*map_ptr_ptr) = alloc_mgp_map();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_map_iter_items_context, |_, _, iter_ptr_ptr| unsafe {\n\n (*iter_ptr_ptr) = alloc_mgp_map_items_iterator();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(\n\n mgp_map_items_iterator_get_context,\n\n |_, item_ptr_ptr| unsafe {\n\n (*item_ptr_ptr) = null_mut();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n }\n\n );\n\n mock_mgp_once!(mgp_map_destroy_context, |ptr| unsafe {\n\n free(ptr as *mut c_void);\n\n });\n\n mock_mgp_once!(mgp_map_items_iterator_destroy_context, |ptr| unsafe {\n", "file_path": "rust/rsmgp-sys/src/map/tests.rs", "rank": 40, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_vertex_by_id() {\n\n mock_mgp_once!(mgp_graph_get_vertex_by_id_context, |_, _, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let value = memgraph.vertex_by_id(0);\n\n assert!(value.is_err());\n\n });\n\n}\n\n\n\n#[no_mangle]\n\nextern \"C\" fn dummy_c_func(\n\n _: *mut mgp_list,\n\n _: *mut mgp_graph,\n\n _: *mut mgp_result,\n\n _: *mut mgp_memory,\n\n) {\n\n}\n\n\n\nmacro_rules! 
mock_mgp_type_once {\n\n ($c_func_name:ident) => {\n\n mock_mgp_once!($c_func_name, |type_ptr_ptr| unsafe {\n\n (*type_ptr_ptr) = alloc_mgp_type();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n };\n\n}\n", "file_path": "rust/rsmgp-sys/src/memgraph/tests.rs", "rank": 41, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_vertices_iterator() {\n\n mock_mgp_once!(mgp_graph_iter_vertices_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let value = memgraph.vertices_iter();\n\n assert!(value.is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/memgraph/tests.rs", "rank": 42, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_duration_accessors() {\n\n let microseconds: i64 = 4;\n\n mock_mgp_once!(\n\n mgp_duration_get_microseconds_context,\n\n move |_, microseconds_ptr| unsafe {\n\n (*microseconds_ptr) = microseconds;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n }\n\n );\n\n\n\n with_dummy!(Duration, |duration: &Duration| {\n\n assert_eq!(duration.microseconds() as i64, microseconds);\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 43, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_labels_count() {\n\n mock_mgp_once!(mgp_vertex_labels_count_context, |_, labels_count| unsafe {\n\n (*labels_count) = 2;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n\n\n with_dummy!(Vertex, |vertex: &Vertex| {\n\n assert_eq!(vertex.labels_count().unwrap(), 2);\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/vertex/tests.rs", "rank": 44, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_properties_iterator() {\n\n mock_mgp_once!(mgp_edge_iter_properties_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(Edge, |edge: &Edge| {\n\n assert!(edge.properties().is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/edge/tests.rs", 
"rank": 45, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_mgp_copy() {\n\n mock_mgp_once!(mgp_path_copy_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n unsafe {\n\n let path = Path::mgp_copy(std::ptr::null_mut(), &memgraph);\n\n assert!(path.is_err());\n\n }\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/path/tests.rs", "rank": 46, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_mgp_copy() {\n\n mock_mgp_once!(mgp_list_size_context, |_, size_ptr| unsafe {\n\n (*size_ptr) = 1;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_list_make_empty_context, |_, _, list_ptr_ptr| unsafe {\n\n (*list_ptr_ptr) = alloc_mgp_list();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_list_at_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_list_append_context, |_, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n mock_mgp_once!(mgp_list_destroy_context, |ptr| unsafe {\n\n free(ptr as *mut c_void);\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n unsafe {\n\n let value = List::mgp_copy(std::ptr::null_mut(), &memgraph);\n\n assert!(value.is_err());\n\n }\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/list/tests.rs", "rank": 47, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_make_with_start() {\n\n mock_mgp_once!(mgp_path_make_with_start_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let vertex = Vertex::new(std::ptr::null_mut(), &memgraph);\n\n assert!(Path::make_with_start(&vertex, &memgraph).is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/path/tests.rs", "rank": 48, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_invalid_date() {\n\n let test_invalid_date = |date: NaiveDate| {\n\n with_dummy!(|memgraph: 
&Memgraph| {\n\n let result = Date::from_naive_date(&date, &memgraph);\n\n assert!(result.is_err());\n\n assert_eq!(\n\n result.err().unwrap(),\n\n Error::UnableToCreateDateFromNaiveDate\n\n );\n\n });\n\n };\n\n test_invalid_date(NaiveDate::from_ymd(-1, 12, 31));\n\n test_invalid_date(NaiveDate::from_ymd(10000, 1, 1));\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 49, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_to_naive_time() {\n\n let test_time = |time_to_test: NaiveTime| {\n\n mock_mgp_once!(\n\n mgp_local_time_timestamp_context,\n\n move |_, timestamp_ptr| unsafe {\n\n (*timestamp_ptr) = time_to_test.num_seconds_from_midnight() as i64 * 1_000_000\n\n + (time_to_test.nanosecond() / 1_000) as i64;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n }\n\n );\n\n\n\n with_dummy!(LocalTime, |time: &LocalTime| {\n\n assert_eq!(time.to_naive_time(), time_to_test);\n\n });\n\n };\n\n test_time(NaiveTime::from_hms_micro(0, 0, 0, 0));\n\n test_time(NaiveTime::from_hms_micro(23, 59, 59, 999_999));\n\n test_time(NaiveTime::from_hms_micro(1, 2, 3, 444_555));\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 50, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_make_empty() {\n\n mock_mgp_once!(mgp_map_make_empty_context, |_, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let value = Map::make_empty(&memgraph);\n\n assert!(value.is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/map/tests.rs", "rank": 51, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_create_record() {\n\n mock_mgp_once!(mgp_result_new_record_context, |_, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let result_record = ResultRecord::create(&memgraph);\n\n assert!(result_record.is_err());\n\n });\n\n}\n\n\n\nmacro_rules! 
mock_mgp_value_make_with_mem {\n\n ($c_func_name:ident) => {\n\n mock_mgp_once!($c_func_name, |_, _, value_ptr_ptr| unsafe {\n\n (*value_ptr_ptr) = alloc_mgp_value();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n };\n\n}\n\n\n\nmacro_rules! mock_mgp_value_make_without_mem {\n\n ($c_func_name:ident) => {\n\n mock_mgp_once!($c_func_name, |_, value_ptr_ptr| unsafe {\n\n (*value_ptr_ptr) = alloc_mgp_value();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n };\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/result/tests.rs", "rank": 52, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_mgp_copy() {\n\n mock_mgp_once!(mgp_edge_copy_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n unsafe {\n\n let value = Edge::mgp_copy(null_mut(), &memgraph);\n\n assert!(value.is_err());\n\n }\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/edge/tests.rs", "rank": 53, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_mgp_ptr() {\n\n with_dummy!(Path, |path: &Path| {\n\n let ptr = path.mgp_ptr();\n\n assert!(ptr.is_null());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/path/tests.rs", "rank": 54, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_make_empty() {\n\n mock_mgp_once!(mgp_list_make_empty_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let value = List::make_empty(0, &memgraph);\n\n assert!(value.is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/list/tests.rs", "rank": 55, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_to_mgp_value() {\n\n mock_mgp_once!(mgp_value_make_null_context, |_, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let value = Value::Null;\n\n let mgp_value = value.to_mgp_value(&memgraph);\n\n assert!(mgp_value.is_err());\n\n 
});\n\n}\n", "file_path": "rust/rsmgp-sys/src/value/tests.rs", "rank": 56, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_append_extend() {\n\n mock_mgp_once!(mgp_value_make_null_context, |_, value_ptr_ptr| unsafe {\n\n (*value_ptr_ptr) = alloc_mgp_value();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_list_append_extend_context, |_, _| {\n\n mgp_error::MGP_ERROR_INSUFFICIENT_BUFFER\n\n });\n\n mock_mgp_once!(mgp_value_destroy_context, |ptr| unsafe {\n\n free(ptr as *mut c_void);\n\n });\n\n\n\n with_dummy!(List, |list: &List| {\n\n assert!(list.append_extend(&Value::Null).is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/list/tests.rs", "rank": 57, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_insert_value() {\n\n // TODO(antaljanosbenjamin) Try to free the independently allocated types (list, map, etc)\n\n mock_mgp_once!(mgp_value_make_null_context, |_, value_ptr_ptr| unsafe {\n\n (*value_ptr_ptr) = alloc_mgp_value();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_value_make_with_mem!(mgp_value_make_bool_context);\n\n mock_mgp_value_make_with_mem!(mgp_value_make_int_context);\n\n mock_mgp_value_make_with_mem!(mgp_value_make_double_context);\n\n mock_mgp_value_make_with_mem!(mgp_value_make_string_context);\n\n\n\n mock_mgp_once!(mgp_list_size_context, |_, size_ptr| unsafe {\n\n (*size_ptr) = 0;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_list_make_empty_context, |_, _, list_ptr_ptr| unsafe {\n\n (*list_ptr_ptr) = alloc_mgp_list();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_value_make_without_mem!(mgp_value_make_list_context);\n", "file_path": "rust/rsmgp-sys/src/result/tests.rs", "rank": 58, "score": 81540.45492997873 }, { "content": "#[test]\n\n#[serial]\n\nfn test_empty_list_iter() {\n\n mock_mgp_once!(mgp_list_size_context, |_, size_ptr| unsafe {\n\n (*size_ptr) = 0;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n\n\n 
with_dummy!(List, |list: &List| {\n\n let iter = list.iter();\n\n assert!(iter.is_ok());\n\n let value = iter.unwrap().next();\n\n assert!(value.is_none());\n\n });\n\n}\n", "file_path": "rust/rsmgp-sys/src/list/tests.rs", "rank": 59, "score": 80713.18243146839 }, { "content": "#[test]\n\n#[serial]\n\nfn test_to_naive_date_time() {\n\n let test_date_time = |date: NaiveDate, time: NaiveTime| {\n\n mock_mgp_once!(\n\n mgp_local_date_time_get_year_context,\n\n move |_, year_ptr| unsafe {\n\n (*year_ptr) = date.year();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n }\n\n );\n\n mock_mgp_once!(\n\n mgp_local_date_time_get_month_context,\n\n move |_, month_ptr| unsafe {\n\n (*month_ptr) = date.month() as i32;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n }\n\n );\n\n mock_mgp_once!(\n\n mgp_local_date_time_get_day_context,\n\n move |_, day_ptr| unsafe {\n\n (*day_ptr) = date.day() as i32;\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 60, "score": 80713.18243146839 }, { "content": "#[test]\n\n#[serial]\n\nfn test_local_time_accessors() {\n\n let hour = 23;\n\n let minute = 1;\n\n let second = 2;\n\n let millisecond = 3;\n\n let microsecond = 4;\n\n mock_mgp_once!(mgp_local_time_get_hour_context, move |_, hour_ptr| unsafe {\n\n (*hour_ptr) = hour;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(\n\n mgp_local_time_get_minute_context,\n\n move |_, minute_ptr| unsafe {\n\n (*minute_ptr) = minute;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n }\n\n );\n\n mock_mgp_once!(\n\n mgp_local_time_get_second_context,\n\n move |_, second_ptr| unsafe {\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 61, "score": 80713.18243146839 }, { "content": "#[test]\n\n#[serial]\n\nfn test_empty_map_iter() {\n\n mock_mgp_once!(mgp_map_iter_items_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(Map, |map: &Map| {\n\n let iter = map.iter();\n\n assert!(iter.is_err());\n\n });\n\n}\n", "file_path": "rust/rsmgp-sys/src/map/tests.rs", 
"rank": 62, "score": 80713.18243146839 }, { "content": "#[test]\n\n#[serial]\n\nfn test_add_read_procedure() {\n\n mock_mgp_once!(\n\n mgp_module_add_read_procedure_context,\n\n |_, _, _, proc_ptr_ptr| unsafe {\n\n (*proc_ptr_ptr) = alloc_mgp_proc();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n }\n\n );\n\n let ctx_any = mgp_type_any_context();\n\n ctx_any.expect().times(3).returning(|type_ptr_ptr| unsafe {\n\n (*type_ptr_ptr) = alloc_mgp_type();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_type_once!(mgp_type_bool_context);\n\n mock_mgp_type_once!(mgp_type_number_context);\n\n mock_mgp_type_once!(mgp_type_int_context);\n\n mock_mgp_type_once!(mgp_type_float_context);\n\n mock_mgp_type_once!(mgp_type_string_context);\n\n mock_mgp_type_once!(mgp_type_map_context);\n\n mock_mgp_type_once!(mgp_type_node_context);\n", "file_path": "rust/rsmgp-sys/src/memgraph/tests.rs", "rank": 63, "score": 80713.18243146839 }, { "content": "#[test]\n\n#[serial]\n\nfn test_from_naive_date_time() {\n\n let test_date_time = |date: NaiveDate, time: NaiveTime, millis: i32, micros: i32| {\n\n let datetime = NaiveDateTime::new(date, time);\n\n mock_mgp_once!(\n\n mgp_local_date_time_from_parameters_context,\n\n move |local_date_time_params_ptr, _, local_time_ptr_ptr| unsafe {\n\n check_date_params(&*(*local_date_time_params_ptr).date_parameters, &datetime);\n\n check_local_time_params(\n\n &*(*local_date_time_params_ptr).local_time_parameters,\n\n &datetime,\n\n millis,\n\n micros,\n\n );\n\n (*local_time_ptr_ptr) = alloc_mgp_local_date_time();\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n }\n\n );\n\n mock_mgp_once!(mgp_local_date_time_destroy_context, |ptr| unsafe {\n\n free(ptr as *mut c_void);\n\n });\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 64, "score": 80713.18243146839 }, { "content": "#[test]\n\n#[serial]\n\nfn test_make_path_mgp_value() {\n\n mock_mgp_once!(mgp_path_copy_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n 
with_dummy!(|memgraph: &Memgraph| {\n\n let path = Path::new(null_mut(), &memgraph);\n\n let value = MgpValue::make_path(&path, &memgraph);\n\n assert!(value.is_err());\n\n });\n\n}\n\n\n\nmacro_rules! mock_mgp_value_is {\n\n ($c_func_name:ident, $value:expr) => {\n\n mock_mgp_once!(\n\n $c_func_name,\n\n |_, result: *mut ::std::os::raw::c_int| unsafe {\n\n (*result) = $value;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n }\n\n );\n\n };\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/value/tests.rs", "rank": 65, "score": 79916.57065550111 }, { "content": "#[test]\n\n#[serial]\n\nfn test_make_null_mgp_value() {\n\n mock_mgp_once!(mgp_value_make_null_context, |_, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let value = MgpValue::make_null(&memgraph);\n\n assert!(value.is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/value/tests.rs", "rank": 66, "score": 79916.57065550111 }, { "content": "#[test]\n\n#[serial]\n\nfn test_make_string_mgp_value() {\n\n mock_mgp_once!(mgp_value_make_string_context, |value, _, _| unsafe {\n\n assert_eq!(CStr::from_ptr(value), c_str!(\"test\"));\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let value = MgpValue::make_string(c_str!(\"test\"), &memgraph);\n\n assert!(value.is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/value/tests.rs", "rank": 67, "score": 79916.57065550111 }, { "content": "#[test]\n\n#[serial]\n\nfn test_make_list_mgp_value() {\n\n mock_mgp_once!(mgp_list_size_context, |_, size_ptr| unsafe {\n\n (*size_ptr) = 0;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_list_make_empty_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_value_make_list_context, |_, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let list = List::new(null_mut(), &memgraph);\n\n let value = 
MgpValue::make_list(&list, &memgraph);\n\n assert!(value.is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/value/tests.rs", "rank": 68, "score": 79916.57065550111 }, { "content": "#[test]\n\n#[serial]\n\nfn test_make_vertex_mgp_value() {\n\n mock_mgp_once!(mgp_vertex_copy_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let vertex = Vertex::new(null_mut(), &memgraph);\n\n let value = MgpValue::make_vertex(&vertex, &memgraph);\n\n assert!(value.is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/value/tests.rs", "rank": 69, "score": 79916.57065550111 }, { "content": "#[test]\n\n#[serial]\n\nfn test_make_double_mgp_value() {\n\n mock_mgp_once!(mgp_value_make_double_context, |value, _, _| {\n\n assert_eq!(value, 0.0);\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let value = MgpValue::make_double(0.0, &memgraph);\n\n assert!(value.is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/value/tests.rs", "rank": 70, "score": 79916.57065550111 }, { "content": "#[test]\n\n#[serial]\n\nfn test_make_edge_mgp_value() {\n\n mock_mgp_once!(mgp_edge_copy_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let edge = Edge::new(null_mut(), &memgraph);\n\n let value = MgpValue::make_edge(&edge, &memgraph);\n\n assert!(value.is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/value/tests.rs", "rank": 71, "score": 79916.57065550111 }, { "content": "#[test]\n\n#[serial]\n\nfn test_local_date_time_accessors() {\n\n let year = 1994;\n\n let month = 12;\n\n let day = 7;\n\n let hour = 23;\n\n let minute = 1;\n\n let second = 2;\n\n let millisecond = 3;\n\n let microsecond = 4;\n\n mock_mgp_once!(\n\n mgp_local_date_time_get_year_context,\n\n move |_, year_ptr| unsafe {\n\n (*year_ptr) = year;\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n }\n\n );\n\n 
mock_mgp_once!(\n\n mgp_local_date_time_get_month_context,\n\n move |_, month_ptr| unsafe {\n\n (*month_ptr) = month;\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 72, "score": 79916.57065550111 }, { "content": "#[test]\n\n#[serial]\n\nfn test_mgp_value_for_the_right_type() {\n\n mock_mgp_value_is!(mgp_value_is_null_context, 1);\n\n mock_mgp_value_is!(mgp_value_is_bool_context, 1);\n\n mock_mgp_value_is!(mgp_value_is_int_context, 1);\n\n mock_mgp_value_is!(mgp_value_is_double_context, 1);\n\n mock_mgp_value_is!(mgp_value_is_string_context, 1);\n\n mock_mgp_value_is!(mgp_value_is_list_context, 1);\n\n mock_mgp_value_is!(mgp_value_is_map_context, 1);\n\n mock_mgp_value_is!(mgp_value_is_vertex_context, 1);\n\n mock_mgp_value_is!(mgp_value_is_edge_context, 1);\n\n mock_mgp_value_is!(mgp_value_is_path_context, 1);\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let value = MgpValue::new(null_mut(), &memgraph);\n\n assert!(value.is_null());\n\n assert!(value.is_bool());\n\n assert!(value.is_int());\n\n assert!(value.is_double());\n\n assert!(value.is_string());\n\n assert!(value.is_list());\n\n assert!(value.is_map());\n\n assert!(value.is_vertex());\n\n assert!(value.is_edge());\n\n assert!(value.is_path());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/value/tests.rs", "rank": 73, "score": 79916.57065550111 }, { "content": "#[test]\n\n#[serial]\n\nfn test_mgp_value_for_the_wrong_type() {\n\n mock_mgp_value_is!(mgp_value_is_null_context, 0);\n\n mock_mgp_value_is!(mgp_value_is_bool_context, 0);\n\n mock_mgp_value_is!(mgp_value_is_int_context, 0);\n\n mock_mgp_value_is!(mgp_value_is_double_context, 0);\n\n mock_mgp_value_is!(mgp_value_is_string_context, 0);\n\n mock_mgp_value_is!(mgp_value_is_list_context, 0);\n\n mock_mgp_value_is!(mgp_value_is_map_context, 0);\n\n mock_mgp_value_is!(mgp_value_is_vertex_context, 0);\n\n mock_mgp_value_is!(mgp_value_is_edge_context, 0);\n\n mock_mgp_value_is!(mgp_value_is_path_context, 0);\n\n\n\n 
with_dummy!(|memgraph: &Memgraph| {\n\n let value = MgpValue::new(null_mut(), &memgraph);\n\n assert!(!value.is_null());\n\n assert!(!value.is_bool());\n\n assert!(!value.is_int());\n\n assert!(!value.is_double());\n\n assert!(!value.is_string());\n\n assert!(!value.is_list());\n\n assert!(!value.is_map());\n\n assert!(!value.is_vertex());\n\n assert!(!value.is_edge());\n\n assert!(!value.is_path());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/value/tests.rs", "rank": 74, "score": 79916.57065550111 }, { "content": "#[test]\n\n#[serial]\n\nfn test_make_int_mgp_value() {\n\n mock_mgp_once!(mgp_value_make_int_context, |value, _, _| {\n\n assert_eq!(value, 100);\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let value = MgpValue::make_int(100, &memgraph);\n\n assert!(value.is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/value/tests.rs", "rank": 75, "score": 79916.57065550111 }, { "content": "#[test]\n\n#[serial]\n\nfn test_make_map_mgp_value() {\n\n mock_mgp_once!(mgp_map_make_empty_context, |_, _| {\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_map_iter_items_context, |_, _, _| {\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_map_items_iterator_get_context, |_, _| {\n\n mgp_error::MGP_ERROR_NO_ERROR\n\n });\n\n mock_mgp_once!(mgp_value_make_map_context, |_, _| {\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let map = Map::new(null_mut(), &memgraph);\n\n let value = MgpValue::make_map(&map, &memgraph);\n\n assert!(value.is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/value/tests.rs", "rank": 76, "score": 79916.57065550111 }, { "content": "#[test]\n\n#[serial]\n\nfn test_make_true_bool_mgp_value() {\n\n mock_mgp_once!(mgp_value_make_bool_context, |value, _, _| {\n\n assert_eq!(value, 1);\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n 
let value = MgpValue::make_bool(true, &memgraph);\n\n assert!(value.is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/value/tests.rs", "rank": 77, "score": 79148.94607362406 }, { "content": "#[test]\n\n#[serial]\n\nfn test_make_false_bool_mgp_value() {\n\n mock_mgp_once!(mgp_value_make_bool_context, |value, _, _| {\n\n assert_eq!(value, 0);\n\n mgp_error::MGP_ERROR_UNABLE_TO_ALLOCATE\n\n });\n\n\n\n with_dummy!(|memgraph: &Memgraph| {\n\n let value = MgpValue::make_bool(false, &memgraph);\n\n assert!(value.is_err());\n\n });\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/value/tests.rs", "rank": 78, "score": 79148.94607362406 }, { "content": "fn check_local_time_params<T: Timelike>(\n\n local_time_params: &mgp_local_time_parameters,\n\n time: &T,\n\n millis: i32,\n\n micros: i32,\n\n) {\n\n assert_eq!(local_time_params.hour as u32, time.hour());\n\n assert_eq!(local_time_params.minute as u32, time.minute());\n\n assert_eq!(local_time_params.second as u32, time.second());\n\n assert_eq!(local_time_params.millisecond, millis);\n\n assert_eq!(local_time_params.microsecond, micros);\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 79, "score": 71034.0221570582 }, { "content": "fn check_date_params<T: Datelike>(date_params: &mgp_date_parameters, date: &T) {\n\n assert_eq!(date_params.year, date.year());\n\n assert_eq!(date_params.month as u32, date.month());\n\n assert_eq!(date_params.day as u32, date.day());\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/temporal/tests.rs", "rank": 80, "score": 58050.204028717446 }, { "content": "use crate::value::*;\n\n// Required here, if not present tests linking fails.\n\n#[double]\n\nuse crate::mgp::ffi;\n\nuse mockall_double::double;\n\n\n\npub struct Map {\n\n ptr: *mut mgp_map,\n\n memgraph: Memgraph,\n\n}\n\n\n\nimpl Drop for Map {\n\n fn drop(&mut self) {\n\n unsafe {\n\n if !self.ptr.is_null() {\n\n ffi::mgp_map_destroy(self.ptr);\n\n }\n\n }\n\n }\n\n}\n", "file_path": 
"rust/rsmgp-sys/src/map/mod.rs", "rank": 81, "score": 46191.49441073576 }, { "content": " pub fn to_chrono_duration(&self) -> chrono::Duration {\n\n chrono::Duration::microseconds(self.microseconds())\n\n }\n\n\n\n pub fn mgp_ptr(&self) -> *mut mgp_duration {\n\n self.ptr\n\n }\n\n pub fn set_mgp_ptr(&mut self, new_ptr: *mut mgp_duration) {\n\n self.ptr = new_ptr;\n\n }\n\n\n\n pub fn microseconds(&self) -> i64 {\n\n unsafe { invoke_mgp_func!(i64, ffi::mgp_duration_get_microseconds, self.ptr).unwrap() }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "rust/rsmgp-sys/src/temporal/mod.rs", "rank": 82, "score": 46191.41186883005 }, { "content": "// Required here, if not present tests linking fails.\n\n#[double]\n\nuse crate::mgp::ffi;\n\nuse mockall_double::double;\n\n\n\npub struct Path {\n\n ptr: *mut mgp_path,\n\n memgraph: Memgraph,\n\n}\n\nimpl Drop for Path {\n\n fn drop(&mut self) {\n\n unsafe {\n\n if !self.ptr.is_null() {\n\n ffi::mgp_path_destroy(self.ptr);\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Path {\n", "file_path": "rust/rsmgp-sys/src/path/mod.rs", "rank": 83, "score": 46191.010359583815 }, { "content": "use crate::list::*;\n\nuse crate::map::*;\n\nuse crate::memgraph::*;\n\nuse crate::mgp::*;\n\nuse crate::path::*;\n\nuse crate::value::*;\n\nuse crate::vertex::*;\n\n// Required here, if not present tests linking fails.\n\n#[double]\n\nuse crate::mgp::ffi;\n\nuse mockall_double::double;\n\n\n\npub struct ResultRecord {\n\n ptr: *mut mgp_result_record,\n\n memgraph: Memgraph,\n\n}\n\n\n\nimpl ResultRecord {\n\n pub fn create(memgraph: &Memgraph) -> Result<ResultRecord> {\n\n unsafe {\n", "file_path": "rust/rsmgp-sys/src/result/mod.rs", "rank": 84, "score": 46190.944814207316 }, { "content": "#[double]\n\nuse crate::mgp::ffi;\n\nuse mockall_double::double;\n\n\n\n// NOTE: Not possible to implement [std::iter::IntoIterator] because the [ListIterator] holds the\n\n// [List] reference which needs the lifetime specifier.\n\npub struct List {\n\n ptr: 
*mut mgp_list,\n\n memgraph: Memgraph,\n\n}\n\n\n\nimpl Drop for List {\n\n fn drop(&mut self) {\n\n unsafe {\n\n if !self.ptr.is_null() {\n\n ffi::mgp_list_destroy(self.ptr);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "rust/rsmgp-sys/src/list/mod.rs", "rank": 85, "score": 46190.130500633815 }, { "content": " self.minute(),\n\n self.second(),\n\n self.millisecond(),\n\n self.microsecond(),\n\n ))\n\n }\n\n\n\n pub fn mgp_ptr(&self) -> *mut mgp_local_date_time {\n\n self.ptr\n\n }\n\n pub fn set_mgp_ptr(&mut self, new_ptr: *mut mgp_local_date_time) {\n\n self.ptr = new_ptr;\n\n }\n\n\n\n pub fn year(&self) -> i32 {\n\n unsafe { invoke_mgp_func!(i32, ffi::mgp_local_date_time_get_year, self.ptr).unwrap() }\n\n }\n\n\n\n pub fn month(&self) -> u32 {\n\n unsafe {\n", "file_path": "rust/rsmgp-sys/src/temporal/mod.rs", "rank": 86, "score": 46189.971576974596 }, { "content": "\n\n pub fn mgp_ptr(&self) -> *mut mgp_date {\n\n self.ptr\n\n }\n\n pub fn set_mgp_ptr(&mut self, new_ptr: *mut mgp_date) {\n\n self.ptr = new_ptr;\n\n }\n\n\n\n pub fn year(&self) -> i32 {\n\n unsafe { invoke_mgp_func!(i32, ffi::mgp_date_get_year, self.ptr).unwrap() }\n\n }\n\n\n\n pub fn month(&self) -> u32 {\n\n unsafe { invoke_mgp_func!(i32, ffi::mgp_date_get_month, self.ptr).unwrap() as u32 }\n\n }\n\n\n\n pub fn day(&self) -> u32 {\n\n unsafe { invoke_mgp_func!(i32, ffi::mgp_date_get_day, self.ptr).unwrap() as u32 }\n\n }\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/temporal/mod.rs", "rank": 87, "score": 46189.72662746838 }, { "content": " }\n\n\n\n pub fn from_naive_time(from: &NaiveTime, memgraph: &Memgraph) -> Result<LocalTime> {\n\n let mut local_time_params = create_mgp_local_time_parameters(&from);\n\n\n\n unsafe {\n\n let local_time = LocalTime::new(invoke_mgp_func_with_res!(\n\n *mut mgp_local_time,\n\n Error::UnableToCreateLocalTimeFromNaiveTime,\n\n ffi::mgp_local_time_from_parameters,\n\n &mut local_time_params,\n\n memgraph.memory_ptr()\n\n )?);\n\n Ok(local_time)\n\n }\n\n }\n\n\n\n 
pub fn to_naive_time(&self) -> NaiveTime {\n\n // Ideally this function should use NaiveTime::from_hms_nano, but because the issue with\n\n // the LocalTime::minute method it cannot be used.\n", "file_path": "rust/rsmgp-sys/src/temporal/mod.rs", "rank": 88, "score": 46189.18674385706 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\npub struct Vertex {\n\n ptr: *mut mgp_vertex,\n\n memgraph: Memgraph,\n\n}\n\n\n\nimpl Drop for Vertex {\n\n fn drop(&mut self) {\n\n unsafe {\n\n if !self.ptr.is_null() {\n\n ffi::mgp_vertex_destroy(self.ptr);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/vertex/mod.rs", "rank": 89, "score": 46189.07260242142 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\npub struct Edge {\n\n ptr: *mut mgp_edge,\n\n memgraph: Memgraph,\n\n}\n\n\n\nimpl Drop for Edge {\n\n fn drop(&mut self) {\n\n unsafe {\n\n if !self.ptr.is_null() {\n\n ffi::mgp_edge_destroy(self.ptr);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "rust/rsmgp-sys/src/edge/mod.rs", "rank": 90, "score": 46189.07260242142 }, { "content": "\n\npub(crate) struct Duration {\n\n ptr: *mut mgp_duration,\n\n}\n\n\n\nimpl Drop for Duration {\n\n fn drop(&mut self) {\n\n unsafe {\n\n if !self.ptr.is_null() {\n\n ffi::mgp_duration_destroy(self.ptr);\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Duration {\n\n pub(crate) fn new(ptr: *mut mgp_duration) -> Duration {\n\n #[cfg(not(test))]\n\n assert!(\n\n !ptr.is_null(),\n", "file_path": "rust/rsmgp-sys/src/temporal/mod.rs", "rank": 91, "score": 46189.03930057684 }, { "content": " self.ptr,\n\n index\n\n )?;\n\n mgp_raw_value_to_value(c_value, &self.memgraph)\n\n }\n\n }\n\n\n\n pub fn iter(&self) -> Result<ListIterator> {\n\n Ok(ListIterator {\n\n list: self,\n\n position: 0,\n\n })\n\n }\n\n\n\n pub(crate) fn mgp_ptr(&self) -> *mut mgp_list {\n\n self.ptr\n\n }\n\n\n\n pub(crate) fn set_mgp_ptr(&mut self, new_ptr: *mut mgp_list) {\n\n self.ptr = new_ptr;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "rust/rsmgp-sys/src/list/mod.rs", 
"rank": 92, "score": 46188.876041202806 }, { "content": "use chrono::{Datelike, NaiveDate, NaiveDateTime, NaiveTime};\n\nuse mockall_double::double;\n\n\n\nconst MINIMUM_YEAR: i32 = 0;\n\nconst MAXIMUM_YEAR: i32 = 9999;\n\nconst MICROS_PER_SECOND: i64 = 1_000_000;\n\nconst NANOS_PER_MILLIS: u32 = 1_000_000;\n\nconst NANOS_PER_MICROS: u32 = 1_000;\n\nconst MICROS_PER_MILLIS: u32 = 1_000;\n\n\n\npub(crate) struct Date {\n\n ptr: *mut mgp_date,\n\n}\n\n\n\nimpl Drop for Date {\n\n fn drop(&mut self) {\n\n unsafe {\n\n if !self.ptr.is_null() {\n\n ffi::mgp_date_destroy(self.ptr);\n\n }\n", "file_path": "rust/rsmgp-sys/src/temporal/mod.rs", "rank": 93, "score": 46188.87020486074 }, { "content": "use crate::property::*;\n\nuse crate::result::*;\n\nuse crate::value::*;\n\n// Required here, if not present tests linking fails.\n\n#[double]\n\nuse crate::mgp::ffi;\n\nuse mockall_double::double;\n\n\n\npub struct VerticesIterator {\n\n ptr: *mut mgp_vertices_iterator,\n\n is_first: bool,\n\n memgraph: Memgraph,\n\n}\n\n\n\nimpl VerticesIterator {\n\n pub(crate) fn new(ptr: *mut mgp_vertices_iterator, memgraph: &Memgraph) -> VerticesIterator {\n\n #[cfg(not(test))]\n\n assert!(\n\n !ptr.is_null(),\n\n \"Unable to create vertices iterator because the given pointer is null.\"\n", "file_path": "rust/rsmgp-sys/src/vertex/mod.rs", "rank": 94, "score": 46188.72129459122 }, { "content": "use crate::result::*;\n\nuse crate::value::*;\n\nuse crate::vertex::Vertex;\n\n// Required here, if not present tests linking fails.\n\n#[double]\n\nuse crate::mgp::ffi;\n\nuse mockall_double::double;\n\n\n\npub struct EdgesIterator {\n\n ptr: *mut mgp_edges_iterator,\n\n is_first: bool,\n\n memgraph: Memgraph,\n\n}\n\n\n\nimpl EdgesIterator {\n\n pub(crate) fn new(ptr: *mut mgp_edges_iterator, memgraph: &Memgraph) -> EdgesIterator {\n\n #[cfg(not(test))]\n\n assert!(\n\n !ptr.is_null(),\n\n \"Unable to create edges iterator because the given pointer is null.\"\n", "file_path": 
"rust/rsmgp-sys/src/edge/mod.rs", "rank": 95, "score": 46188.657354885705 }, { "content": " let timestamp = self.timestamp();\n\n let seconds = (timestamp / MICROS_PER_SECOND) as u32;\n\n let micros = (timestamp % MICROS_PER_SECOND) as u32;\n\n NaiveTime::from_num_seconds_from_midnight(seconds, micros * NANOS_PER_MICROS)\n\n }\n\n\n\n pub fn mgp_ptr(&self) -> *mut mgp_local_time {\n\n self.ptr\n\n }\n\n pub fn set_mgp_ptr(&mut self, new_ptr: *mut mgp_local_time) {\n\n self.ptr = new_ptr;\n\n }\n\n\n\n pub fn hour(&self) -> u32 {\n\n unsafe { invoke_mgp_func!(i32, ffi::mgp_local_time_get_hour, self.ptr).unwrap() as u32 }\n\n }\n\n\n\n pub fn minute(&self) -> u32 {\n\n // As of Memgraph 2.0.1 there is a bug in the C API of mgp_local_time, which prevents the\n\n // usage of mgp_local_time_get_minute. Therefore this function cannot be used until the bug\n", "file_path": "rust/rsmgp-sys/src/temporal/mod.rs", "rank": 96, "score": 46188.526544430235 }, { "content": " ptr,\n\n memgraph: memgraph.clone(),\n\n }\n\n }\n\n\n\n pub(crate) fn mgp_ptr(&self) -> *mut mgp_value {\n\n self.ptr\n\n }\n\n\n\n pub fn to_value(&self) -> Result<Value> {\n\n unsafe { mgp_raw_value_to_value(self.mgp_ptr(), &self.memgraph) }\n\n }\n\n\n\n pub fn make_null(memgraph: &Memgraph) -> Result<MgpValue> {\n\n unsafe {\n\n let mgp_ptr = invoke_mgp_func_with_res!(\n\n *mut mgp_value,\n\n Error::UnableToMakeNullValue,\n\n ffi::mgp_value_make_null,\n\n memgraph.memory_ptr()\n", "file_path": "rust/rsmgp-sys/src/value/mod.rs", "rank": 97, "score": 46188.52068477809 }, { "content": " pub fn timestamp(&self) -> i64 {\n\n unsafe { invoke_mgp_func!(i64, ffi::mgp_local_time_timestamp, self.ptr).unwrap() }\n\n }\n\n}\n\n\n\npub(crate) struct LocalDateTime {\n\n ptr: *mut mgp_local_date_time,\n\n}\n\n\n\nimpl Drop for LocalDateTime {\n\n fn drop(&mut self) {\n\n unsafe {\n\n if !self.ptr.is_null() {\n\n ffi::mgp_local_date_time_destroy(self.ptr);\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl LocalDateTime {\n", 
"file_path": "rust/rsmgp-sys/src/temporal/mod.rs", "rank": 98, "score": 46188.43444896714 }, { "content": " result,\n\n memory,\n\n module,\n\n }\n\n }\n\n\n\n /// Creates a new object with all underlying data set to null. Used for the testing purposes.\n\n #[cfg(test)]\n\n pub(crate) fn new_default() -> Memgraph {\n\n Memgraph {\n\n args: std::ptr::null_mut(),\n\n graph: std::ptr::null_mut(),\n\n result: std::ptr::null_mut(),\n\n memory: std::ptr::null_mut(),\n\n module: std::ptr::null_mut(),\n\n }\n\n }\n\n\n\n /// Arguments passed to the procedure call.\n\n pub fn args(&self) -> Result<List> {\n", "file_path": "rust/rsmgp-sys/src/memgraph/mod.rs", "rank": 99, "score": 46188.35680261924 } ]
Rust
whisper/src/bin/whisper-create.rs
GiantPlantsSociety/graphite-rs
d2657ae3ddf110023417ec255f5192ac8fa83bfc
use humansize::{file_size_opts as options, FileSize}; use std::error::Error; use std::fs; use std::path::PathBuf; use std::process::exit; use structopt::StructOpt; use whisper::aggregation::AggregationMethod; use whisper::retention::Retention; use whisper::WhisperBuilder; #[derive(Debug, StructOpt)] #[structopt(name = "whisper-create")] struct Args { #[structopt(long = "overwrite")] overwrite: bool, #[structopt(long = "estimate")] estimate: bool, #[structopt(long = "sparse")] sparse: bool, #[structopt(long = "fallocate")] fallocate: bool, #[structopt(long = "xFilesFactor", default_value = "0.5")] x_files_factor: f32, #[structopt(long = "aggregationMethod", default_value = "average")] aggregation_method: AggregationMethod, #[structopt(name = "path", parse(from_os_str))] path: PathBuf, #[structopt( name = "retentions", help = r#"Specify lengths of time, for example: 60:1440 60 seconds per datapoint, 1440 datapoints = 1 day of retention 15m:8 15 minutes per datapoint, 8 datapoints = 2 hours of retention 1h:7d 1 hour per datapoint, 7 days of retention 12h:2y 12 hours per datapoint, 2 years of retention "#, required = true, min_values = 1 )] retentions: Vec<Retention>, } fn estimate_info(retentions: &[Retention]) { for (i, retention) in retentions.iter().enumerate() { println!( "Archive {}: {} points of {}s precision", i, &retention.points, &retention.seconds_per_point ); } let total_points: usize = retentions.iter().map(|x| x.points as usize).sum(); let size = (whisper::METADATA_SIZE + (retentions.len() * whisper::ARCHIVE_INFO_SIZE) + (total_points * whisper::POINT_SIZE)) as usize; let disk_size = (size as f64 / 4096.0).ceil() as usize * 4096; let custom_options = options::FileSizeOpts { decimal_places: 3, ..options::CONVENTIONAL }; println!(); println!( "Estimated Whisper DB Size: {} ({} bytes on disk with 4k blocks)", size.file_size(&custom_options).unwrap(), disk_size ); println!(); let numbers = [1, 5, 10, 50, 100, 500]; for number in &numbers { println!( 
"Estimated storage requirement for {}k metrics: {}", number, (number * 1000_usize * disk_size) .file_size(&custom_options) .unwrap() ); } } fn run(args: &Args) -> Result<(), Box<dyn Error>> { if args.estimate { estimate_info(&args.retentions); } else { if args.overwrite && args.path.exists() { println!( "Overwriting existing file: {}", &args.path.to_str().unwrap() ); fs::remove_file(&args.path)?; } WhisperBuilder::default() .add_retentions(&args.retentions) .x_files_factor(args.x_files_factor) .aggregation_method(args.aggregation_method) .sparse(args.sparse) .build(&args.path)?; let size = args.path.metadata()?.len(); println!("Created: {} ({} bytes)", &args.path.to_str().unwrap(), size); } Ok(()) } fn main() { let args = Args::from_args(); if let Err(err) = run(&args) { eprintln!("{}", err); exit(1); } }
use humansize::{file_size_opts as options, FileSize}; use std::error::Error; use std::fs; use std::path::PathBuf; use std::process::exit; use structopt::StructOpt; use whisper::aggregation::AggregationMethod; use whisper::retention::Retention; use whisper::WhisperBuilder; #[derive(Debug, StructOpt)] #[structopt(name = "whisper-create")] struct Args { #[structopt(long = "overwrite")] overwrite: bool, #[structopt(long = "estimate")] estimate: bool, #[structopt(long = "sparse")] sparse: bool, #[structopt(long = "fallocate")] fallocate: bool, #[structopt(long = "xFilesFactor", default_value = "0.5")] x_files_factor: f32, #[structopt(long = "aggregationMethod", default_value = "average")] aggregation_method: AggregationMethod, #[structopt(na
number, (number * 1000_usize * disk_size) .file_size(&custom_options) .unwrap() ); } } fn run(args: &Args) -> Result<(), Box<dyn Error>> { if args.estimate { estimate_info(&args.retentions); } else { if args.overwrite && args.path.exists() { println!( "Overwriting existing file: {}", &args.path.to_str().unwrap() ); fs::remove_file(&args.path)?; } WhisperBuilder::default() .add_retentions(&args.retentions) .x_files_factor(args.x_files_factor) .aggregation_method(args.aggregation_method) .sparse(args.sparse) .build(&args.path)?; let size = args.path.metadata()?.len(); println!("Created: {} ({} bytes)", &args.path.to_str().unwrap(), size); } Ok(()) } fn main() { let args = Args::from_args(); if let Err(err) = run(&args) { eprintln!("{}", err); exit(1); } }
me = "path", parse(from_os_str))] path: PathBuf, #[structopt( name = "retentions", help = r#"Specify lengths of time, for example: 60:1440 60 seconds per datapoint, 1440 datapoints = 1 day of retention 15m:8 15 minutes per datapoint, 8 datapoints = 2 hours of retention 1h:7d 1 hour per datapoint, 7 days of retention 12h:2y 12 hours per datapoint, 2 years of retention "#, required = true, min_values = 1 )] retentions: Vec<Retention>, } fn estimate_info(retentions: &[Retention]) { for (i, retention) in retentions.iter().enumerate() { println!( "Archive {}: {} points of {}s precision", i, &retention.points, &retention.seconds_per_point ); } let total_points: usize = retentions.iter().map(|x| x.points as usize).sum(); let size = (whisper::METADATA_SIZE + (retentions.len() * whisper::ARCHIVE_INFO_SIZE) + (total_points * whisper::POINT_SIZE)) as usize; let disk_size = (size as f64 / 4096.0).ceil() as usize * 4096; let custom_options = options::FileSizeOpts { decimal_places: 3, ..options::CONVENTIONAL }; println!(); println!( "Estimated Whisper DB Size: {} ({} bytes on disk with 4k blocks)", size.file_size(&custom_options).unwrap(), disk_size ); println!(); let numbers = [1, 5, 10, 50, 100, 500]; for number in &numbers { println!( "Estimated storage requirement for {}k metrics: {}",
random
[ { "content": "#[derive(Debug, StructOpt)]\n\n#[structopt(name = \"rrd2whisper\")]\n\nstruct Args {\n\n /// The xFilesFactor to use in the output file. Defaults to the input RRD's xFilesFactor.\n\n #[structopt(long = \"xFilesFactor\")]\n\n x_files_factor: Option<f64>,\n\n\n\n /// The consolidation function to fetch from on input and aggregationMethod to set on output. One of: average, last, max, min, avg_zero.\n\n #[structopt(long = \"aggregationMethod\", default_value = \"average\")]\n\n aggregation_method: rrd::AggregationMethod,\n\n\n\n /// Path to place created whisper file. Defaults to the RRD file's source path.\n\n #[structopt(long = \"destinationPath\", parse(from_os_str))]\n\n destination_path: Option<PathBuf>,\n\n\n\n #[structopt(parse(from_os_str))]\n\n rrd_path: PathBuf,\n\n}\n\n\n", "file_path": "rrd2whisper/src/main.rs", "rank": 0, "score": 109067.86150835617 }, { "content": "fn call_arg(input: &str) -> IResult<&str, (Option<String>, Arg), VerboseError<&str>> {\n\n tuple((opt(terminated(ident, c('='))), arg))(input)\n\n}\n\n\n", "file_path": "diamond-api/src/render_target/parser.rs", "rank": 1, "score": 104013.06249843663 }, { "content": "fn split_args<T>(all_args: Vec<(Option<String>, T)>) -> Option<(Vec<T>, Vec<(String, T)>)> {\n\n let mut args = Vec::new();\n\n let mut named_args = Vec::new();\n\n let mut named_arg_was_met = false;\n\n for (name, arg) in all_args {\n\n if !named_arg_was_met {\n\n match name {\n\n Some(name) => {\n\n named_arg_was_met = true;\n\n named_args.push((name, arg));\n\n }\n\n None => {\n\n args.push(arg);\n\n }\n\n }\n\n } else {\n\n match name {\n\n Some(name) => named_args.push((name, arg)),\n\n None => return None, // non-named argument after named one\n\n }\n\n }\n\n }\n\n Some((args, named_args))\n\n}\n\n\n", "file_path": "diamond-api/src/render_target/parser.rs", "rank": 2, "score": 95977.11722756189 }, { "content": "fn parse_call(argv: (String, Vec<(Option<String>, Arg)>)) -> Result<Call, String> {\n\n let 
(function, all_args) = argv;\n\n\n\n let (args, named_args) = split_args(all_args).ok_or_else(|| {\n\n format!(\n\n \"Bad call of {}: positional argument after named one.\",\n\n function\n\n )\n\n })?;\n\n Ok(Call {\n\n function,\n\n args,\n\n named_args,\n\n })\n\n}\n\n\n", "file_path": "diamond-api/src/render_target/parser.rs", "rank": 3, "score": 94531.42254167431 }, { "content": "pub fn info(filename: &Path, daemon: Option<&Path>, noflush: bool) -> Result<Info, Error> {\n\n let mut c_args = Vec::<*const c_char>::new();\n\n\n\n let info_str = CString::new(\"info\").unwrap();\n\n c_args.push(info_str.as_ptr());\n\n\n\n let c_filename = CString::new(filename.to_str().unwrap().as_bytes()).unwrap();\n\n c_args.push(c_filename.as_ptr());\n\n\n\n if let Some(daemon_path) = daemon {\n\n let daemon_str = CString::new(\"--daemon\").unwrap();\n\n c_args.push(daemon_str.as_ptr());\n\n let c_daemon_path = CString::new(daemon_path.to_str().unwrap().as_bytes()).unwrap();\n\n c_args.push(c_daemon_path.as_ptr());\n\n }\n\n\n\n if noflush {\n\n let noflush_str = CString::new(\"--noflush\").unwrap();\n\n c_args.push(noflush_str.as_ptr());\n\n }\n", "file_path": "rrd/src/lib.rs", "rank": 4, "score": 94128.60881841823 }, { "content": "fn file_name(path: &Path) -> Option<Cow<'_, str>> {\n\n if path.is_dir() {\n\n Some(path.file_name()?.to_string_lossy())\n\n } else if path.is_file() && path.extension() == Some(OsStr::new(\"wsp\")) {\n\n Some(path.file_stem()?.to_string_lossy())\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "diamond-api/src/storage/whisper_fs.rs", "rank": 5, "score": 80409.98807500083 }, { "content": "fn arg(input: &str) -> IResult<&str, Arg, VerboseError<&str>> {\n\n alt((\n\n map(literal_value, Arg::Literal),\n\n map(expression, Arg::Expression),\n\n ))(input)\n\n}\n\n\n", "file_path": "diamond-api/src/render_target/parser.rs", "rank": 6, "score": 70019.20416196954 }, { "content": "#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\nstruct 
MetricResponse {\n\n metrics: Vec<MetricResponseLeaf>,\n\n}\n\n\n", "file_path": "diamond-api/src/find.rs", "rank": 7, "score": 69385.38549857293 }, { "content": "fn template_arg(input: &str) -> IResult<&str, (Option<String>, LiteralValue), VerboseError<&str>> {\n\n let (input, arg) = opt(terminated(ident, c('=')))(input)?;\n\n let (input, value) = literal_value(input)?;\n\n Ok((input, (arg, value)))\n\n}\n\n\n", "file_path": "diamond-api/src/render_target/parser.rs", "rank": 8, "score": 69347.57952463669 }, { "content": "#[derive(Debug, Clone, PartialEq, PartialOrd, Serialize, Deserialize)]\n\nstruct JsonTreeLeaf {\n\n text: String,\n\n id: String,\n\n #[serde(rename = \"allowChildren\")]\n\n allow_children: u8,\n\n expandable: u8,\n\n leaf: u8,\n\n}\n\n\n\nimpl From<MetricResponseLeaf> for JsonTreeLeaf {\n\n fn from(m: MetricResponseLeaf) -> JsonTreeLeaf {\n\n if m.is_leaf {\n\n JsonTreeLeaf {\n\n text: m.name.0.join(\".\"),\n\n id: m.name.0.last().unwrap().to_owned(),\n\n allow_children: 0,\n\n expandable: 0,\n\n leaf: 1,\n\n }\n\n } else {\n", "file_path": "diamond-api/src/find.rs", "rank": 9, "score": 68181.58912147098 }, { "content": "#[cfg(not(target_os = \"unix\"))]\n\npub fn fallocate(fd: &mut File, offset: usize, len: usize) -> Result<()> {\n\n use std::io::{Seek, SeekFrom, Write};\n\n\n\n fd.seek(SeekFrom::Start(offset as u64))?;\n\n let zeroes = [0u8; 16384];\n\n let mut remaining = len;\n\n while remaining > zeroes.len() {\n\n fd.write_all(&zeroes)?;\n\n remaining -= zeroes.len();\n\n }\n\n fd.write_all(&zeroes[0..remaining])?;\n\n Ok(())\n\n}\n", "file_path": "whisper/src/fallocate.rs", "rank": 10, "score": 65640.57042674742 }, { "content": "fn format_none(float: Option<f64>) -> String {\n\n match float {\n\n Some(x) => format!(\"{:.1}\", x),\n\n None => \"None\".to_string(),\n\n }\n\n}\n\n\n\nimpl fmt::Display for DiffArchiveInfo {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n for archive in &self.archives {\n\n if f.alternate() 
{\n\n writeln!(\n\n f,\n\n \"Archive {} ({} of {} datapoints differ)\",\n\n archive.index, archive.points, archive.total\n\n )?;\n\n writeln!(\n\n f,\n\n \"{:>7} {:>11} {:>13} {:>13}\",\n\n \"\", \"timestamp\", \"value_a\", \"value_b\"\n", "file_path": "whisper/src/diff.rs", "rank": 11, "score": 59092.60084801936 }, { "content": "#[test]\n\nfn calling_without_args() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .assert()\n\n .code(1)\n\n .stdout(\"\")\n\n .stderr(predicate::str::contains(\"USAGE\").from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-set-xfilesfactor.rs", "rank": 12, "score": 54819.67566500962 }, { "content": "#[test]\n\nfn calling_without_args() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .assert()\n\n .code(1)\n\n .stdout(\"\")\n\n .stderr(predicate::str::contains(\"USAGE\").from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-find-corrupt-whisper-files.rs", "rank": 13, "score": 53193.522159939996 }, { "content": "fn points_to_values(points: &[Point], start: u32, step: u32) -> Vec<Option<f64>> {\n\n let mut values = Vec::with_capacity(points.len());\n\n for (i, point) in points.iter().enumerate() {\n\n if point.interval == start + (i as u32) * step {\n\n values.push(Some(point.value));\n\n } else {\n\n values.push(None);\n\n }\n\n }\n\n values\n\n}\n\n\n", "file_path": "whisper/src/lib.rs", "rank": 14, "score": 49304.748347210014 }, { "content": "fn run(args: &Args) -> Result<(), Box<dyn Error>> {\n\n let now = SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs();\n\n\n\n let rrd_info = rrd::info(&args.rrd_path, None, false).unwrap();\n\n\n\n let info: HashMap<String, rrd::Value> = rrd_info.iter().collect();\n\n\n\n let seconds_per_pdp = &info[\"step\"].as_long().unwrap();\n\n\n\n let rras = rrd_info.rras();\n\n\n\n let datasources = rrd_info.datasources();\n\n\n\n // Grab the archive configuration\n\n let relevant_rras: Vec<_> = rras\n\n .iter()\n\n 
.filter(|rra| rra.cf == args.aggregation_method)\n\n .collect();\n\n\n\n if relevant_rras.is_empty() {\n", "file_path": "rrd2whisper/src/main.rs", "rank": 15, "score": 40569.42144573964 }, { "content": "use std::fs::File;\n\nuse std::io::Result;\n\n\n\n#[cfg(target_os = \"unix\")]\n", "file_path": "whisper/src/fallocate.rs", "rank": 16, "score": 38655.52387455842 }, { "content": "fn create_file(path: &Path) -> Result<WhisperFile, BuilderError> {\n\n WhisperBuilder::default()\n\n .add_retention(Retention {\n\n seconds_per_point: 1,\n\n points: 300,\n\n })\n\n .add_retention(Retention {\n\n seconds_per_point: 60,\n\n points: 30,\n\n })\n\n .add_retention(Retention {\n\n seconds_per_point: 300,\n\n points: 12,\n\n })\n\n .x_files_factor(0.1)\n\n .build(path)\n\n}\n\n\n", "file_path": "whisper_tests/benches/tests.rs", "rank": 17, "score": 38109.49591960182 }, { "content": "fn file_update(\n\n fh: &mut fs::File,\n\n header: &WhisperMetadata,\n\n point: &Point,\n\n now: u32,\n\n) -> Result<(), io::Error> {\n\n let timestamp = point.interval;\n\n\n\n if now >= timestamp + header.max_retention || now < timestamp {\n\n return Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n \"Timestamp not covered by any archives in this database.\",\n\n ));\n\n }\n\n\n\n // Find the highest-precision archive that covers timestamp\n\n let archive_index = header\n\n .archives\n\n .iter()\n\n .position(|a| timestamp + a.retention() >= now)\n", "file_path": "whisper/src/lib.rs", "rank": 18, "score": 35584.55889572279 }, { "content": "use assert_cmd::prelude::*;\n\nuse predicates::prelude::*;\n\nuse std::error::Error;\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\nuse std::process::Command;\n\nuse tempfile::Builder;\n\n\n\nconst NAME: &str = \"whisper-set-xfilesfactor\";\n\n\n\n#[test]\n", "file_path": "whisper/tests/test-whisper-set-xfilesfactor.rs", "rank": 19, "score": 34416.16007917165 }, { "content": "fn file_update_many(\n\n fh: &mut fs::File,\n\n header: &WhisperMetadata,\n\n points: 
&[Point],\n\n now: u32,\n\n) -> Result<(), io::Error> {\n\n let mut archive_index = 0;\n\n let mut current_points = vec![];\n\n\n\n for point in points {\n\n while point.interval + header.archives[archive_index].retention() < now {\n\n // We can't fit any more points in this archive\n\n if !current_points.is_empty() {\n\n // Commit all the points we've found that it can fit\n\n current_points.reverse(); // Put points in chronological order\n\n __archive_update_many(fh, &header, archive_index, &current_points)?;\n\n current_points.clear();\n\n }\n\n archive_index += 1;\n\n if archive_index >= header.archives.len() {\n", "file_path": "whisper/src/lib.rs", "rank": 20, "score": 34296.06073145176 }, { "content": "use assert_cmd::prelude::*;\n\nuse predicates::prelude::*;\n\nuse std::error::Error;\n\nuse std::process::Command;\n\n\n\nconst NAME: &str = \"find-corrupt-whisper-files\";\n\n\n\n#[test]\n", "file_path": "whisper/tests/test-find-corrupt-whisper-files.rs", "rank": 21, "score": 33102.47236471286 }, { "content": "fn call_args(\n\n input: &str,\n\n) -> IResult<&str, (String, Vec<(Option<String>, Arg)>), VerboseError<&str>> {\n\n let (input, function) = ident(input)?;\n\n let (input, _) = c('(')(input)?;\n\n let (input, all_args) = separated_list0(c(','), call_arg)(input)?;\n\n let (input, _) = c(')')(input)?;\n\n\n\n Ok((input, (function, all_args)))\n\n}\n\n\n", "file_path": "diamond-api/src/render_target/parser.rs", "rank": 22, "score": 31989.678283315992 }, { "content": "#[test]\n\nfn bindgen_test_layout_rrd_file_t() {\n\n assert_eq!(\n\n ::std::mem::size_of::<rrd_file_t>(),\n\n 40usize,\n\n \"Size of rrd_file_t\"\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rrd_file_t>(),\n\n 8usize,\n\n \"Alignment of rrd_file_t\"\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<rrd_file_t>())).header_len as *const _ as usize },\n\n 0usize,\n\n concat!(\n\n \"Offset of field: \",\n\n stringify!(rrd_file_t),\n\n \"::\",\n\n stringify!(header_len)\n\n )\n", 
"file_path": "rrd-sys/src/tests.rs", "rank": 23, "score": 30935.57772837593 }, { "content": "#[test]\n\nfn bindgen_test_layout_rrd_simple_file_t() {\n\n assert_eq!(\n\n ::std::mem::size_of::<rrd_simple_file_t>(),\n\n 4usize,\n\n \"Size of rrd_simple_file_t\"\n\n );\n\n assert_eq!(\n\n ::std::mem::align_of::<rrd_simple_file_t>(),\n\n 4usize,\n\n \"Alignment of rrd_simple_file_t\"\n\n );\n\n assert_eq!(\n\n unsafe { &(*(::std::ptr::null::<rrd_simple_file_t>())).fd as *const _ as usize },\n\n 0usize,\n\n \"Offset of field rrd_simple_file_t::fd\"\n\n );\n\n}\n\n\n", "file_path": "rrd-sys/src/tests.rs", "rank": 24, "score": 29957.13265376381 }, { "content": "#[test]\n\nfn calling_help() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .args(&[\"--help\"])\n\n .assert()\n\n .success()\n\n .stdout(predicate::str::contains(\"USAGE\").from_utf8())\n\n .stderr(\"\");\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-set-xfilesfactor.rs", "rank": 25, "score": 28267.066285453187 }, { "content": "#[test]\n\nfn calling_without_args() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .assert()\n\n .code(1)\n\n .stdout(\"\")\n\n .stderr(predicate::str::contains(\"USAGE\").from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-fetch.rs", "rank": 26, "score": 28183.31832623266 }, { "content": "#[test]\n\nfn calling_without_args() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .assert()\n\n .code(1)\n\n .stdout(\"\")\n\n .stderr(predicate::str::contains(\"USAGE\").from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-dump.rs", "rank": 27, "score": 28183.31832623266 }, { "content": "#[test]\n\nfn calling_without_args() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .assert()\n\n .code(1)\n\n .stdout(\"\")\n\n .stderr(predicate::str::contains(\"USAGE\").from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-resize.rs", "rank": 
28, "score": 28183.31832623266 }, { "content": "#[test]\n\nfn calling_without_args() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .assert()\n\n .code(1)\n\n .stdout(\"\")\n\n .stderr(predicate::str::contains(\"USAGE\").from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-update.rs", "rank": 29, "score": 28183.31832623266 }, { "content": "#[test]\n\nfn calling_without_args() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .assert()\n\n .code(1)\n\n .stdout(\"\")\n\n .stderr(predicate::str::contains(\"USAGE\").from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-create.rs", "rank": 30, "score": 28183.31832623266 }, { "content": "#[test]\n\nfn calling_without_args() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .assert()\n\n .code(1)\n\n .stdout(\"\")\n\n .stderr(predicate::str::contains(\"USAGE\").from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-info.rs", "rank": 31, "score": 28183.31832623266 }, { "content": "#[test]\n\n#[allow(clippy::unreadable_literal)]\n\nfn test_merge_overwrite() -> Result<(), Box<dyn Error>> {\n\n let temp_dir = get_temp_dir();\n\n\n\n let path1 = get_file_path(&temp_dir, \"issue54_1\");\n\n let path2 = get_file_path(&temp_dir, \"issue54_2\");\n\n\n\n let now = 1528240800;\n\n\n\n let mut _file1 = create_and_update_points(\n\n &path1,\n\n &[\n\n Point {\n\n interval: now - 60,\n\n value: 60.0,\n\n },\n\n Point {\n\n interval: now - 180,\n\n value: 180.0,\n\n },\n\n Point {\n", "file_path": "whisper_tests/tests/test-whisper-merge.rs", "rank": 32, "score": 27500.51602518113 }, { "content": "#[test]\n\nfn calling_with_whole_param() -> Result<(), Box<dyn Error>> {\n\n let filename = \"info.wsp\";\n\n\n\n let path = Builder::new()\n\n .prefix(\"whisper\")\n\n .suffix(filename)\n\n .tempdir()?\n\n .path()\n\n .to_path_buf();\n\n\n\n let file_path = PathBuf::new().join(\"data\").join(filename);\n\n\n\n 
fs::copy(&file_path, &path)?;\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[path.to_str().unwrap(), \"1\"])\n\n .assert()\n\n .success()\n\n .stdout(predicate::str::contains(\"Updated xFilesFactor\").from_utf8())\n\n .stdout(predicate::str::contains(\"(0.5 -> 1)\").from_utf8())\n\n .stderr(\"\");\n\n Ok(())\n\n}\n", "file_path": "whisper/tests/test-whisper-set-xfilesfactor.rs", "rank": 33, "score": 27450.502178670537 }, { "content": "#[test]\n\nfn calling_with_invalid_path() -> Result<(), Box<dyn Error>> {\n\n #[cfg(unix)]\n\n let error_msg = \"No such file or directory (os error 2)\";\n\n #[cfg(windows)]\n\n let error_msg = \"The system cannot find the file specified. (os error 2)\";\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[\"invalid\", \"0.5\"])\n\n .assert()\n\n .code(1)\n\n .stderr(predicate::str::contains(error_msg).from_utf8());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-set-xfilesfactor.rs", "rank": 34, "score": 27450.502178670537 }, { "content": "#[test]\n\nfn calling_with_invalid_param() -> Result<(), Box<dyn Error>> {\n\n let filename = \"info.wsp\";\n\n\n\n let path = Builder::new()\n\n .prefix(\"whisper\")\n\n .suffix(filename)\n\n .tempdir()?\n\n .path()\n\n .to_path_buf();\n\n\n\n let file_path = PathBuf::new().join(\"data\").join(filename);\n\n\n\n fs::copy(&file_path, &path)?;\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[path.to_str().unwrap(), \"2.0\"])\n\n .assert()\n\n .code(1)\n\n .stderr(predicate::str::contains(\"Bad x_files_factor 2\").from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-set-xfilesfactor.rs", "rank": 35, "score": 27450.502178670537 }, { "content": "#[test]\n\nfn calling_with_fractional_number() -> Result<(), Box<dyn Error>> {\n\n let filename = \"info.wsp\";\n\n\n\n let path = Builder::new()\n\n .prefix(\"whisper\")\n\n .suffix(filename)\n\n .tempdir()?\n\n .path()\n\n .to_path_buf();\n\n\n\n let file_path = PathBuf::new().join(\"data\").join(filename);\n\n\n\n 
fs::copy(&file_path, &path)?;\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[path.to_str().unwrap(), \"0.1\"])\n\n .assert()\n\n .success()\n\n .stdout(predicate::str::contains(\"Updated xFilesFactor\").from_utf8())\n\n .stdout(predicate::str::contains(\"(0.5 -> 0.1)\").from_utf8())\n\n .stderr(\"\");\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-set-xfilesfactor.rs", "rank": 36, "score": 27450.502178670537 }, { "content": "#[test]\n\nfn calling_creating_with_present_file() -> Result<(), Box<dyn Error>> {\n\n let filename = \"info.wsp\";\n\n\n\n let path = Builder::new()\n\n .prefix(\"whisper\")\n\n .suffix(filename)\n\n .tempdir()?\n\n .path()\n\n .to_path_buf();\n\n\n\n let file_path = PathBuf::new().join(\"data\").join(filename);\n\n\n\n fs::copy(&file_path, &path)?;\n\n\n\n #[cfg(unix)]\n\n let error_msg = \"File exists (os error 17)\";\n\n #[cfg(windows)]\n\n let error_msg = \"The file exists. (os error 80)\";\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[path.to_str().unwrap(), \"60:1440\"])\n\n .assert()\n\n .code(1)\n\n .stderr(predicate::str::contains(error_msg).from_utf8());\n\n\n\n Ok(())\n\n}\n", "file_path": "whisper/tests/test-whisper-create.rs", "rank": 37, "score": 27360.97509498186 }, { "content": "#[test]\n\nfn calling_help() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .args(&[\"--help\"])\n\n .assert()\n\n .success()\n\n .stdout(predicate::str::contains(\"USAGE\").from_utf8())\n\n .stderr(\"\");\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-find-corrupt-whisper-files.rs", "rank": 38, "score": 27360.97509498186 }, { "content": "#[test]\n\nfn calling_as_plain_for_file_size() -> Result<(), Box<dyn Error>> {\n\n let filename = \"info.wsp\";\n\n\n\n let path = Builder::new()\n\n .prefix(\"whisper\")\n\n .suffix(filename)\n\n .tempdir()?\n\n .path()\n\n .to_path_buf();\n\n\n\n let file_path = PathBuf::new().join(\"data\").join(filename);\n\n\n\n fs::copy(&file_path, &path)?;\n\n\n\n 
Command::cargo_bin(NAME)?\n\n .args(&[path.to_str().unwrap(), \"fileSize\"])\n\n .assert()\n\n .success()\n\n .stdout(predicate::str::contains(\"34600\").from_utf8())\n\n .stderr(\"\");\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-info.rs", "rank": 39, "score": 27360.97509498186 }, { "content": "#[test]\n\nfn calling_as_plain_for_x_files_factor() -> Result<(), Box<dyn Error>> {\n\n let filename = \"info.wsp\";\n\n\n\n let path = Builder::new()\n\n .prefix(\"whisper\")\n\n .suffix(filename)\n\n .tempdir()?\n\n .path()\n\n .to_path_buf();\n\n\n\n let file_path = PathBuf::new().join(\"data\").join(filename);\n\n\n\n fs::copy(&file_path, &path)?;\n\n\n\n Command::cargo_bin(NAME)?\n\n .args(&[path.to_str().unwrap(), \"xFilesFactor\"])\n\n .assert()\n\n .success()\n\n .stdout(predicate::str::contains(\"0.5\").from_utf8())\n\n .stderr(\"\");\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-info.rs", "rank": 40, "score": 27360.97509498186 }, { "content": "#[test]\n\nfn calling_without_args() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .assert()\n\n .code(1)\n\n .stdout(\"\")\n\n .stderr(predicate::str::contains(\"USAGE\").from_utf8());\n\n Ok(())\n\n}\n\n\n", "file_path": "whisper/tests/test-whisper-set-aggregation-method.rs", "rank": 41, "score": 26600.745184925312 }, { "content": "#[test]\n\nfn calling_with_invalid_path() -> Result<(), Box<dyn Error>> {\n\n Command::cargo_bin(NAME)?\n\n .args(&[\"invalid\"])\n\n .assert()\n\n .code(1)\n\n .stderr(predicate::str::contains(\"invalid is not a directory or not exist!\").from_utf8());\n\n\n\n Ok(())\n\n}\n", "file_path": "whisper/tests/test-find-corrupt-whisper-files.rs", "rank": 42, "score": 26592.776975014676 }, { "content": "fn get_file_path(temp_dir: &TempDir, prefix: &str) -> PathBuf {\n\n let file_name = format!(\"{}_{}.rrd\", prefix, random_string(10));\n\n let mut path = temp_dir.path().to_path_buf();\n\n path.push(file_name);\n\n path\n\n}\n\n\n", "file_path": 
"rrd_tests/tests/fetch.rs", "rank": 43, "score": 25866.53731198553 }, { "content": "pub fn copy_test_file(temp_dir: &TempDir, filename: &str) -> PathBuf {\n\n let file_path = PathBuf::new().join(\"data\").join(filename);\n\n\n\n let tmp_file_path = temp_dir.path().join(filename);\n\n\n\n fs::copy(&file_path, &tmp_file_path).unwrap();\n\n\n\n tmp_file_path\n\n}\n\n\n", "file_path": "whisper_tests/src/lib.rs", "rank": 44, "score": 25178.90987685205 }, { "content": "pub fn get_file_path(temp_dir: &TempDir, prefix: &str) -> PathBuf {\n\n let file_name = format!(\"{}_{}.wsp\", prefix, random_string(10));\n\n let mut path = temp_dir.path().to_path_buf();\n\n path.push(file_name);\n\n path\n\n}\n\n\n", "file_path": "whisper_tests/src/lib.rs", "rank": 45, "score": 25178.90987685205 }, { "content": "use super::*;\n\nuse crate::aggregation::AggregationMethod;\n\nuse crate::retention::Retention;\n\nuse std::convert::AsRef;\n\nuse std::default;\n\nuse std::fmt::{Display, Formatter};\n\nuse std::io;\n\nuse std::path::Path;\n\n\n\npub struct WhisperBuilder {\n\n aggregation_method: AggregationMethod,\n\n x_files_factor: f32,\n\n retentions: Vec<Retention>,\n\n sparse: bool,\n\n}\n\n\n\nimpl default::Default for WhisperBuilder {\n\n fn default() -> Self {\n\n Self {\n\n aggregation_method: AggregationMethod::Average,\n", "file_path": "whisper/src/builder.rs", "rank": 46, "score": 13.046442848285633 }, { "content": "##### Diamond-pipe\n\n\n\n`echo \"this.is.correct 123 1545775266\" | ./diamond-pipe data-dir 60:100`\n\n\n\n```\n\nReceive metrics from pipe\n\n\n\nUSAGE:\n\n diamond-pipe [OPTIONS] <path> <retentions>...\n\n\n\nFLAGS:\n\n -h, --help Prints help information\n\n -V, --version Prints version information\n\n\n\nOPTIONS:\n\n --aggregationMethod <aggregation_method>\n\n Default function to use when aggregating values (average, sum, last, max, min, avg_zero, absmax, absmin)\n\n [default: average]\n\n --xFilesFactor <x_files_factor> Default value for the xFilesFactor for new 
files [default: 0.5]\n\n\n\nARGS:\n\n <path> Path to the directory with data files\n\n <retentions>... Default retentions for new files\n\n Specify lengths of time, for example:\n\n 60:1440 60 seconds per datapoint, 1440 datapoints = 1 day of retention\n\n 15m:8 15 minutes per datapoint, 8 datapoints = 2 hours of retention\n\n 1h:7d 1 hour per datapoint, 7 days of retention\n\n 12h:2y 12 hours per datapoint, 2 years of retention\n\n```\n", "file_path": "README.md", "rank": 47, "score": 11.812760633395477 }, { "content": " self\n\n }\n\n\n\n pub fn x_files_factor(mut self, x_files_factor: f32) -> Self {\n\n self.x_files_factor = x_files_factor;\n\n self\n\n }\n\n\n\n pub fn sparse(mut self, sparse: bool) -> Self {\n\n self.sparse = sparse;\n\n self\n\n }\n\n\n\n fn into_metadata(mut self) -> Result<WhisperMetadata, BuilderError> {\n\n if self.x_files_factor < 0.0 || self.x_files_factor > 1.0 {\n\n return Err(BuilderError::InvalidXFilesFactor(self.x_files_factor));\n\n }\n\n\n\n if self.retentions.is_empty() {\n\n return Err(BuilderError::NoRetentions);\n", "file_path": "whisper/src/builder.rs", "rank": 48, "score": 9.688422435823329 }, { "content": "use std::path::PathBuf;\n\nuse structopt::StructOpt;\n\n\n\n#[derive(Debug, Clone, StructOpt)]\n\n#[structopt(name = \"diamond-api\")]\n\npub struct Args {\n\n /// Path to data directory, default value is a current directory\n\n #[structopt(\n\n name = \"path\",\n\n short = \"d\",\n\n long = \"data-dir\",\n\n default_value = \".\",\n\n parse(from_os_str)\n\n )]\n\n pub path: PathBuf,\n\n\n\n /// Force to create data directory if it is absent\n\n #[structopt(name = \"force\", short = \"f\", long = \"force\")]\n\n pub force: bool,\n\n\n\n /// Port to listen on\n\n #[structopt(name = \"port\", short = \"p\", long = \"port\", default_value = \"8080\")]\n\n pub port: u16,\n\n}\n", "file_path": "diamond-api/src/opts.rs", "rank": 49, "score": 9.357078601213608 }, { "content": "use crate::opts::Args;\n\nuse 
crate::storage::Storage;\n\nuse std::sync::Arc;\n\n\n\n#[derive(Clone)]\n\npub struct Context {\n\n pub args: Args,\n\n pub storage: Arc<dyn Storage + Send + Sync>,\n\n}\n", "file_path": "diamond-api/src/context.rs", "rank": 50, "score": 8.981211849439438 }, { "content": "use crate::aggregation::AggregationMethod;\n\nuse crate::builder::WhisperBuilder;\n\nuse crate::error::Error;\n\nuse crate::interval::Interval;\n\n\n\nuse crate::point::Point;\n\nuse crate::retention::Retention;\n\nuse crate::WhisperFile;\n\n\n\nuse std::fs::{remove_file, rename};\n\nuse std::io;\n\nuse std::path::{Path, PathBuf};\n\nuse std::process::exit;\n\n\n", "file_path": "whisper/src/resize.rs", "rank": 51, "score": 8.43115524649318 }, { "content": " .merge(File::from(file))?,\n\n _ => s.merge(File::from_str(CONFIG, FileFormat::Toml))?,\n\n };\n\n\n\n s.try_into()\n\n }\n\n\n\n pub fn generate<P: AsRef<Path>>(path: P) -> Result<(), io::Error> {\n\n fs::write(path, CONFIG)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::fs::read_to_string;\n\n use std::net::IpAddr::V4;\n\n use tempfile::Builder;\n\n use whisper::retention::Retention;\n\n\n", "file_path": "diamond/src/settings.rs", "rank": 52, "score": 8.343131212547378 }, { "content": " }\n\n\n\n pub fn open<P: AsRef<Path>>(path: P) -> Result<Self, io::Error> {\n\n let mut file = fs::OpenOptions::new().read(true).write(true).open(path)?;\n\n let metadata = WhisperMetadata::read(&mut file)?;\n\n Ok(Self { metadata, file })\n\n }\n\n\n\n pub fn info(&self) -> &WhisperMetadata {\n\n &self.metadata\n\n }\n\n\n\n pub fn set_x_files_factor(&mut self, x_files_factor: f32) -> Result<(), io::Error> {\n\n if x_files_factor < 0.0 || x_files_factor > 1.0 {\n\n return Err(io::Error::new(\n\n io::ErrorKind::Other,\n\n format!(\"Bad x_files_factor {}\", x_files_factor),\n\n ));\n\n }\n\n\n", "file_path": "whisper/src/lib.rs", "rank": 53, "score": 8.242943456490602 }, { "content": "use std::borrow::Cow;\n\nuse 
std::ffi::OsStr;\n\nuse std::fs;\n\nuse std::iter::successors;\n\nuse std::path::{Path, PathBuf};\n\nuse whisper::interval::Interval;\n\nuse whisper::{ArchiveData, WhisperFile};\n\n\n\nuse super::storage::*;\n\nuse crate::error::ResponseError;\n\npub use crate::render_target::ast::{PathExpression, PathWord};\n\n\n\n#[derive(Clone)]\n\npub struct WhisperFileSystemStorage(pub PathBuf);\n\n\n\nimpl Storage for WhisperFileSystemStorage {\n\n fn find(\n\n &self,\n\n path_expression: &PathExpression,\n\n ) -> Result<Vec<MetricResponseLeaf>, ResponseError> {\n", "file_path": "diamond-api/src/storage/whisper_fs.rs", "rank": 54, "score": 8.03699232462311 }, { "content": " .x_files_factor(x_files_factor as f32)\n\n // .aggregation_method(args.aggregation_method) // TODO\n\n .build(&path)?;\n\n\n\n // let size = os.stat(path).st_size;\n\n // archiveConfig = \",\".join([\"%d:%d\" % ar for ar in archives]);\n\n // print(\"Created: %s (%d bytes) with archives: %s\" % (path, size, archiveConfig));\n\n\n\n println!(\"Migrating data\");\n\n let mut archive_number = archives.len();\n\n for archive in archives.iter().rev() {\n\n let retention = u64::from(archive.retention());\n\n let end_time = now - now % u64::from(archive.seconds_per_point);\n\n let start_time = end_time - retention;\n\n let data = rrd::fetch(\n\n &args.rrd_path,\n\n args.aggregation_method,\n\n Some(archive.seconds_per_point),\n\n start_time,\n\n end_time,\n", "file_path": "rrd2whisper/src/main.rs", "rank": 55, "score": 7.757578889981023 }, { "content": "use std::collections::HashMap;\n\nuse std::error::Error;\n\n/// https://oss.oetiker.ch/rrdtool/doc/rrdcreate.en.html\n\nuse std::path::PathBuf;\n\nuse std::process::exit;\n\nuse std::time::{SystemTime, UNIX_EPOCH};\n\nuse structopt::StructOpt;\n\nuse whisper::{point::Point, retention::Retention, WhisperBuilder};\n\n\n\n// # Ignore SIGPIPE\n\n// signal.signal(signal.SIGPIPE, signal.SIG_DFL)\n\n\n\n#[derive(Debug, StructOpt)]\n\n#[structopt(name = 
\"rrd2whisper\")]\n", "file_path": "rrd2whisper/src/main.rs", "rank": 56, "score": 7.675634321883874 }, { "content": "use std::fmt::{Display, Formatter, Result};\n\nuse std::io;\n\nuse std::num::{ParseFloatError, ParseIntError};\n\nuse std::path::PathBuf;\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n Io(io::Error),\n\n FileNotExist(PathBuf),\n\n Kind(String),\n\n}\n\n\n\nimpl Display for Error {\n\n fn fmt(&self, f: &mut Formatter) -> Result {\n\n match self {\n\n Error::Io(e) => write!(f, \"{}\", e),\n\n Error::FileNotExist(e) => write!(f, \"[ERROR] File {:#?} does not exist!\", e),\n\n Error::Kind(e) => write!(f, \"{}\", e),\n\n }\n\n }\n", "file_path": "whisper/src/error.rs", "rank": 57, "score": 7.654047633461558 }, { "content": " pub x_files_factor: f32,\n\n pub retentions: Vec<Retention>,\n\n pub aggregation_method: AggregationMethod,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Deserialize)]\n\npub struct Settings {\n\n pub db_path: PathBuf,\n\n pub tcp: Net,\n\n pub udp: Net,\n\n pub whisper: WhisperConfig,\n\n}\n\n\n\nimpl Settings {\n\n pub fn new(file: Option<PathBuf>) -> Result<Self, ConfigError> {\n\n let mut s = Config::default();\n\n\n\n match file {\n\n Some(file) => s\n\n .merge(File::from_str(CONFIG, FileFormat::Toml))?\n", "file_path": "diamond/src/settings.rs", "rank": 58, "score": 7.652790724558034 }, { "content": "use super::*;\n\nuse crate::interval::Interval;\n\nuse std::io;\n\nuse std::path::Path;\n\n\n\n/**\n\n * Merges the data from one whisper file into another. Each file must have\n\n * the same archive configuration. 
time_from and time_to can optionally be\n\n * specified for the merge.\n\n */\n", "file_path": "whisper/src/merge.rs", "rank": 59, "score": 7.537855821623466 }, { "content": " file: fs::File,\n\n}\n\n\n\nimpl WhisperFile {\n\n fn create<P: AsRef<Path>>(\n\n header: &WhisperMetadata,\n\n path: P,\n\n sparse: bool,\n\n ) -> Result<Self, io::Error> {\n\n let mut metainfo_bytes = Vec::<u8>::new();\n\n header.write(&mut metainfo_bytes)?;\n\n\n\n let mut fh = fs::OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .create_new(true)\n\n .open(path)?;\n\n\n\n // if LOCK {\n\n // fcntl.flock(fh.fileno(), fcntl.LOCK_EX)\n", "file_path": "whisper/src/lib.rs", "rank": 60, "score": 7.365099320234808 }, { "content": "use super::*;\n\nuse crate::interval::Interval;\n\nuse std::fmt;\n\nuse std::io;\n\nuse std::path::Path;\n\n\n\n#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]\n\npub struct DiffPoint {\n\n #[serde(rename = \"timestamp\")]\n\n pub interval: u32,\n\n #[serde(rename = \"value_a\")]\n\n pub value1: Option<f64>,\n\n #[serde(rename = \"value_b\")]\n\n pub value2: Option<f64>,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct DiffArchive {\n\n #[serde(rename = \"archive\")]\n\n pub index: usize,\n", "file_path": "whisper/src/diff.rs", "rank": 61, "score": 7.309770992694575 }, { "content": "use bencher::Bencher;\n\nuse bencher::{benchmark_group, benchmark_main};\n\nuse std::path::Path;\n\nuse std::time::{SystemTime, UNIX_EPOCH};\n\nuse whisper::builder::{BuilderError, WhisperBuilder};\n\nuse whisper::interval::Interval;\n\nuse whisper::point::Point;\n\nuse whisper::retention::Retention;\n\nuse whisper::WhisperFile;\n\nuse whisper_tests::*;\n\n\n\nconst SECONDS_AGO: u32 = 3500;\n\nconst VALUE_STEP: f64 = 0.2;\n\n\n", "file_path": "whisper_tests/benches/tests.rs", "rank": 62, "score": 7.23534296511594 }, { "content": " for datasource in &datasources {\n\n let suffix = if datasources.len() > 1 {\n\n format!(\"_{}\", datasource)\n\n } 
else {\n\n String::new()\n\n };\n\n\n\n let destination_directory = args\n\n .destination_path\n\n .as_ref()\n\n .unwrap_or_else(|| &args.rrd_path);\n\n let destination_name = format!(\n\n \"{}{}.wsp\",\n\n args.rrd_path.file_stem().unwrap().to_str().unwrap(),\n\n suffix\n\n );\n\n let path = destination_directory.with_file_name(destination_name);\n\n\n\n let mut whisper_file = WhisperBuilder::default()\n\n .add_retentions(&archives)\n", "file_path": "rrd2whisper/src/main.rs", "rank": 63, "score": 7.150045587305434 }, { "content": "pub mod ast;\n\nmod parser;\n\n\n\npub use ast::*;\n\npub use std::convert::TryFrom;\n\npub use std::str::FromStr;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_from_str() {\n\n let _ex: ast::Expression = \"template(average(emea.events\\\\[2019\\\\].clicks,n=7),skip_empty=false,none=none)|aliasByNode(1)|movingAverage(\\\"5min\\\")\".parse().unwrap();\n\n }\n\n\n\n #[test]\n\n fn pathword_to_regex() {\n\n assert_eq!(\n\n PathExpression::from_str(\"just_a_metric\").unwrap().0[0]\n", "file_path": "diamond-api/src/render_target/mod.rs", "rank": 64, "score": 7.0946503810982104 }, { "content": "use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::collections::HashSet;\n\nuse std::fs;\n\nuse std::io::{self, Read, Seek, Write};\n\nuse std::path::Path;\n\n\n\n/*\n\n# This module is an implementation of the Whisper database API\n\n# Here is the basic layout of a whisper data file\n\n#\n\n# File = Header,Data\n\n# Header = Metadata,ArchiveInfo+\n\n# Metadata = aggregationType,maxRetention,xFilesFactor,archiveCount\n\n# ArchiveInfo = Offset,SecondsPerPoint,Points\n\n# Data = Archive+\n\n# Archive = Point+\n\n# Point = timestamp,value\n\n\n\ntry:\n", "file_path": "whisper/src/lib.rs", "rank": 65, "score": 7.087679133149898 }, { "content": "use lazy_static::lazy_static;\n\nuse regex::Regex;\n\nuse std::convert::From;\n\nuse std::error::Error;\n\nuse 
std::fmt::{Display, Formatter};\n\nuse std::fs;\n\nuse std::num::{ParseFloatError, ParseIntError};\n\nuse std::path::{Path, PathBuf};\n\nuse std::str::FromStr;\n\nuse std::time::{SystemTime, UNIX_EPOCH};\n\nuse whisper::builder::WhisperBuilder;\n\nuse whisper::point::Point;\n\nuse whisper::WhisperFile;\n\n\n\npub mod settings;\n\n\n\nuse settings::Settings;\n\nuse settings::WhisperConfig;\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n", "file_path": "diamond/src/lib.rs", "rank": 66, "score": 6.847302558493815 }, { "content": " let method: &str = args.aggregation_method.into();\n\n return Err(format!(\n\n \"[ERROR] Unable to find any RRAs with consolidation function: {}\",\n\n method\n\n )\n\n .into());\n\n }\n\n\n\n let archives: Vec<_> = relevant_rras\n\n .iter()\n\n .map(|rra| Retention {\n\n seconds_per_point: (rra.pdp_per_row * seconds_per_pdp) as u32,\n\n points: rra.rows as u32,\n\n })\n\n .collect();\n\n\n\n let x_files_factor: f64 = args\n\n .x_files_factor\n\n .unwrap_or_else(|| relevant_rras.last().unwrap().xff);\n\n\n", "file_path": "rrd2whisper/src/main.rs", "rank": 67, "score": 6.452568403863912 }, { "content": "pub mod builder;\n\npub mod diff;\n\npub mod error;\n\nmod fallocate;\n\npub mod fill;\n\npub mod interval;\n\npub mod merge;\n\npub mod point;\n\npub mod resize;\n\npub mod retention;\n\n\n\nuse crate::aggregation::*;\n\nuse crate::archive_info::*;\n\nuse crate::interval::*;\n\nuse crate::point::*;\n\n\n\npub use crate::builder::WhisperBuilder;\n\n\n\npub const METADATA_SIZE: usize = 16;\n\npub const ARCHIVE_INFO_SIZE: usize = 12;\n", "file_path": "whisper/src/lib.rs", "rank": 68, "score": 6.420567924886 }, { "content": " unindent(\n\n \"\n\n maxRetention: 172800\n\n xFilesFactor: 0.5\n\n aggregationMethod: average\n\n fileSize: 34600\n\n \",\n\n )\n\n .as_str(),\n\n )\n\n .from_utf8(),\n\n )\n\n .stdout(\n\n predicate::str::contains(\n\n unindent(\n\n \"\n\n Archive 0\n\n retention: 86400\n\n secondsPerPoint: 60\n\n points: 1440\n", "file_path": 
"whisper/tests/test-whisper-info.rs", "rank": 69, "score": 6.176181940365032 }, { "content": " points: 5,\n\n },\n\n Retention {\n\n seconds_per_point: 120,\n\n points: 5,\n\n },\n\n ];\n\n\n\n whisper::resize::resize(\n\n &path1,\n\n Some(&path2),\n\n retentions,\n\n 0.5,\n\n AggregationMethod::Average,\n\n false,\n\n true,\n\n now,\n\n )?;\n\n\n\n let mut file2 = WhisperFile::open(&path2)?;\n", "file_path": "whisper_tests/tests/test-whisper-resize.rs", "rank": 70, "score": 6.087337667025538 }, { "content": "use std::collections::BTreeSet;\n\nuse std::fmt;\n\n\n\n// Literal\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum LiteralValue {\n\n Boolean(bool),\n\n Integer(i64),\n\n Float(f64),\n\n String(String),\n\n None,\n\n}\n\n\n\n// Path expression\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum PathElement {\n\n Variable(String),\n\n Partial(String),\n", "file_path": "diamond-api/src/render_target/ast.rs", "rank": 71, "score": 6.048088846967993 }, { "content": " unindent(\n\n \"\n\nMeta data:\n\n aggregation method: average\n\n max retention: 600\n\n xFilesFactor: 0.5\",\n\n )\n\n .as_str(),\n\n )\n\n .from_utf8(),\n\n )\n\n .stdout(\n\n predicate::str::contains(\n\n unindent(\n\n \"\n\nArchive 0 info:\n\n offset: 40\n\n seconds per point: 60\n\n points: 5\n\n retention: 300\n", "file_path": "whisper/tests/test-whisper-dump.rs", "rank": 72, "score": 6.0010131931684345 }, { "content": " unindent(\n\n \"\n\nMeta data:\n\n aggregation method: average\n\n max retention: 600\n\n xFilesFactor: 0.5\",\n\n )\n\n .as_str(),\n\n )\n\n .from_utf8(),\n\n )\n\n .stdout(\n\n predicate::str::contains(\n\n unindent(\n\n \"\n\nArchive 0 info:\n\n offset: 40\n\n seconds per point: 60\n\n points: 5\n\n retention: 300\n", "file_path": "whisper/tests/test-whisper-dump.rs", "rank": 73, "score": 6.0010131931684345 }, { "content": " }];\n\n\n\n whisper::resize::resize(\n\n &path1,\n\n Some(&path2),\n\n retentions,\n\n 0.5,\n\n AggregationMethod::Average,\n\n true,\n\n true,\n\n now,\n\n 
)?;\n\n\n\n let mut file2 = WhisperFile::open(&path2)?;\n\n\n\n let points = file2.dump(60)?;\n\n\n\n assert_eq!(points.len(), 5, \"Should be 5 points\");\n\n\n\n for delta in 1..6 {\n", "file_path": "whisper_tests/tests/test-whisper-resize.rs", "rank": 74, "score": 5.835506671065001 }, { "content": " }];\n\n\n\n whisper::resize::resize(\n\n &path1,\n\n Some(&path2),\n\n retentions,\n\n 0.5,\n\n AggregationMethod::Average,\n\n false,\n\n true,\n\n now,\n\n )?;\n\n\n\n let mut file2 = WhisperFile::open(&path2)?;\n\n\n\n let points = file2.dump(60)?;\n\n\n\n assert_eq!(points.len(), 5, \"Should be 5 points\");\n\n\n\n for delta in 1..6 {\n", "file_path": "whisper_tests/tests/test-whisper-resize.rs", "rank": 75, "score": 5.835506671065001 }, { "content": " }];\n\n\n\n whisper::resize::resize(\n\n &path1,\n\n Some(&path2),\n\n retentions,\n\n 0.5,\n\n AggregationMethod::Average,\n\n true,\n\n true,\n\n now,\n\n )?;\n\n\n\n let mut file2 = WhisperFile::open(&path2)?;\n\n\n\n let points = file2.dump(60)?;\n\n\n\n for delta in 1..10 {\n\n assert!(\n\n points.iter().any(|p| p.interval == (now - delta * 60)\n", "file_path": "whisper_tests/tests/test-whisper-resize.rs", "rank": 76, "score": 5.756130315882153 }, { "content": " }];\n\n\n\n whisper::resize::resize(\n\n &path1,\n\n Some(&path2),\n\n retentions,\n\n 0.5,\n\n AggregationMethod::Average,\n\n false,\n\n true,\n\n now,\n\n )?;\n\n\n\n let mut file2 = WhisperFile::open(&path2)?;\n\n\n\n let points = file2.dump(60)?;\n\n\n\n for delta in 1..10 {\n\n assert!(\n\n points.iter().any(|p| p.interval == (now - delta * 60)\n", "file_path": "whisper_tests/tests/test-whisper-resize.rs", "rank": 77, "score": 5.756130315882153 }, { "content": " }],\n\n x_files_factor: 0.5,\n\n aggregation_method: AggregationMethod::Average,\n\n };\n\n let now = 1_545_778_348;\n\n line_update(message, &dir, &config, now)?;\n\n\n\n assert_eq!(\n\n file.dump(1)?[0],\n\n Point {\n\n interval: now - 10,\n\n value: 123.0\n\n }\n\n );\n\n\n\n Ok(())\n\n 
}\n\n\n\n #[test]\n\n fn update_silently_with_absent_wsp() -> Result<(), io::Error> {\n", "file_path": "diamond/src/lib.rs", "rank": 78, "score": 5.530449686891895 }, { "content": " pub step: u32,\n\n pub values: Vec<Option<f64>>,\n\n}\n\n\n\nimpl ArchiveData {\n\n pub fn points(&self) -> Vec<Point> {\n\n (self.from_interval..self.until_interval)\n\n .step_by(self.step as usize)\n\n .zip(&self.values)\n\n .filter_map(|(interval, value)| value.map(|value| Point { interval, value }))\n\n .collect()\n\n }\n\n\n\n pub fn filter_out(&self, f: &dyn Fn(&Option<f64>) -> bool) -> ArchiveData {\n\n ArchiveData {\n\n values: self.values.clone().into_iter().filter(f).collect(),\n\n ..*self\n\n }\n\n }\n\n}\n\n\n", "file_path": "whisper/src/lib.rs", "rank": 79, "score": 5.523417446667134 }, { "content": "pub const POINT_SIZE: usize = 12;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct WhisperMetadata {\n\n pub aggregation_method: AggregationMethod,\n\n pub max_retention: u32,\n\n pub x_files_factor: f32,\n\n pub archives: Vec<ArchiveInfo>,\n\n}\n\n\n\nimpl WhisperMetadata {\n\n pub fn read<R: Read + Seek>(fh: &mut R) -> Result<Self, io::Error> {\n\n fh.seek(io::SeekFrom::Start(0))?;\n\n\n\n let aggregation_type = fh.read_u32::<BigEndian>()?;\n\n let max_retention = fh.read_u32::<BigEndian>()?;\n\n let x_files_factor = fh.read_f32::<BigEndian>()?;\n\n let archive_count = fh.read_u32::<BigEndian>()?;\n\n\n\n let aggregation_method =\n", "file_path": "whisper/src/lib.rs", "rank": 80, "score": 5.360391886728788 }, { "content": " use super::*;\n\n\n\n #[test]\n\n fn test_display() {\n\n assert_eq!(AggregationMethod::Average.to_string(), \"average\");\n\n assert_eq!(AggregationMethod::Sum.to_string(), \"sum\");\n\n assert_eq!(AggregationMethod::Last.to_string(), \"last\");\n\n assert_eq!(AggregationMethod::Max.to_string(), \"max\");\n\n assert_eq!(AggregationMethod::Min.to_string(), \"min\");\n\n assert_eq!(AggregationMethod::AvgZero.to_string(), \"avg_zero\");\n\n 
assert_eq!(AggregationMethod::AbsMax.to_string(), \"absmax\");\n\n assert_eq!(AggregationMethod::AbsMin.to_string(), \"absmin\");\n\n\n\n assert_eq!(AggregationMethod::default().to_string(), \"average\");\n\n }\n\n\n\n #[test]\n\n fn test_convert() {\n\n assert_eq!(\n\n AggregationMethod::from_str(&AggregationMethod::Average.to_string()),\n", "file_path": "whisper/src/aggregation.rs", "rank": 81, "score": 5.2500359146983016 }, { "content": "use byteorder::{BigEndian, WriteBytesExt};\n\nuse std::error::Error;\n\nuse std::fs;\n\nuse whisper::point::Point;\n\nuse whisper::retention::*;\n\nuse whisper::*;\n\nuse whisper_tests::*;\n\n\n", "file_path": "whisper_tests/tests/update.rs", "rank": 82, "score": 5.210427716361435 }, { "content": "use std::error::Error;\n\nuse whisper::point::*;\n\nuse whisper::retention::*;\n\nuse whisper::*;\n\nuse whisper_tests::*;\n\n\n\n#[test]\n", "file_path": "whisper_tests/tests/issue22.rs", "rank": 83, "score": 5.199320556371818 }, { "content": " let dir = Builder::new()\n\n .prefix(\"diamond\")\n\n .tempdir()\n\n .unwrap()\n\n .path()\n\n .to_path_buf();\n\n\n\n let message = \"this.is.correct1 1545778338 124\";\n\n\n\n let config = WhisperConfig {\n\n retentions: vec![Retention {\n\n seconds_per_point: 1,\n\n points: 1000,\n\n }],\n\n x_files_factor: 0.5,\n\n aggregation_method: AggregationMethod::Average,\n\n };\n\n let now = 1_545_778_348;\n\n line_update(message, &dir, &config, now)?;\n\n\n", "file_path": "diamond/src/lib.rs", "rank": 84, "score": 5.191228799368091 }, { "content": "use rand::{\n\n distributions::{Alphanumeric, DistString},\n\n thread_rng,\n\n};\n\nuse std::error::Error;\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\nuse tempfile::{Builder, TempDir};\n\n\n\nuse whisper::point::*;\n\nuse whisper::retention::*;\n\nuse whisper::*;\n\n\n", "file_path": "whisper_tests/src/lib.rs", "rank": 85, "score": 5.181684850817601 }, { "content": "use assert_cmd::prelude::*;\n\nuse predicates::prelude::*;\n\nuse 
std::error::Error;\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\nuse std::process::Command;\n\nuse tempfile::Builder;\n\nuse unindent::unindent;\n\n\n\nconst NAME: &str = \"whisper-dump\";\n\n\n\n#[test]\n", "file_path": "whisper/tests/test-whisper-dump.rs", "rank": 86, "score": 5.16572195877325 }, { "content": "use assert_cmd::prelude::*;\n\nuse predicates::prelude::*;\n\nuse std::error::Error;\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\nuse std::process::Command;\n\nuse tempfile::Builder;\n\nuse unindent::unindent;\n\n\n\nconst NAME: &str = \"whisper-info\";\n\n\n\n#[test]\n", "file_path": "whisper/tests/test-whisper-info.rs", "rank": 87, "score": 5.16572195877325 }, { "content": "use std::error::Error;\n\nuse whisper::aggregation::*;\n\nuse whisper::point::*;\n\nuse whisper::retention::*;\n\nuse whisper::*;\n\nuse whisper_tests::*;\n\n\n\n#[test]\n\n#[allow(clippy::unreadable_literal)]\n", "file_path": "whisper_tests/tests/test-whisper-resize.rs", "rank": 88, "score": 5.163050319484055 }, { "content": "use std::error::Error;\n\nuse whisper::diff::*;\n\nuse whisper::point::*;\n\nuse whisper::retention::*;\n\nuse whisper::*;\n\nuse whisper_tests::*;\n\n\n\n#[test]\n\n#[allow(clippy::unreadable_literal)]\n", "file_path": "whisper_tests/tests/test-whisper-diff.rs", "rank": 89, "score": 5.163050319484055 }, { "content": "use std::error::Error;\n\nuse whisper::builder::WhisperBuilder;\n\nuse whisper::interval::Interval;\n\nuse whisper::point::Point;\n\nuse whisper::retention::Retention;\n\nuse whisper::ArchiveData;\n\nuse whisper_tests::*;\n\n\n\n#[test]\n", "file_path": "whisper_tests/tests/test-whisper.rs", "rank": 90, "score": 5.155503081080047 }, { "content": "\n\n fn write_metadata<W: Write>(&self, w: &mut W) -> Result<(), io::Error> {\n\n w.write_u32::<BigEndian>(self.aggregation_method.to_type())?;\n\n w.write_u32::<BigEndian>(self.max_retention)?;\n\n w.write_f32::<BigEndian>(self.x_files_factor)?;\n\n w.write_u32::<BigEndian>(self.archives.len() as 
u32)?;\n\n Ok(())\n\n }\n\n\n\n fn write<W: Write>(&self, w: &mut W) -> Result<(), io::Error> {\n\n self.write_metadata(w)?;\n\n for archive in &self.archives {\n\n archive.write(w)?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\npub struct WhisperFile {\n\n metadata: WhisperMetadata,\n", "file_path": "whisper/src/lib.rs", "rank": 91, "score": 5.151063781681755 }, { "content": "use std::error::Error;\n\nuse whisper::builder;\n\nuse whisper::builder::*;\n\nuse whisper::point::*;\n\nuse whisper::retention::*;\n\nuse whisper_tests::*;\n\n\n\n#[test]\n\n#[allow(clippy::unreadable_literal)]\n", "file_path": "whisper_tests/tests/test-whisper-merge.rs", "rank": 92, "score": 5.141940636723 }, { "content": " use std::fs::File;\n\n use std::path::Path;\n\n use std::str::FromStr;\n\n\n\n fn get_temp_dir() -> tempfile::TempDir {\n\n tempfile::Builder::new()\n\n .prefix(\"diamond-api\")\n\n .tempdir()\n\n .expect(\"Temp dir created\")\n\n }\n\n\n\n #[test]\n\n fn walk_tree_verify() -> Result<(), Box<dyn std::error::Error>> {\n\n let dir = get_temp_dir();\n\n let path = dir.path();\n\n let path1 = path.join(Path::new(\"foo\"));\n\n let path2 = path.join(Path::new(\"bar\"));\n\n let path3 = path.join(Path::new(\"foobar.wsp\"));\n\n let path4 = path1.join(Path::new(\"bar.wsp\"));\n\n\n", "file_path": "diamond-api/src/storage/whisper_fs.rs", "rank": 93, "score": 5.141707401647505 }, { "content": "use super::*;\n\nuse crate::interval::Interval;\n\nuse std::io;\n\nuse std::path::Path;\n\n\n", "file_path": "whisper/src/fill.rs", "rank": 94, "score": 5.1261804698743445 }, { "content": "use assert_cmd::prelude::*;\n\nuse predicates::prelude::*;\n\nuse std::error::Error;\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\nuse std::process::Command;\n\nuse tempfile::Builder;\n\n\n\nconst NAME: &str = \"whisper-create\";\n\n\n\n#[test]\n", "file_path": "whisper/tests/test-whisper-create.rs", "rank": 95, "score": 5.1195254755467925 }, { "content": "use lazy_static::lazy_static;\n\nuse regex::Regex;\n\nuse 
serde::*;\n\nuse std::str::FromStr;\n\n\n", "file_path": "whisper/src/retention.rs", "rank": 96, "score": 5.095028888924779 }, { "content": "use assert_cmd::prelude::*;\n\nuse predicates::prelude::*;\n\nuse std::error::Error;\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\nuse std::process::Command;\n\nuse tempfile::Builder;\n\n\n\nconst NAME: &str = \"whisper-set-aggregation-method\";\n\n\n\n#[test]\n", "file_path": "whisper/tests/test-whisper-set-aggregation-method.rs", "rank": 97, "score": 5.084046528557426 }, { "content": "use std::error::Error;\n\nuse whisper::point::Point;\n\nuse whisper::retention::*;\n\nuse whisper::*;\n\nuse whisper_tests::*;\n\n\n\n#[test]\n\n#[allow(clippy::unreadable_literal)]\n", "file_path": "whisper_tests/tests/issue8.rs", "rank": 98, "score": 5.073471105107233 }, { "content": "use serde::*;\n\nuse std::cmp;\n\nuse std::convert::Into;\n\nuse std::fmt;\n\nuse std::str::FromStr;\n\n\n\n#[allow(clippy::trivially_copy_pass_by_ref)]\n", "file_path": "whisper/src/aggregation.rs", "rank": 99, "score": 5.073471105107233 } ]
Rust
src/main.rs
erikjohnston/matrix-media-store-size
16055e1964c58f2f35bdff1e888e84d963a1c37a
#[macro_use] extern crate clap; extern crate humansize; extern crate linear_map; extern crate rand; extern crate rusqlite; extern crate twox_hash; extern crate walkdir; extern crate indicatif; use humansize::{FileSize, file_size_opts as options}; use clap::{App, Arg}; use linear_map::LinearMap; use rand::Rng; use std::io; use std::io::Read; use std::fs::File; use std::hash::Hasher; use std::collections::BTreeMap; use std::path::PathBuf; use walkdir::WalkDir; fn copy<R: io::Read, W: Hasher>(reader: &mut R, writer: &mut W) -> io::Result<u64> { let mut buf = [0; 64 * 1024]; let mut written = 0; loop { let len = match reader.read(&mut buf) { Ok(0) => return Ok(written), Ok(len) => len, Err(ref e) if e.kind() == io::ErrorKind::Interrupted => continue, Err(e) => return Err(e), }; writer.write(&buf[..len]); written += len as u64; } } fn to_hash(path: &PathBuf) -> u64 { let mut file = File::open(path).unwrap(); let mut hasher = twox_hash::XxHash::default(); copy(&mut file, &mut hasher).unwrap(); hasher.finish() } fn read_file(path: &PathBuf) -> Vec<u8> { let mut file = File::open(path).unwrap(); let mut vec = Vec::new(); file.read_to_end(&mut vec).unwrap(); vec } fn partition_by<I, F, R>(paths: I, f: F) -> LinearMap<R, Vec<PathBuf>> where I: Iterator<Item=PathBuf>, F: Fn(&PathBuf) -> R, R: Eq { let mut map = LinearMap::with_capacity(paths.size_hint().0); for path in paths { let key = f(&path); map.entry(key).or_insert_with(Vec::new).push(path); } map } const DB_TABLE_SCHEMA: &'static str = r#" CREATE TABLE files ( hash BIGINT NOT NULL, path TEXT NOT NULL, size BIGINT NOT NULL ); "#; fn main() { let matches = App::new(crate_name!()) .version(crate_version!()) .author(crate_authors!("\n")) .arg(Arg::with_name("media_directory") .help("The location of the media store") .index(1) .multiple(true) .required(true)) .arg(Arg::with_name("output-db") .short("o") .long("output-db") .help("Where to write SQLite database to") .value_name("FILE") .takes_value(true)) .get_matches(); let 
paths_to_search = matches.values_of("media_directory").unwrap(); let output_db_path = matches.value_of("output-db"); let db = rusqlite::Connection::open_in_memory().expect("failed to open sqlite db"); db.execute_batch(DB_TABLE_SCHEMA).expect("failed to create db schema"); let mut paths_by_size = BTreeMap::new(); let mut total_files = 0; let mut total_size = 0; let pb = indicatif::ProgressBar::new_spinner(); pb.set_style( indicatif::ProgressStyle::default_spinner() .template("{spinner} Collected metadata for {pos} files [{elapsed}]") ); for path in paths_to_search { for entry in WalkDir::new(path) { let entry = entry.unwrap(); if !entry.file_type().is_file() { continue } let file_size = entry.metadata().unwrap().len() as usize; paths_by_size.entry(file_size).or_insert_with(Vec::new).push(entry.path().to_owned()); total_files += 1; total_size += file_size; pb.inc(1); } } pb.finish(); pb.set_position(total_files); let pb = indicatif::ProgressBar::new(total_files); pb.set_style( indicatif::ProgressStyle::default_bar() .template(" Searching for possible duplicates {bar:40} {pos:>9}/{len:9} [{elapsed}]") ); let mut possible_total_size = 0; let mut possible_duplicates = Vec::new(); for (file_size, paths) in paths_by_size { if paths.len() > 1 { possible_total_size += file_size * paths.len(); possible_duplicates.push((file_size, paths)) } pb.inc(1); } pb.finish(); let mut rng = rand::thread_rng(); rng.shuffle(possible_duplicates.as_mut_slice()); let pb = indicatif::ProgressBar::new(possible_total_size as u64); pb.set_style( indicatif::ProgressStyle::default_bar() .template(" Comparing hashes {bar:40} {bytes:>9}/{total_bytes:9} [{elapsed}]") ); let mut total_wasted_size = 0; for (file_size, paths) in possible_duplicates { if paths.len() == 1 { continue } let by_hash = partition_by(paths.into_iter(), to_hash); for (hash, paths) in by_hash { if paths.len() == 1 { pb.inc(file_size as u64); continue } let by_contents = partition_by(paths.into_iter(), read_file); for (_, paths) 
in by_contents { if paths.len() == 1 { pb.inc(file_size as u64); continue } for path in &paths { db.execute("INSERT INTO files (hash, path, size) VALUES (?, ?, ?)", &[&(hash as i64), &path.to_str().unwrap(), &(file_size as i64)]).expect("failed to write to db"); } let wasted = file_size * (paths.len() - 1); total_wasted_size += wasted; pb.inc((file_size * paths.len()) as u64); } } } pb.finish(); println!(); println!( "Total wasted size: {} out of {}. Percentage: {:.2}%", total_wasted_size.file_size(options::CONVENTIONAL).unwrap(), total_size.file_size(options::CONVENTIONAL).unwrap(), (total_wasted_size * 100) as f64 / total_size as f64, ); if let Some(path) = output_db_path { let mut disk_db = rusqlite::Connection::open(path).expect("failed to open sqlite db"); let backup = rusqlite::backup::Backup::new(&db, &mut disk_db).expect("failed to create backup"); backup.run_to_completion(5, std::time::Duration::from_millis(0), None).expect("failed to write to disk"); } }
#[macro_use] extern crate clap; extern crate humansize; extern crate linear_map; extern crate rand; extern crate rusqlite; extern crate twox_hash; extern crate walkdir; extern crate indicatif; use humansize::{FileSize, file_size_opts as options}; use clap::{App, Arg}; use linear_map::LinearMap; use rand::Rng; use std::io; use std::io::Read; use std::fs::File; use std::hash::Hasher; use std::collections::BTreeMap; use std::path::PathBuf; use walkdir::WalkDir; fn copy<R: io::Read, W: Hasher>(reader: &mut R, writer: &mut W) -> io::Result<u64> { let mut buf = [0; 64 * 1024]; let mut written = 0; loop { let len = match reader.read(&mut buf) { Ok(0) => return Ok(written), Ok(len) => len, Err(ref e) if e.kind() == io::ErrorKind::Interrupted => continue, Err(e) => return Err(e), }; writer.write(&buf[..len]); written += len as u64; } } fn to_hash(path: &PathBuf) -> u64 { let mut file = File::open(path).unwrap(); let mut hasher = twox_hash::XxHash::default(); copy(&mut file, &mut hasher).unwrap(); hasher.finish() } fn read_file(path: &PathBuf) -> Vec<u8> { let mut file = File::open(path).unwrap(); let mut vec = Vec::new(); file.read_to_end(&mut vec).unwrap(); vec } fn partition_by<I, F, R>(paths: I, f: F) -> LinearMap<R, Vec<PathBuf>> where I: Iterator<Item=PathBuf>, F: Fn(&PathBuf) -> R, R: Eq { let mut map = LinearMap::with_capacity(paths.size_hint().0); for path in paths { let key = f(&path); map.entry(key).or_insert_with(Vec::new).push(path); } map } const DB_TABLE_SCHEMA: &'static str = r#" CREATE TABLE files ( hash BIGINT NOT NULL, path TEXT NOT NULL, size BIGINT NOT NULL ); "#; fn main() { let matches = App::new(crate_name!()) .version(crate_version!()) .author(crate_authors!("\n")) .arg(Arg::with_name("media_directory") .help("The location of the media store") .index(1) .multiple(true) .required(true)) .arg(Arg::with_name("output-db") .short("o") .long("output-db") .help("Where to write SQLite database to") .value_name("FILE") .takes_value(true)) .get_matches(); let 
paths_to_search = matches.values_of("media_directory").unwrap(); let output_db_path = matches.value_of("output-db"); let db = rusqlite::Connection::open_in_memory().expect("failed to open sqlite db"); db.execute_batch(DB_TABLE_SCHEMA).expect("failed to create db schema"); let mut paths_by_size = BTreeMap::new(); let mut total_files = 0; let mut total_size = 0; let pb = indicatif::ProgressBar::new_spinner(); pb.set_style( indicatif::ProgressStyle::default_spinner() .template("{spinner} Collected metadata for {pos} files [{elapsed}]") ); for path in paths_to_search { for entry in WalkDir::new(path) { let entry = entry.unwrap(); if !entry.file_type().is_file() { continue } let file_size = entry.metadata().unwrap().len() as usize; paths_by_size.entry(file_size).or_insert_with(Vec::new).push(entry.path().to_owned()); total_files += 1; total_size += file_size; pb.inc(1); } } pb.finish(); pb.set_position(total_files); let pb = indicatif::ProgressBar::new(total_files); pb.set_style( indicatif::ProgressStyle::default_bar() .template(" Searching for possible duplicates {bar:40} {pos:>9}/{len:9} [{elapsed}]") ); let mut possible_total_size = 0; let mut possible_duplicates = Vec::new(); for (file_size, paths) in paths_by_size { if paths.len() > 1 { possible_total_size += file_size * paths.len(); possible_duplicates.push((file_size, paths)) } pb.inc(1); } pb.finish(); let mut rng = rand::thread_rng(); rng.shuffle(possible_duplicates.as_mut_slice()); let pb = indicatif::ProgressBar::new(possible_total_size as u64); pb.set_style( indicatif::ProgressStyle::default_bar() .template(" Comparing hashes {bar:40} {bytes:>9}/{total_bytes:9} [{elapsed}]") ); let mut total_wasted_size = 0; for (file_size, paths) in possible_duplicates { if paths.len() == 1 { continue } let by_hash = partition_by(paths.into_iter(), to_hash); for (hash, paths) in by_hash { if paths.len() == 1 { pb.inc(file_size as u64); continue } let by_contents = partition_by(paths.into_iter(), read_file); for (_, paths) 
in by_contents { if paths.len() == 1 { pb.inc(file_size as u64); continue } for path in &paths { db.execute("INSERT INTO files (hash, path, size) VALUES (?, ?, ?)", &[&(hash as i64), &path.to_str().unwrap(), &(file_size as i64)]).expect("failed to write to db
.len()) as u64); } } } pb.finish(); println!(); println!( "Total wasted size: {} out of {}. Percentage: {:.2}%", total_wasted_size.file_size(options::CONVENTIONAL).unwrap(), total_size.file_size(options::CONVENTIONAL).unwrap(), (total_wasted_size * 100) as f64 / total_size as f64, ); if let Some(path) = output_db_path { let mut disk_db = rusqlite::Connection::open(path).expect("failed to open sqlite db"); let backup = rusqlite::backup::Backup::new(&db, &mut disk_db).expect("failed to create backup"); backup.run_to_completion(5, std::time::Duration::from_millis(0), None).expect("failed to write to disk"); } }
"); } let wasted = file_size * (paths.len() - 1); total_wasted_size += wasted; pb.inc((file_size * paths
random
[]
Rust
src/entities.rs
brooks-builds/improve_skills_by_building_ecs_library_in_rust
d8131b0b6a963456fd4c2ad94382924060847905
pub mod query; pub mod query_entity; use std::{ any::{Any, TypeId}, cell::RefCell, collections::HashMap, rc::Rc, vec, }; use eyre::Result; use crate::custom_errors::CustomErrors; pub type Component = Rc<RefCell<dyn Any>>; pub type Components = HashMap<TypeId, Vec<Option<Component>>>; #[derive(Debug, Default)] pub struct Entities { components: Components, bit_masks: HashMap<TypeId, u32>, map: Vec<u32>, inserting_into_index: usize, } impl Entities { pub fn register_component<T: Any + 'static>(&mut self) { let type_id = TypeId::of::<T>(); let bit_mask = 2u32.pow(self.bit_masks.len() as u32); self.components.insert(type_id, vec![]); self.bit_masks.insert(type_id, bit_mask); } pub fn create_entity(&mut self) -> &mut Self { if let Some((index, _)) = self .map .iter() .enumerate() .find(|(_index, mask)| **mask == 0) { self.inserting_into_index = index; } else { self.components .iter_mut() .for_each(|(_key, components)| components.push(None)); self.map.push(0); self.inserting_into_index = self.map.len() - 1; } self } pub fn with_component(&mut self, data: impl Any) -> Result<&mut Self> { let type_id = data.type_id(); let index = self.inserting_into_index; if let Some(components) = self.components.get_mut(&type_id) { let component = components .get_mut(index) .ok_or(CustomErrors::CreateComponentNeverCalled)?; *component = Some(Rc::new(RefCell::new(data))); let bitmask = self.bit_masks.get(&type_id).unwrap(); self.map[index] |= *bitmask; } else { return Err(CustomErrors::ComponentNotRegistered.into()); } Ok(self) } pub fn get_bitmask(&self, type_id: &TypeId) -> Option<u32> { self.bit_masks.get(type_id).copied() } pub fn delete_component_by_entity_id<T: Any>(&mut self, index: usize) -> Result<()> { let type_id = TypeId::of::<T>(); let mask = if let Some(mask) = self.bit_masks.get(&type_id) { mask } else { return Err(CustomErrors::ComponentNotRegistered.into()); }; if self.has_component(index, *mask) { self.map[index] ^= *mask; } Ok(()) } pub fn add_component_by_entity_id(&mut 
self, data: impl Any, index: usize) -> Result<()> { let type_id = data.type_id(); let mask = if let Some(mask) = self.bit_masks.get(&type_id) { mask } else { return Err(CustomErrors::ComponentNotRegistered.into()); }; self.map[index] |= *mask; let components = self.components.get_mut(&type_id).unwrap(); components[index] = Some(Rc::new(RefCell::new(data))); Ok(()) } pub fn delete_entity_by_id(&mut self, index: usize) -> Result<()> { if let Some(map) = self.map.get_mut(index) { *map = 0; } else { return Err(CustomErrors::EntityDoesNotExist.into()); } Ok(()) } fn has_component(&self, index: usize, mask: u32) -> bool { self.map[index] & mask == mask } } #[cfg(test)] mod test { use std::any::TypeId; use super::*; #[test] fn register_an_entity() { let mut entities = Entities::default(); entities.register_component::<Health>(); let type_id = TypeId::of::<Health>(); let health_components = entities.components.get(&type_id).unwrap(); assert_eq!(health_components.len(), 0); } #[test] fn bitmask_updated_when_registering_entities() { let mut entities = Entities::default(); entities.register_component::<Health>(); let type_id = TypeId::of::<Health>(); let mask = entities.bit_masks.get(&type_id).unwrap(); assert_eq!(*mask, 1); entities.register_component::<Speed>(); let type_id = TypeId::of::<Speed>(); let mask = entities.bit_masks.get(&type_id).unwrap(); assert_eq!(*mask, 2); } #[test] fn create_entity() { let mut entities = Entities::default(); entities.register_component::<Health>(); entities.register_component::<Speed>(); entities.create_entity(); let health = entities.components.get(&TypeId::of::<Health>()).unwrap(); let speed = entities.components.get(&TypeId::of::<Speed>()).unwrap(); assert!(health.len() == speed.len() && health.len() == 1); assert!(health[0].is_none() && speed[0].is_none()); } #[test] fn with_component() -> Result<()> { let mut entities = Entities::default(); entities.register_component::<Health>(); entities.register_component::<Speed>(); entities 
.create_entity() .with_component(Health(100))? .with_component(Speed(15))?; let first_health = &entities.components.get(&TypeId::of::<Health>()).unwrap()[0]; let wrapped_health = first_health.as_ref().unwrap(); let borrowed_health = wrapped_health.borrow(); let health = borrowed_health.downcast_ref::<Health>().unwrap(); assert_eq!(health.0, 100); Ok(()) } #[test] fn map_is_updated_when_creating_entities() -> Result<()> { let mut entities = Entities::default(); entities.register_component::<Health>(); entities.register_component::<Speed>(); entities .create_entity() .with_component(Health(100))? .with_component(Speed(15))?; let entity_map = entities.map[0]; assert_eq!(entity_map, 3); entities.create_entity().with_component(Speed(15))?; let entity_map = entities.map[1]; assert_eq!(entity_map, 2); Ok(()) } #[test] fn delete_component_by_entity_id() -> Result<()> { let mut entities = Entities::default(); entities.register_component::<Health>(); entities.register_component::<Speed>(); entities .create_entity() .with_component(Health(100))? 
.with_component(Speed(50))?; entities.delete_component_by_entity_id::<Health>(0)?; assert_eq!(entities.map[0], 2); Ok(()) } #[test] fn add_component_to_entity_by_id() -> Result<()> { let mut entities = Entities::default(); entities.register_component::<Health>(); entities.register_component::<Speed>(); entities.create_entity().with_component(Health(100))?; entities.add_component_by_entity_id(Speed(50), 0)?; assert_eq!(entities.map[0], 3); let speed_type_id = TypeId::of::<Speed>(); let wrapped_speeds = entities.components.get(&speed_type_id).unwrap(); let wrapped_speed = wrapped_speeds[0].as_ref().unwrap(); let borrowed_speed = wrapped_speed.borrow(); let speed = borrowed_speed.downcast_ref::<Speed>().unwrap(); assert_eq!(speed.0, 50); Ok(()) } #[test] fn delete_entity_by_id() -> Result<()> { let mut entities = Entities::default(); entities.register_component::<Health>(); entities.create_entity().with_component(Health(100))?; entities.delete_entity_by_id(0)?; assert_eq!(entities.map[0], 0); Ok(()) } #[test] fn created_entities_are_inserted_into_deleted_entities_columns() -> Result<()> { let mut entities = Entities::default(); entities.register_component::<Health>(); entities.create_entity().with_component(Health(100))?; entities.create_entity().with_component(Health(50))?; entities.delete_entity_by_id(0)?; entities.create_entity().with_component(Health(25))?; assert_eq!(entities.map[0], 1); let type_id = TypeId::of::<Health>(); let borrowed_health = &entities.components.get(&type_id).unwrap()[0] .as_ref() .unwrap() .borrow(); let health = borrowed_health.downcast_ref::<Health>().unwrap(); assert_eq!(health.0, 25); Ok(()) } #[test] fn should_not_add_component_back_after_deleting_twice() -> Result<()> { let mut entities = Entities::default(); entities.register_component::<u32>(); entities.register_component::<f32>(); entities .create_entity() .with_component(100_u32)? 
.with_component(50.0_f32)?; entities.delete_component_by_entity_id::<u32>(0)?; entities.delete_component_by_entity_id::<u32>(0)?; assert_eq!(entities.map[0], 2); Ok(()) } /* Brendon Stanton When you create your second component, inserting_into_index never changes from 0. So when you get to with_component, Health(50) overwrites Health(100) and column 1 in your table never actually gets used. */ #[test] fn inserting_into_index_should_change_when_adding_components() -> Result<()> { let mut entities = Entities::default(); entities.register_component::<f32>(); entities.register_component::<u32>(); let creating_entity = entities.create_entity(); assert_eq!(creating_entity.inserting_into_index, 0); creating_entity .with_component(100.0_f32)? .with_component(10_u32)?; assert_eq!(entities.inserting_into_index, 0); let creating_entity = entities.create_entity(); assert_eq!(creating_entity.inserting_into_index, 1); creating_entity .with_component(110.0_f32)? .with_component(20_u32)?; assert_eq!(entities.inserting_into_index, 1); entities.delete_entity_by_id(0)?; let creating_entity = entities.create_entity(); assert_eq!(creating_entity.inserting_into_index, 0); creating_entity .with_component(100.0_f32)? .with_component(10_u32)?; assert_eq!(entities.inserting_into_index, 0); Ok(()) } struct Health(pub u32); struct Speed(pub u32); }
pub mod query; pub mod query_entity; use std::{ any::{Any, TypeId}, cell::RefCell, collections::HashMap, rc::Rc, vec, }; use eyre::Result; use crate::custom_errors::CustomErrors; pub type Component = Rc<RefCell<dyn Any>>; pub type Components = HashMap<TypeId, Vec<Option<Component>>>; #[derive(Debug, Default)] pub struct Entities { components: Components, bit_masks: HashMap<TypeId, u32>, map: Vec<u32>, inserting_into_index: usize, } impl Entities { pub fn register_component<T: Any + 'static>(&mut self) { let type_id = TypeId::of::<T>(); let bit_mask = 2u32.pow(self.bit_masks.len() as u32); self.components.insert(type_id, vec![]); self.bit_masks.insert(type_id, bit_mask); } pub fn create_entity(&mut self) -> &mut Self { if let Some((index, _)) = self .map .iter() .enumerate() .find(|(_index, mask)| **mask == 0) { self.inserting_into_index = index; } else { self.components .iter_mut() .for_each(|(_key, components)| components.push(None)); self.map.push(0); self.inserting_into_index = self.map.len() - 1; } self } pub fn with_component(&mut self, data: impl Any) -> Result<&mut Self> { let type_id = data.type_id(); let index = self.inserting_into_index;
Ok(self) } pub fn get_bitmask(&self, type_id: &TypeId) -> Option<u32> { self.bit_masks.get(type_id).copied() } pub fn delete_component_by_entity_id<T: Any>(&mut self, index: usize) -> Result<()> { let type_id = TypeId::of::<T>(); let mask = if let Some(mask) = self.bit_masks.get(&type_id) { mask } else { return Err(CustomErrors::ComponentNotRegistered.into()); }; if self.has_component(index, *mask) { self.map[index] ^= *mask; } Ok(()) } pub fn add_component_by_entity_id(&mut self, data: impl Any, index: usize) -> Result<()> { let type_id = data.type_id(); let mask = if let Some(mask) = self.bit_masks.get(&type_id) { mask } else { return Err(CustomErrors::ComponentNotRegistered.into()); }; self.map[index] |= *mask; let components = self.components.get_mut(&type_id).unwrap(); components[index] = Some(Rc::new(RefCell::new(data))); Ok(()) } pub fn delete_entity_by_id(&mut self, index: usize) -> Result<()> { if let Some(map) = self.map.get_mut(index) { *map = 0; } else { return Err(CustomErrors::EntityDoesNotExist.into()); } Ok(()) } fn has_component(&self, index: usize, mask: u32) -> bool { self.map[index] & mask == mask } } #[cfg(test)] mod test { use std::any::TypeId; use super::*; #[test] fn register_an_entity() { let mut entities = Entities::default(); entities.register_component::<Health>(); let type_id = TypeId::of::<Health>(); let health_components = entities.components.get(&type_id).unwrap(); assert_eq!(health_components.len(), 0); } #[test] fn bitmask_updated_when_registering_entities() { let mut entities = Entities::default(); entities.register_component::<Health>(); let type_id = TypeId::of::<Health>(); let mask = entities.bit_masks.get(&type_id).unwrap(); assert_eq!(*mask, 1); entities.register_component::<Speed>(); let type_id = TypeId::of::<Speed>(); let mask = entities.bit_masks.get(&type_id).unwrap(); assert_eq!(*mask, 2); } #[test] fn create_entity() { let mut entities = Entities::default(); entities.register_component::<Health>(); 
entities.register_component::<Speed>(); entities.create_entity(); let health = entities.components.get(&TypeId::of::<Health>()).unwrap(); let speed = entities.components.get(&TypeId::of::<Speed>()).unwrap(); assert!(health.len() == speed.len() && health.len() == 1); assert!(health[0].is_none() && speed[0].is_none()); } #[test] fn with_component() -> Result<()> { let mut entities = Entities::default(); entities.register_component::<Health>(); entities.register_component::<Speed>(); entities .create_entity() .with_component(Health(100))? .with_component(Speed(15))?; let first_health = &entities.components.get(&TypeId::of::<Health>()).unwrap()[0]; let wrapped_health = first_health.as_ref().unwrap(); let borrowed_health = wrapped_health.borrow(); let health = borrowed_health.downcast_ref::<Health>().unwrap(); assert_eq!(health.0, 100); Ok(()) } #[test] fn map_is_updated_when_creating_entities() -> Result<()> { let mut entities = Entities::default(); entities.register_component::<Health>(); entities.register_component::<Speed>(); entities .create_entity() .with_component(Health(100))? .with_component(Speed(15))?; let entity_map = entities.map[0]; assert_eq!(entity_map, 3); entities.create_entity().with_component(Speed(15))?; let entity_map = entities.map[1]; assert_eq!(entity_map, 2); Ok(()) } #[test] fn delete_component_by_entity_id() -> Result<()> { let mut entities = Entities::default(); entities.register_component::<Health>(); entities.register_component::<Speed>(); entities .create_entity() .with_component(Health(100))? 
.with_component(Speed(50))?; entities.delete_component_by_entity_id::<Health>(0)?; assert_eq!(entities.map[0], 2); Ok(()) } #[test] fn add_component_to_entity_by_id() -> Result<()> { let mut entities = Entities::default(); entities.register_component::<Health>(); entities.register_component::<Speed>(); entities.create_entity().with_component(Health(100))?; entities.add_component_by_entity_id(Speed(50), 0)?; assert_eq!(entities.map[0], 3); let speed_type_id = TypeId::of::<Speed>(); let wrapped_speeds = entities.components.get(&speed_type_id).unwrap(); let wrapped_speed = wrapped_speeds[0].as_ref().unwrap(); let borrowed_speed = wrapped_speed.borrow(); let speed = borrowed_speed.downcast_ref::<Speed>().unwrap(); assert_eq!(speed.0, 50); Ok(()) } #[test] fn delete_entity_by_id() -> Result<()> { let mut entities = Entities::default(); entities.register_component::<Health>(); entities.create_entity().with_component(Health(100))?; entities.delete_entity_by_id(0)?; assert_eq!(entities.map[0], 0); Ok(()) } #[test] fn created_entities_are_inserted_into_deleted_entities_columns() -> Result<()> { let mut entities = Entities::default(); entities.register_component::<Health>(); entities.create_entity().with_component(Health(100))?; entities.create_entity().with_component(Health(50))?; entities.delete_entity_by_id(0)?; entities.create_entity().with_component(Health(25))?; assert_eq!(entities.map[0], 1); let type_id = TypeId::of::<Health>(); let borrowed_health = &entities.components.get(&type_id).unwrap()[0] .as_ref() .unwrap() .borrow(); let health = borrowed_health.downcast_ref::<Health>().unwrap(); assert_eq!(health.0, 25); Ok(()) } #[test] fn should_not_add_component_back_after_deleting_twice() -> Result<()> { let mut entities = Entities::default(); entities.register_component::<u32>(); entities.register_component::<f32>(); entities .create_entity() .with_component(100_u32)? 
.with_component(50.0_f32)?; entities.delete_component_by_entity_id::<u32>(0)?; entities.delete_component_by_entity_id::<u32>(0)?; assert_eq!(entities.map[0], 2); Ok(()) } /* Brendon Stanton When you create your second component, inserting_into_index never changes from 0. So when you get to with_component, Health(50) overwrites Health(100) and column 1 in your table never actually gets used. */ #[test] fn inserting_into_index_should_change_when_adding_components() -> Result<()> { let mut entities = Entities::default(); entities.register_component::<f32>(); entities.register_component::<u32>(); let creating_entity = entities.create_entity(); assert_eq!(creating_entity.inserting_into_index, 0); creating_entity .with_component(100.0_f32)? .with_component(10_u32)?; assert_eq!(entities.inserting_into_index, 0); let creating_entity = entities.create_entity(); assert_eq!(creating_entity.inserting_into_index, 1); creating_entity .with_component(110.0_f32)? .with_component(20_u32)?; assert_eq!(entities.inserting_into_index, 1); entities.delete_entity_by_id(0)?; let creating_entity = entities.create_entity(); assert_eq!(creating_entity.inserting_into_index, 0); creating_entity .with_component(100.0_f32)? .with_component(10_u32)?; assert_eq!(entities.inserting_into_index, 0); Ok(()) } struct Health(pub u32); struct Speed(pub u32); }
if let Some(components) = self.components.get_mut(&type_id) { let component = components .get_mut(index) .ok_or(CustomErrors::CreateComponentNeverCalled)?; *component = Some(Rc::new(RefCell::new(data))); let bitmask = self.bit_masks.get(&type_id).unwrap(); self.map[index] |= *bitmask; } else { return Err(CustomErrors::ComponentNotRegistered.into()); }
if_condition
[ { "content": "struct Health(pub u32);\n\n\n\nimpl Health {\n\n pub fn new(data: u32) -> Self {\n\n Self(data)\n\n }\n\n\n\n pub fn lose_health(&mut self, amount: u32) {\n\n self.0 -= amount;\n\n }\n\n\n\n pub fn print_health(&self) {\n\n println!(\"health: {:?}\", **self);\n\n }\n\n}\n\n\n\nimpl Deref for Health {\n\n type Target = u32;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n\n\n", "file_path": "tests/language/structs.rs", "rank": 0, "score": 127009.40355618211 }, { "content": "#[derive(Debug)]\n\nstruct FpsResource(pub u32);\n\n\n\nimpl std::ops::Deref for FpsResource {\n\n type Target = u32;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n}\n", "file_path": "tests/resources.rs", "rank": 1, "score": 119077.98498402303 }, { "content": "type ExtractedComponents<'a> = Result<&'a Vec<Option<Rc<RefCell<dyn Any>>>>>;\n\n\n\npub struct QueryEntity<'a> {\n\n pub id: usize,\n\n entities: &'a Entities,\n\n}\n\n\n\nimpl<'a> QueryEntity<'a> {\n\n pub fn new(id: usize, entities: &'a Entities) -> Self {\n\n Self { id, entities }\n\n }\n\n\n\n fn extract_components<T: Any>(&self) -> ExtractedComponents {\n\n let type_id = TypeId::of::<T>();\n\n self.entities\n\n .components\n\n .get(&type_id)\n\n .ok_or_else(|| CustomErrors::ComponentNotRegistered.into())\n\n }\n\n\n", "file_path": "src/entities/query_entity.rs", "rank": 2, "score": 109439.78795907318 }, { "content": "#[derive(Debug)]\n\nstruct Health(u32);\n\n\n\nimpl Default for Health {\n\n fn default() -> Self {\n\n Self(100)\n\n }\n\n}\n", "file_path": "tests/language/default.rs", "rank": 3, "score": 108614.68546086381 }, { "content": "struct Location(pub f32, pub f32);\n", "file_path": "tests/entities.rs", "rank": 4, "score": 105438.61885828468 }, { "content": "#[test]\n\npub fn filter_map() {\n\n let mut numbers = vec![];\n\n for number in 0..100 {\n\n numbers.push(number);\n\n }\n\n let wrapped_numbers = numbers\n\n .iter()\n\n // .map(|number| WrappedNumber {\n\n // value: 
*number,\n\n // is_even: number % 2 == 0,\n\n // })\n\n // .filter(|wrapped_number| wrapped_number.is_even)\n\n .filter_map(|number| {\n\n let is_even = number % 2 == 0;\n\n if is_even {\n\n Some(WrappedNumber {\n\n value: *number,\n\n is_even,\n\n })\n\n } else {\n\n None\n\n }\n\n })\n\n .collect::<Vec<WrappedNumber>>();\n\n\n\n dbg!(wrapped_numbers);\n\n}\n\n\n", "file_path": "tests/language/filter_map.rs", "rank": 5, "score": 104968.56823441433 }, { "content": "struct Speed(u32);\n", "file_path": "tests/language/type_id.rs", "rank": 6, "score": 104900.51621306935 }, { "content": "struct Size(pub f32);\n", "file_path": "tests/entities.rs", "rank": 7, "score": 102920.4701344939 }, { "content": "#[test]\n\nfn default() {\n\n let player = Player::new();\n\n dbg!(player);\n\n}\n\n\n", "file_path": "tests/language/default.rs", "rank": 8, "score": 95252.69557990743 }, { "content": "#[test]\n\nfn type_id() {\n\n let mut components: HashMap<TypeId, Vec<Box<dyn Any + 'static>>> = HashMap::new();\n\n let health = 100_u32;\n\n let health_type_id = TypeId::of::<u32>();\n\n components.insert(health_type_id, vec![Box::new(health)]);\n\n let speed = Speed(150);\n\n let speed_type_id = speed.type_id();\n\n components.insert(speed_type_id, vec![Box::new(speed)]);\n\n\n\n for (_component_type_id, component_value) in components {\n\n let type_id = component_value[0].type_id();\n\n dbg!(type_id);\n\n }\n\n}\n\n\n", "file_path": "tests/language/type_id.rs", "rank": 9, "score": 90413.87722846391 }, { "content": "#[test]\n\n#[allow(clippy::float_cmp)]\n\nfn query_for_entities() -> Result<()> {\n\n let mut world = World::new();\n\n world.register_component::<Location>();\n\n world.register_component::<Size>();\n\n\n\n world\n\n .create_entity()\n\n .with_component(Location(42.0, 24.0))?\n\n .with_component(Size(10.0))?;\n\n\n\n world.create_entity().with_component(Size(11.0))?;\n\n\n\n world.create_entity().with_component(Location(43.0, 25.0))?;\n\n\n\n world\n\n .create_entity()\n\n 
.with_component(Location(44.0, 26.0))?\n\n .with_component(Size(12.0))?;\n\n\n\n let query = world\n", "file_path": "tests/entities.rs", "rank": 10, "score": 88390.16156854507 }, { "content": "#[test]\n\nfn add_component_to_entity() -> Result<()> {\n\n let mut world = World::new();\n\n world.register_component::<Location>();\n\n world.register_component::<Size>();\n\n world.create_entity().with_component(Location(10.0, 15.0))?;\n\n\n\n world.add_component_to_entity_by_id(Size(20.0), 0)?;\n\n\n\n let query = world\n\n .query()\n\n .with_component::<Location>()?\n\n .with_component::<Size>()?\n\n .run();\n\n assert_eq!(query.0.len(), 1);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/entities.rs", "rank": 11, "score": 85821.22012517239 }, { "content": "#[test]\n\nfn deleted_component_from_entity() -> Result<()> {\n\n let mut world = World::new();\n\n\n\n world.register_component::<Location>();\n\n world.register_component::<Size>();\n\n\n\n world\n\n .create_entity()\n\n .with_component(Location(10.0, 11.0))?\n\n .with_component(Size(10.0))?;\n\n\n\n world\n\n .create_entity()\n\n .with_component(Location(20.0, 21.0))?\n\n .with_component(Size(20.0))?;\n\n\n\n world.delete_component_by_entity_id::<Location>(0)?;\n\n\n\n let query = world\n\n .query()\n\n .with_component::<Location>()?\n\n .with_component::<Size>()?\n\n .run();\n\n\n\n assert_eq!(query.0.len(), 1);\n\n assert_eq!(query.0[0], 1);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/entities.rs", "rank": 12, "score": 85821.22012517239 }, { "content": "fn get_type_id<T: Any>() -> TypeId {\n\n TypeId::of::<T>()\n\n}\n", "file_path": "tests/language/generics.rs", "rank": 13, "score": 73928.6515578766 }, { "content": "fn structs() {\n\n let mut player_health = Health(100);\n\n let _enemy_health = Health::new(100);\n\n\n\n player_health.lose_health(10);\n\n player_health.print_health();\n\n\n\n let _ice_cream = FavoriteFood::IceCream {\n\n topping: \"Nothing\".to_owned(),\n\n scoops: 1,\n\n flavor: \"Space 
Junkie\".to_owned(),\n\n };\n\n}\n\n\n", "file_path": "tests/language/structs.rs", "rank": 14, "score": 71729.20958514094 }, { "content": "#[derive(Debug, Default)]\n\nstruct Player {\n\n health: Health,\n\n damage: u32,\n\n}\n\n\n\nimpl Player {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n}\n\n\n", "file_path": "tests/language/default.rs", "rank": 15, "score": 70515.00682475943 }, { "content": "fn maybe_something() -> Option<u32> {\n\n Some(52)\n\n}\n\n\n", "file_path": "tests/language/converting_options_to_results.rs", "rank": 16, "score": 68591.1815687452 }, { "content": "#[test]\n\nfn hash_maps() {\n\n let mut hash_map = HashMap::new();\n\n hash_map.insert(Letters::A, vec!['a']);\n\n let letters = hash_map.entry(Letters::B).or_insert_with(|| vec!['b']);\n\n letters.push('b');\n\n let _a = hash_map.get_mut(&Letters::A).unwrap();\n\n dbg!(hash_map.len());\n\n\n\n // for (letter, characters) in hash_map {\n\n\n\n // }\n\n\n\n hash_map.values().for_each(|characters| {\n\n dbg!(characters);\n\n });\n\n\n\n if hash_map.contains_key(&Letters::A) {\n\n dbg!(\"We have a's\");\n\n }\n\n\n\n // hash_map.retain(|key, value| false);\n\n // dbg!(hash_map.len());\n\n\n\n hash_map.remove(&Letters::B);\n\n hash_map.clear();\n\n}\n\n\n", "file_path": "tests/language/hash_maps.rs", "rank": 17, "score": 67788.2767361722 }, { "content": "#[test]\n\nfn type_keyword() {\n\n let name = Some((\"Brooks\".to_owned(), 3));\n\n say_hello(name);\n\n}\n\n\n", "file_path": "tests/language/type_keyword.rs", "rank": 18, "score": 67503.81693104666 }, { "content": "#[derive(Debug)]\n\nstruct WrappedNumber {\n\n value: i32,\n\n is_even: bool,\n\n}\n", "file_path": "tests/language/filter_map.rs", "rank": 19, "score": 65625.01559492317 }, { "content": "#[test]\n\nfn create_entity() -> Result<()> {\n\n let mut world = World::new();\n\n world.register_component::<Location>();\n\n world.register_component::<Size>();\n\n\n\n world\n\n .create_entity()\n\n .with_component(Location(42.0, 
24.0))?\n\n .with_component(Size(10.0))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/entities.rs", "rank": 20, "score": 65611.05952539919 }, { "content": "#[test]\n\n#[allow(clippy::float_cmp)]\n\nfn deleting_an_entity() -> Result<()> {\n\n let mut world = World::new();\n\n world.register_component::<Location>();\n\n world.register_component::<Size>();\n\n world.create_entity().with_component(Location(10.0, 15.0))?;\n\n world.create_entity().with_component(Location(20.0, 25.0))?;\n\n\n\n world.delete_entity_by_id(0)?;\n\n\n\n let query = world.query().with_component::<Location>()?.run();\n\n\n\n assert_eq!(query.0.len(), 1);\n\n\n\n let borrowed_location = query.1[0][0].borrow();\n\n let location = borrowed_location.downcast_ref::<Location>().unwrap();\n\n\n\n assert_eq!(location.0, 20.0);\n\n\n\n world.create_entity().with_component(Location(30.0, 35.0))?;\n\n let query = world.query().with_component::<Location>()?.run();\n\n let borrowed_location = query.1[0][0].borrow();\n\n let location = borrowed_location.downcast_ref::<Location>().unwrap();\n\n assert_eq!(location.0, 30.0);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/entities.rs", "rank": 21, "score": 65611.05952539919 }, { "content": "fn get_type_id(boxed_thing: Box<dyn Any>) -> TypeId {\n\n boxed_thing.type_id()\n\n}\n", "file_path": "tests/language/boxes.rs", "rank": 22, "score": 65127.11717157165 }, { "content": "#[test]\n\nfn methods_on_primitive_types() {\n\n let number = 10.0_f32;\n\n let larger_number = number.powf(2.5);\n\n dbg!(larger_number);\n\n let pi = std::f64::consts::PI;\n\n dbg!(pi);\n\n}\n", "file_path": "tests/language/methods_on_primitive_types.rs", "rank": 23, "score": 63765.77036145833 }, { "content": "fn add_one(wrapped_number: Rc<RefCell<u32>>) {\n\n let _cloned_number = {\n\n let other_borrowed_number = wrapped_number.borrow();\n\n *other_borrowed_number\n\n };\n\n let mut borrowed_number = wrapped_number.borrow_mut();\n\n *borrowed_number += 1;\n\n}\n", "file_path": 
"tests/language/interior_mutability.rs", "rank": 24, "score": 59062.76841108105 }, { "content": " pub fn get_component<T: Any>(&self) -> Result<Ref<T>> {\n\n let components = self.extract_components::<T>()?;\n\n let borrowed_component = components[self.id]\n\n .as_ref()\n\n .ok_or(CustomErrors::ComponentDataDoesNotExist)?\n\n .borrow();\n\n Ok(Ref::map(borrowed_component, |any| {\n\n any.downcast_ref::<T>().unwrap()\n\n }))\n\n }\n\n\n\n pub fn get_component_mut<T: Any>(&self) -> Result<RefMut<T>> {\n\n let components = self.extract_components::<T>()?;\n\n let borrowed_component = components[self.id]\n\n .as_ref()\n\n .ok_or(CustomErrors::ComponentDataDoesNotExist)?\n\n .borrow_mut();\n\n Ok(RefMut::map(borrowed_component, |any| {\n\n any.downcast_mut::<T>().unwrap()\n\n }))\n\n }\n\n}\n", "file_path": "src/entities/query_entity.rs", "rank": 25, "score": 53910.10960133278 }, { "content": "use eyre::Result;\n\nuse std::{\n\n any::{Any, TypeId},\n\n cell::{Ref, RefCell, RefMut},\n\n rc::Rc,\n\n};\n\n\n\nuse crate::custom_errors::CustomErrors;\n\n\n\nuse super::Entities;\n\n\n", "file_path": "src/entities/query_entity.rs", "rank": 26, "score": 53909.81138237093 }, { "content": "use std::any::{Any, TypeId};\n\n\n\nuse eyre::Result;\n\n\n\nuse super::{query_entity::QueryEntity, Component, Entities};\n\nuse crate::custom_errors::CustomErrors;\n\n\n\npub type QueryIndexes = Vec<usize>;\n\npub type QueryComponents = Vec<Vec<Component>>;\n\n\n\n#[derive(Debug)]\n\npub struct Query<'a> {\n\n map: u32,\n\n entities: &'a Entities,\n\n type_ids: Vec<TypeId>,\n\n}\n\n\n\nimpl<'a> Query<'a> {\n\n pub fn new(entities: &'a Entities) -> Self {\n\n Self {\n", "file_path": "src/entities/query.rs", "rank": 27, "score": 49409.132257238576 }, { "content": " entities,\n\n map: 0,\n\n type_ids: vec![],\n\n }\n\n }\n\n\n\n pub fn with_component<T: Any>(&mut self) -> Result<&mut Self> {\n\n let type_id = TypeId::of::<T>();\n\n if let Some(bit_mask) = self.entities.get_bitmask(&type_id) {\n\n 
self.map |= bit_mask;\n\n self.type_ids.push(type_id);\n\n } else {\n\n return Err(CustomErrors::ComponentNotRegistered.into());\n\n }\n\n Ok(self)\n\n }\n\n\n\n pub fn run(&self) -> (QueryIndexes, QueryComponents) {\n\n let indexes: Vec<usize> = self\n\n .entities\n", "file_path": "src/entities/query.rs", "rank": 28, "score": 49405.42042836001 }, { "content": "\n\n#[cfg(test)]\n\nmod test {\n\n use crate::entities::query_entity::QueryEntity;\n\n\n\n use super::*;\n\n use core::f32;\n\n use std::{\n\n cell::{Ref, RefMut},\n\n u32,\n\n };\n\n\n\n #[test]\n\n fn query_mask_updating_with_component() -> Result<()> {\n\n let mut entities = Entities::default();\n\n entities.register_component::<u32>();\n\n entities.register_component::<f32>();\n\n let mut query = Query::new(&entities);\n\n query.with_component::<u32>()?.with_component::<f32>()?;\n\n\n", "file_path": "src/entities/query.rs", "rank": 29, "score": 49402.80496432065 }, { "content": " }\n\n\n\n (indexes, result)\n\n }\n\n\n\n pub fn run_entity(&self) -> Vec<QueryEntity> {\n\n self.entities\n\n .map\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(index, entity_map)| {\n\n if entity_map & self.map == self.map {\n\n Some(QueryEntity::new(index, self.entities))\n\n } else {\n\n None\n\n }\n\n })\n\n .collect()\n\n }\n\n}\n", "file_path": "src/entities/query.rs", "rank": 30, "score": 49400.89210038595 }, { "content": " .map\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(index, entity_map)| {\n\n if entity_map & self.map == self.map {\n\n Some(index)\n\n } else {\n\n None\n\n }\n\n })\n\n .collect();\n\n let mut result = vec![];\n\n\n\n for type_id in &self.type_ids {\n\n let entity_components = self.entities.components.get(type_id).unwrap();\n\n let mut components_to_keep = vec![];\n\n for index in &indexes {\n\n components_to_keep.push(entity_components[*index].as_ref().unwrap().clone());\n\n }\n\n result.push(components_to_keep);\n", "file_path": "src/entities/query.rs", "rank": 31, "score": 49400.53814466363 }, 
{ "content": " // fn get_component_mut<T: Any>(&self, entity_id: usize) -> RefMut<T> {\n\n // RefMut::map(self.components[id].as_ref().unwrap().borrow_mut(), |x| {\n\n // x.downcast_mut::<T>().unwrap()\n\n // })\n\n // }\n\n // ```\n\n\n\n #[test]\n\n fn query_for_entity_ref() -> Result<()> {\n\n let mut entities = Entities::default();\n\n\n\n entities.register_component::<u32>();\n\n entities.register_component::<f32>();\n\n entities.create_entity().with_component(100_u32)?;\n\n entities.create_entity().with_component(10.0_f32)?;\n\n\n\n let mut query = Query::new(&entities);\n\n let entities: Vec<QueryEntity> = query.with_component::<u32>()?.run_entity();\n\n\n\n assert_eq!(entities.len(), 1);\n", "file_path": "src/entities/query.rs", "rank": 32, "score": 49400.01119956057 }, { "content": " entities.create_entity().with_component(10_u32)?;\n\n entities.create_entity();\n\n let mut query = Query::new(&entities);\n\n query.with_component::<u32>()?;\n\n let query_result = query.run();\n\n let u32s = &query_result.1[0];\n\n assert_eq!(u32s.len(), 1);\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn query_after_deleting_entity() -> Result<()> {\n\n let mut entities = Entities::default();\n\n entities.register_component::<u32>();\n\n entities.create_entity().with_component(10_u32)?;\n\n entities.create_entity().with_component(20_u32)?;\n\n entities.delete_entity_by_id(1)?;\n\n let (query_indexes, query_results) = Query::new(&entities).with_component::<u32>()?.run();\n\n assert_eq!(query_indexes.len(), query_results.len());\n\n assert_eq!(query_results[0].len(), 1);\n", "file_path": "src/entities/query.rs", "rank": 33, "score": 49398.82642792762 }, { "content": "\n\n for entity in entities {\n\n assert_eq!(entity.id, 0);\n\n let health: Ref<u32> = entity.get_component::<u32>()?;\n\n assert_eq!(*health, 100);\n\n }\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn query_for_entity_mut() -> Result<()> {\n\n let mut entities = Entities::default();\n\n\n\n 
entities.register_component::<u32>();\n\n entities.register_component::<f32>();\n\n entities.create_entity().with_component(100_u32)?;\n\n entities.create_entity().with_component(10.0_f32)?;\n\n\n\n let mut query = Query::new(&entities);\n\n let entities: Vec<QueryEntity> = query.with_component::<u32>()?.run_entity();\n", "file_path": "src/entities/query.rs", "rank": 34, "score": 49398.783805178704 }, { "content": " assert_eq!(query.map, 3);\n\n assert_eq!(TypeId::of::<u32>(), query.type_ids[0]);\n\n assert_eq!(TypeId::of::<f32>(), query.type_ids[1]);\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n #[allow(clippy::float_cmp)]\n\n fn run_query() -> Result<()> {\n\n let mut entities = Entities::default();\n\n entities.register_component::<u32>();\n\n entities.register_component::<f32>();\n\n entities\n\n .create_entity()\n\n .with_component(10_u32)?\n\n .with_component(20.0_f32)?;\n\n entities.create_entity().with_component(5_u32)?;\n\n entities.create_entity().with_component(50.0_f32)?;\n\n entities\n", "file_path": "src/entities/query.rs", "rank": 35, "score": 49398.595872364705 }, { "content": "\n\n assert_eq!(entities.len(), 1);\n\n\n\n for entity in entities {\n\n assert_eq!(entity.id, 0);\n\n let mut health: RefMut<u32> = entity.get_component_mut::<u32>()?;\n\n assert_eq!(*health, 100);\n\n *health += 1;\n\n }\n\n\n\n let entities: Vec<QueryEntity> = query.with_component::<u32>()?.run_entity();\n\n for entity in entities {\n\n let health: Ref<u32> = entity.get_component::<u32>()?;\n\n assert_eq!(*health, 101);\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/entities/query.rs", "rank": 36, "score": 49397.97309752255 }, { "content": " .create_entity()\n\n .with_component(15_u32)?\n\n .with_component(25.0_f32)?;\n\n let mut query = Query::new(&entities);\n\n query.with_component::<u32>()?.with_component::<f32>()?;\n\n\n\n let query_result = query.run();\n\n let u32s = &query_result.1[0];\n\n let f32s = &query_result.1[1];\n\n let indexes = &query_result.0;\n\n\n\n 
assert!(u32s.len() == f32s.len() && u32s.len() == indexes.len());\n\n assert_eq!(u32s.len(), 2);\n\n\n\n let borrowed_first_u32 = u32s[0].borrow();\n\n let first_u32 = borrowed_first_u32.downcast_ref::<u32>().unwrap();\n\n assert_eq!(*first_u32, 10);\n\n\n\n let borrowed_first_f32 = f32s[0].borrow();\n\n let first_f32 = borrowed_first_f32.downcast_ref::<f32>().unwrap();\n", "file_path": "src/entities/query.rs", "rank": 37, "score": 49396.07071222018 }, { "content": " assert_eq!(query_indexes[0], 0);\n\n let borrowed_first_u32 = query_results[0][0].borrow();\n\n let first_u32 = borrowed_first_u32.downcast_ref::<u32>().unwrap();\n\n assert_eq!(*first_u32, 10);\n\n Ok(())\n\n }\n\n\n\n // Suggestion from community member SOS (https://github.com/00sos00)\n\n\n\n // I think i found the best way to write the querying system, so instead of making the Query struct\n\n // return components, we will make it return only entities that have the components we specified,\n\n // and then on the Entities struct we are gonna have these 2 methods\n\n\n\n // ```\n\n // fn get_component<T: Any>(&self, entity_id: usize) -> Ref<T> {\n\n // Ref::map(self.components[id].as_ref().unwrap().borrow(), |x| {\n\n // x.downcast_ref::<T>().unwrap()\n\n // })\n\n // }\n\n\n", "file_path": "src/entities/query.rs", "rank": 38, "score": 49395.477854144156 }, { "content": " assert_eq!(*first_f32, 20.0);\n\n\n\n let borrowed_second_u32 = u32s[1].borrow();\n\n let second_u32 = borrowed_second_u32.downcast_ref::<u32>().unwrap();\n\n assert_eq!(*second_u32, 15);\n\n\n\n let borrowed_second_f32 = f32s[1].borrow();\n\n let second_f32 = borrowed_second_f32.downcast_ref::<f32>().unwrap();\n\n assert_eq!(*second_f32, 25.0);\n\n\n\n assert_eq!(indexes[0], 0);\n\n assert_eq!(indexes[1], 3);\n\n\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn run_query_with_no_components() -> Result<()> {\n\n let mut entities = Entities::default();\n\n entities.register_component::<u32>();\n", "file_path": "src/entities/query.rs", "rank": 
39, "score": 49394.24986788699 }, { "content": "fn say_hello(name: NameWithNumberOfExclamations) {\n\n dbg!(name);\n\n}\n", "file_path": "tests/language/type_keyword.rs", "rank": 40, "score": 49084.578284774485 }, { "content": "#[derive(Eq, PartialEq, Hash, Debug)]\n\n#[allow(dead_code)]\n\nenum Letters {\n\n A,\n\n B,\n\n C,\n\n}\n\n\n\n// { key: value }\n", "file_path": "tests/language/hash_maps.rs", "rank": 41, "score": 46634.184782363714 }, { "content": "#[allow(dead_code)]\n\nenum FavoriteFood {\n\n Hamburger,\n\n HotDog,\n\n IceCream {\n\n topping: String,\n\n scoops: u32,\n\n flavor: String,\n\n },\n\n}\n", "file_path": "tests/language/structs.rs", "rank": 42, "score": 46278.86528478164 }, { "content": "fn print_dimensions((x, _): (f32, f32)) {\n\n dbg!(x);\n\n}\n", "file_path": "tests/language/tuples.rs", "rank": 43, "score": 46118.39969441156 }, { "content": "struct FavoriteThings {\n\n thing: Box<dyn Any + 'static>,\n\n}\n\n\n\nimpl FavoriteThings {\n\n pub fn get<T: Any + 'static>(&self) -> Option<&T> {\n\n self.thing.downcast_ref()\n\n }\n\n}\n", "file_path": "tests/language/any.rs", "rank": 44, "score": 45876.29302898218 }, { "content": "struct MyName {\n\n name: String,\n\n}\n", "file_path": "tests/language/options.rs", "rank": 45, "score": 45876.29302898218 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct Vector2 {\n\n pub x: f32,\n\n pub y: f32,\n\n}\n\n\n\nimpl Add for Vector2 {\n\n type Output = Self;\n\n\n\n fn add(self, rhs: Self) -> Self::Output {\n\n Self {\n\n x: self.x + rhs.x,\n\n y: self.y + rhs.y,\n\n }\n\n }\n\n}\n", "file_path": "tests/language/copy_and_clone.rs", "rank": 46, "score": 44624.84910608135 }, { "content": "type NameWithNumberOfExclamations = Option<(String, u8)>;\n\n\n", "file_path": "tests/language/type_keyword.rs", "rank": 47, "score": 42851.28619673097 }, { "content": "#[test]\n\nfn any_trait() {\n\n let u32s = FavoriteThings {\n\n thing: Box::new(10_u32),\n\n };\n\n let _floats = FavoriteThings {\n\n thing: 
Box::new(50.0_f32),\n\n };\n\n\n\n let extracted_u32 = u32s.get::<u32>().unwrap();\n\n assert_eq!(*extracted_u32, 10);\n\n}\n\n\n", "file_path": "tests/language/any.rs", "rank": 48, "score": 41195.072557815045 }, { "content": "#[test]\n\nfn tuples() {\n\n let dimensions = (100.0, 150.0);\n\n print_dimensions(dimensions);\n\n}\n\n\n", "file_path": "tests/language/tuples.rs", "rank": 49, "score": 39867.87781469262 }, { "content": "#[test]\n\nfn canary_test() {\n\n let first = 5;\n\n let second = 5;\n\n assert_eq!(first, second);\n\n}\n", "file_path": "tests/canary.rs", "rank": 50, "score": 39867.87781469262 }, { "content": "#[test]\n\nfn delete_resource() {\n\n let mut world = initialize_world();\n\n world.delete_resource::<FpsResource>();\n\n let deleted_resource = world.get_resource::<FpsResource>();\n\n assert!(deleted_resource.is_none());\n\n}\n\n\n", "file_path": "tests/resources.rs", "rank": 51, "score": 39867.87781469262 }, { "content": "#[test]\n\nfn generics() {\n\n let type_id = get_type_id::<u32>();\n\n dbg!(type_id);\n\n let type_id = get_type_id::<i32>();\n\n dbg!(type_id);\n\n}\n\n\n", "file_path": "tests/language/generics.rs", "rank": 52, "score": 39867.87781469262 }, { "content": "#[test]\n\nfn printing() {\n\n let cat = Some(\"Xilbe\");\n\n\n\n let _something = dbg!(cat);\n\n}\n", "file_path": "tests/language/printing.rs", "rank": 53, "score": 39867.87781469262 }, { "content": "#[test]\n\nfn options() {\n\n let _name = MyName {\n\n name: String::from(\"Brooks\"),\n\n };\n\n dbg!(hello(None));\n\n}\n\n\n", "file_path": "tests/language/options.rs", "rank": 54, "score": 39867.87781469262 }, { "content": "#[test]\n\nfn boxes() {\n\n let number = 15.0_f32;\n\n let _type_id = get_type_id(Box::new(number));\n\n}\n\n\n", "file_path": "tests/language/boxes.rs", "rank": 55, "score": 39867.87781469262 }, { "content": "#[test]\n\nfn get_resources_mutably() {\n\n let mut world = initialize_world();\n\n {\n\n let fps: &mut FpsResource = 
world.get_resource_mut::<FpsResource>().unwrap();\n\n fps.0 += 1;\n\n }\n\n let fps = world.get_resource::<FpsResource>().unwrap();\n\n assert_eq!(fps.0, 61);\n\n}\n\n\n", "file_path": "tests/resources.rs", "rank": 56, "score": 38678.11730356331 }, { "content": "#[test]\n\nfn copy_and_clone() {\n\n let player_location = Vector2 { x: 10.0, y: 15.0 };\n\n let player_velocity = Vector2 { x: 1.0, y: 2.0 };\n\n let _new_location = player_location + player_velocity;\n\n let _new_location = player_location + player_velocity;\n\n}\n\n\n", "file_path": "tests/language/copy_and_clone.rs", "rank": 57, "score": 37605.49443334924 }, { "content": "#[test]\n\nfn interior_mutability() {\n\n let number = 0;\n\n let wrapped_number = Rc::new(RefCell::new(number));\n\n dbg!(wrapped_number.clone());\n\n add_one(wrapped_number.clone());\n\n dbg!(wrapped_number);\n\n}\n\n\n", "file_path": "tests/language/interior_mutability.rs", "rank": 58, "score": 37605.49443334924 }, { "content": "#[test]\n\nfn create_and_get_resource_immutably() {\n\n let world = initialize_world();\n\n let fps = world.get_resource::<FpsResource>().unwrap();\n\n assert_eq!(fps.0, 60)\n\n}\n\n\n", "file_path": "tests/resources.rs", "rank": 59, "score": 37605.49443334924 }, { "content": "#[test]\n\nfn results() -> Result<()> {\n\n // this_always_fails()?;\n\n // this_also_fails()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/language/results.rs", "rank": 60, "score": 36771.4873169896 }, { "content": "fn initialize_world() -> World {\n\n let mut world = World::new();\n\n world.add_resource(FpsResource(60));\n\n world\n\n}\n\n\n", "file_path": "tests/resources.rs", "rank": 61, "score": 36771.4873169896 }, { "content": "#[allow(dead_code)]\n\nfn this_also_fails() -> Result<()> {\n\n Err(CustomError::OfCourseItFailed(42).into())\n\n}\n\n\n", "file_path": "tests/language/results.rs", "rank": 62, "score": 35698.86444677554 }, { "content": "#[allow(dead_code)]\n\nfn this_always_fails() -> Result<()> {\n\n 
Err(CustomError::AlwaysFails.into())\n\n}\n\n\n", "file_path": "tests/language/results.rs", "rank": 63, "score": 35698.86444677554 }, { "content": "#[test]\n\nfn converting_options_to_results() -> Result<()> {\n\n let something = maybe_something().ok_or(CustomError::SomethingWasNothing)?;\n\n assert_eq!(something, 52);\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/language/converting_options_to_results.rs", "rank": 64, "score": 33033.09053401544 }, { "content": "fn hello(name: Option<MyName>) -> String {\n\n let unwrapped_name = name.unwrap_or_else(|| MyName {\n\n name: \"stranger\".to_owned(),\n\n });\n\n format!(\"hello {}\", unwrapped_name.name)\n\n}\n\n\n", "file_path": "tests/language/options.rs", "rank": 65, "score": 30051.605130880387 }, { "content": "mod any;\n\nmod boxes;\n\nmod converting_options_to_results;\n\nmod copy_and_clone;\n\nmod default;\n\nmod filter_map;\n\nmod generics;\n\nmod hash_maps;\n\nmod interior_mutability;\n\nmod methods_on_primitive_types;\n\nmod options;\n\nmod printing;\n\nmod results;\n\nmod structs;\n\nmod tuples;\n\nmod type_id;\n\nmod type_keyword;\n", "file_path": "tests/language/mod.rs", "rank": 66, "score": 26213.98403163033 }, { "content": "#[test]\n", "file_path": "tests/language/default.rs", "rank": 67, "score": 26058.478978852803 }, { "content": "use std::ops::Deref;\n\n\n\n#[test]\n\n\n", "file_path": "tests/language/structs.rs", "rank": 68, "score": 25571.78263941254 }, { "content": "use std::{any::Any, cell::RefCell, rc::Rc};\n\n\n\nuse bbecs_tutorial::World;\n\nuse eyre::Result;\n\n\n\n#[test]\n", "file_path": "tests/entities.rs", "rank": 85, "score": 25449.097200774515 }, { "content": " .query()\n\n .with_component::<Location>()?\n\n .with_component::<Size>()?\n\n .run();\n\n\n\n let locations: &Vec<Rc<RefCell<dyn Any>>> = &query.1[0];\n\n let sizes: &Vec<Rc<RefCell<dyn Any>>> = &query.1[1];\n\n\n\n assert_eq!(locations.len(), sizes.len());\n\n assert_eq!(locations.len(), 2);\n\n\n\n let borrowed_first_location = 
locations[0].borrow();\n\n let first_location = borrowed_first_location.downcast_ref::<Location>().unwrap();\n\n assert_eq!(first_location.0, 42.0);\n\n let borrowed_first_size = sizes[0].borrow();\n\n let first_size = borrowed_first_size.downcast_ref::<Size>().unwrap();\n\n assert_eq!(first_size.0, 10.0);\n\n\n\n let borrowed_second_location = locations[1].borrow();\n\n let second_location = borrowed_second_location.downcast_ref::<Location>().unwrap();\n\n assert_eq!(second_location.0, 44.0);\n\n let mut borrowed_second_size = sizes[1].borrow_mut();\n\n let second_size = borrowed_second_size.downcast_mut::<Size>().unwrap();\n\n second_size.0 += 1.0;\n\n assert_eq!(second_size.0, 13.0);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/entities.rs", "rank": 86, "score": 25448.65686439357 }, { "content": "use std::collections::HashMap;\n\n\n\n#[test]\n", "file_path": "tests/language/hash_maps.rs", "rank": 87, "score": 24552.938850513252 }, { "content": "#[test]\n", "file_path": "tests/language/filter_map.rs", "rank": 88, "score": 24544.97001616645 }, { "content": "use std::{\n\n any::{Any, TypeId},\n\n collections::HashMap,\n\n vec,\n\n};\n\n\n\n#[test]\n", "file_path": "tests/language/type_id.rs", "rank": 89, "score": 24325.71532310192 }, { "content": "#[derive(Debug, Error)]\n\nenum CustomError {\n\n #[error(\"This always fails, I don't know what you expected\")]\n\n AlwaysFails,\n\n #[error(\"This also always fails, but the value it failed with was {0}\")]\n\n OfCourseItFailed(u32),\n\n}\n", "file_path": "tests/language/results.rs", "rank": 90, "score": 23360.060330100605 }, { "content": "#[test]\n", "file_path": "tests/language/methods_on_primitive_types.rs", "rank": 91, "score": 23054.775790508374 }, { "content": "#[derive(Debug, thiserror::Error)]\n\nenum CustomError {\n\n #[error(\"Something wasn't really something, it turned out to be nothing\")]\n\n SomethingWasNothing,\n\n}\n", "file_path": "tests/language/converting_options_to_results.rs", "rank": 92, 
"score": 21168.056754227753 }, { "content": "use std::any::Any;\n\n\n\nuse entities::{query::Query, Entities};\n\nuse eyre::Result;\n\nuse resource::Resource;\n\n\n\npub mod custom_errors;\n\nmod entities;\n\nmod resource;\n\n\n\n#[derive(Default, Debug)]\n\npub struct World {\n\n resources: Resource,\n\n entities: Entities,\n\n}\n\n\n\nimpl World {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n", "file_path": "src/lib.rs", "rank": 93, "score": 18.322598903140744 }, { "content": "\n\n pub fn delete_component_by_entity_id<T: Any>(&mut self, index: usize) -> Result<()> {\n\n self.entities.delete_component_by_entity_id::<T>(index)\n\n }\n\n\n\n pub fn add_component_to_entity_by_id(&mut self, data: impl Any, index: usize) -> Result<()> {\n\n self.entities.add_component_by_entity_id(data, index)\n\n }\n\n\n\n pub fn delete_entity_by_id(&mut self, index: usize) -> Result<()> {\n\n self.entities.delete_entity_by_id(index)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {}\n", "file_path": "src/lib.rs", "rank": 94, "score": 18.183129608566627 }, { "content": "use std::{\n\n any::{Any, TypeId},\n\n collections::HashMap,\n\n};\n\n\n\n#[derive(Default, Debug)]\n\npub struct Resource {\n\n data: HashMap<TypeId, Box<dyn Any>>,\n\n}\n\n\n\nimpl Resource {\n\n pub fn add(&mut self, data: impl Any) {\n\n let type_id = data.type_id();\n\n self.data.insert(type_id, Box::new(data));\n\n }\n\n\n\n pub fn get_ref<T: Any>(&self) -> Option<&T> {\n\n let type_id = TypeId::of::<T>();\n\n if let Some(data) = self.data.get(&type_id) {\n\n data.downcast_ref()\n", "file_path": "src/resource.rs", "rank": 95, "score": 14.66307494077035 }, { "content": " }\n\n\n\n /**\n\n This will remove the resource from the world, and it doesn't care if the resource exists at this point in time.\n\n */\n\n pub fn delete_resource<T: Any>(&mut self) {\n\n self.resources.remove::<T>();\n\n }\n\n\n\n pub fn register_component<T: Any + 'static>(&mut self) {\n\n self.entities.register_component::<T>();\n\n 
}\n\n\n\n pub fn create_entity(&mut self) -> &mut Entities {\n\n self.entities.create_entity()\n\n }\n\n\n\n pub fn query(&self) -> Query {\n\n Query::new(&self.entities)\n\n }\n", "file_path": "src/lib.rs", "rank": 96, "score": 11.945949783058712 }, { "content": " } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn get_mut<T: Any>(&mut self) -> Option<&mut T> {\n\n let type_id = TypeId::of::<T>();\n\n if let Some(data) = self.data.get_mut(&type_id) {\n\n data.downcast_mut()\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn remove<T: Any>(&mut self) {\n\n let type_id = TypeId::of::<T>();\n\n self.data.remove(&type_id);\n\n }\n\n}\n\n\n", "file_path": "src/resource.rs", "rank": 97, "score": 10.11411639208593 }, { "content": "use thiserror::Error;\n\n\n\n#[derive(Debug, Error)]\n\npub enum CustomErrors {\n\n #[error(\"Attempting to add component to an entity without calling create component first\")]\n\n CreateComponentNeverCalled,\n\n #[error(\"attempting to reference a component that wasn't registered\")]\n\n ComponentNotRegistered,\n\n #[error(\"attempting to reference an entity that doesn't exist\")]\n\n EntityDoesNotExist,\n\n #[error(\"attempting to get component data that does not exist\")]\n\n ComponentDataDoesNotExist,\n\n #[error(\"attempting to downcast to the wrong type\")]\n\n DowncastToWrongType,\n\n}\n", "file_path": "src/custom_errors.rs", "rank": 98, "score": 9.169948013133363 }, { "content": "\n\n /// Add a resource to the world so that anyone with access to the world can query for it immutably or mutably. Generally resources are pieces of data that are not associated with individual entities. An example of a resource could be the average fps.\n\n ///\n\n /// Resources are stored based on their type id, so we could store one u32 resource. If we want to store more u32's then we can wrap the data in a tuple struct. 
See the [integration tests](https://github.com/brooks-builds/bbecs_tutorial/blob/main/tests/resources.rs) for an example.\n\n ///\n\n /// ```\n\n /// use bbecs_tutorial::World;\n\n /// let mut world = World::new();\n\n /// world.add_resource(10_u32);\n\n /// ```\n\n pub fn add_resource(&mut self, resource_data: impl Any) {\n\n self.resources.add(resource_data);\n\n }\n\n\n\n /// Query for a resource and get a reference to it. The type of the resource must be added in so that we can find it.\n\n /// ```\n\n /// use bbecs_tutorial::World;\n\n /// let mut world = World::new();\n\n /// world.add_resource(10_u32);\n\n /// let resource = world.get_resource::<u32>().unwrap();\n", "file_path": "src/lib.rs", "rank": 99, "score": 8.95794072418495 } ]
Rust
src/main.rs
detedetedetedete/DictWalker
81ebbf52a0e124172b7697a1f8ee0748f98ed60e
extern crate serde_json; extern crate encoding; extern crate fern; extern crate clap; extern crate chrono; extern crate regex; #[macro_use] extern crate serde_derive; #[macro_use] extern crate log; #[macro_use] extern crate lazy_static; extern crate serde; extern crate libc; use dict_entry::DictEntry; use std::path::Path; use cli_api::get_args; use std::collections::HashSet; use std::iter::FromIterator; use std::fs::File; use std::io::Write; use training_entry::TrainingEntry; use phoneme_resolvers::DeadEndPhonemeResolver; use phoneme_resolvers::DictionaryPhonemeResolver; use phoneme_resolvers::PhonemeResolver; use phoneme_resolvers::MarkerPhonemeResolver; use phoneme_resolvers::DummyPhonemeResolver; use phoneme_resolvers::TensorflowPhonemeResolver; mod decode; mod dict_entry; mod cli_api; mod logging; mod training_entry; mod phonemes; mod phoneme_resolvers; mod model_def; mod model_runner; fn main() { let matches = get_args(); let dictionary = matches.value_of("dictionary").unwrap(); let mut output_file = match File::create(matches.value_of("output").unwrap()) { Ok(v) => v, Err(e) => { error!("Cannot open file \"{}\" for writing: {}", matches.value_of("output").unwrap(), e); panic!(); } }; let mut phoneme_resolvers: Vec<Box<PhonemeResolver>> = vec![ match matches.value_of("phoneme dictionary") { Some(path) => { match DictionaryPhonemeResolver::load(Path::new(path)) { Ok(v) => Box::new(v), Err(e) => { error!("Failed to instantiate DictionaryPhonemeResolver: \"{}\"", e); panic!(); } } }, None => Box::new(DummyPhonemeResolver::new()) }, match matches.value_of("Seq2Seq model folder") { Some(path) => { let model_folder_path = Path::new(path); match TensorflowPhonemeResolver::load(model_folder_path) { Ok(v) => Box::new(v), Err(e) => { error!("Failed to instantiate DictionaryPhonemeResolver: \"{}\"", e); panic!(); } } }, None => Box::new(DummyPhonemeResolver::new()) }, Box::new(MarkerPhonemeResolver::new()), Box::new(DeadEndPhonemeResolver::new()) ]; let mut entries = 
match DictEntry::collect_entries( Path::new(dictionary), &HashSet::from_iter(matches.value_of("audio extensions").unwrap().split(",").map(|v| String::from(v))), &HashSet::from_iter(matches.value_of("text extensions").unwrap().split(",").map(|v| String::from(v))) ) { Ok(v) => v, Err(e) => panic!("Failed to collect entries: {:?}", e) }; let t_entries: Vec<TrainingEntry> = entries .drain(0..) .map(|v| TrainingEntry::construct(v, &mut phoneme_resolvers)) .collect(); let json = match serde_json::to_string_pretty(&t_entries) { Ok(v) => v, Err(e) => { error!("Cannot serialize processed entries to JSON: {}", e); panic!(); } }; match output_file.write_all(json.as_bytes()) { Err(e) => { error!("Error during write to file {:?}: {}", output_file, e); panic!(); }, _ => () }; info!("Done."); }
extern crate serde_json; extern crate encoding; extern crate fern; extern crate clap; extern crate chrono; extern crate regex; #[macro_use] extern crate serde_derive; #[macro_use] extern crate log; #[macro_use] extern crate lazy_static; extern crate serde; extern crate libc; use dict_entry::DictEntry; use std::path::Path; use cli_api::get_args; use std::collections::HashSet; use std::iter::FromIterator; use std::fs::File; use std::io::Write; use training_entry::TrainingEntry; use phoneme_resolvers::DeadEndPhonemeResolver; use phoneme_resolvers::DictionaryPhonemeResolver; use phoneme_resolvers::PhonemeResolver; use phoneme_resolvers::MarkerPhonemeResolver; use phoneme_resolvers::DummyPhonemeResolver; use phoneme_resolvers::TensorflowPhonemeResolver; mod decode; mod dict_entry; mod cli_api; mod logging; mod training_entry; mod phonemes; mod phoneme_resolvers; mod model_def; mod model_runner; fn main() { let matches = get_args(); let dictionary = matches.value_of("dictionary").unwrap(); let mut output_file = match File::create(matches.value_of("output").unwrap()) { Ok(v) => v, Err(e) => { error!("Cannot open file \"{}\" for writing: {}", matches.value_of("output").unwrap(), e); panic!(); } }; let mut phoneme_resolvers: Vec<Box<PhonemeResolver>> = vec![ match matches.value_of("phoneme dictionary") { Some(path) => { match DictionaryPhonemeResolver::load(Path::new(path)) { Ok(v) => Box::new(v), Err(e) => { error!("Failed to instantiate DictionaryPhonemeResolver: \"{}\"", e); panic!(); } } }, None => Box::new(DummyPhonemeResolver::new()) }, match matches.value_of("Seq2Seq model folder") { Some(path) => { let model_folder_path = Path::new(path); match TensorflowPhonemeResolver::load(model_folder_path) { Ok(v) => Box::new(v), Err(e) => { error!("Failed to instantiate DictionaryPhonemeResolver: \"{}\"", e); panic!(); } } }, None => Box::new(DummyPhonemeResolver::new()) }, Box::new(MarkerPhonemeResolver::new())
in(0..) .map(|v| TrainingEntry::construct(v, &mut phoneme_resolvers)) .collect(); let json = match serde_json::to_string_pretty(&t_entries) { Ok(v) => v, Err(e) => { error!("Cannot serialize processed entries to JSON: {}", e); panic!(); } }; match output_file.write_all(json.as_bytes()) { Err(e) => { error!("Error during write to file {:?}: {}", output_file, e); panic!(); }, _ => () }; info!("Done."); }
, Box::new(DeadEndPhonemeResolver::new()) ]; let mut entries = match DictEntry::collect_entries( Path::new(dictionary), &HashSet::from_iter(matches.value_of("audio extensions").unwrap().split(",").map(|v| String::from(v))), &HashSet::from_iter(matches.value_of("text extensions").unwrap().split(",").map(|v| String::from(v))) ) { Ok(v) => v, Err(e) => panic!("Failed to collect entries: {:?}", e) }; let t_entries: Vec<TrainingEntry> = entries .dra
function_block-random_span
[ { "content": "fn serialize_phoneme_vec<S>(vec: &Vec<Phoneme>, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer {\n\n let mut str = String::new();\n\n for ph in vec {\n\n str.push_str(&ph.to_string());\n\n }\n\n serializer.serialize_str(&str)\n\n}\n\n\n\nimpl TrainingEntry {\n\n //TODO: perform some postprocessing on the transcript\n\n // --encoding errors:\n\n // remove \\u{feff}\n\n // replace \\u{9a} with ž\n\n // remove \\u{1f}\n\n\n\n // --errors:\n\n // _centrai centrai - error in transcript, included twice\n\n // indais _dais - fix detached accent\n\n // _is kvepimas - fix misspelled marker\n\n // _puslpais - spelling error, fix to _puslapis\n", "file_path": "src/training_entry.rs", "rank": 1, "score": 65484.04454243701 }, { "content": "pub fn setup_logger(level: LevelFilter) -> Result<(), fern::InitError> {\n\n\n\n let colors = ColoredLevelConfig::new()\n\n .trace(Color::White)\n\n .debug(Color::Yellow)\n\n .info(Color::Green)\n\n .warn(Color::BrightMagenta)\n\n .error(Color::Red);\n\n\n\n fern::Dispatch::new()\n\n .format(move |out, message, record| {\n\n out.finish(format_args!(\n\n \"{}[{}][{}] {}\",\n\n chrono::Local::now().format(\"[%Y-%m-%d][%H:%M:%S]\"),\n\n record.target(),\n\n colors.color(record.level()),\n\n message\n\n ))\n\n })\n\n .level(level)\n\n .chain(std::io::stdout())\n\n .apply()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/logging.rs", "rank": 2, "score": 61987.496067528235 }, { "content": "pub fn decode_windows_1257(bytes: &[u8]) -> Result<String, String> {\n\n match WINDOWS_1257.decode(bytes, DecoderTrap::Strict) {\n\n Ok(v) => Ok(v),\n\n Err(e) => return Err(format!(\"Failed to read bytes as Windows 1257: \\\"{}\\\"\", e))\n\n }\n\n}\n\n\n", "file_path": "src/decode.rs", "rank": 3, "score": 47226.41341373668 }, { "content": "pub fn decode_utf16_be(bytes: &[u8]) -> Result<String, String> {\n\n match UTF_16BE.decode(bytes, DecoderTrap::Strict) {\n\n Ok(v) => Ok(v),\n\n Err(e) => return Err(format!(\"Failed to read bytes 
as UTF_16BE: \\\"{}\\\"\", e))\n\n }\n\n}", "file_path": "src/decode.rs", "rank": 4, "score": 47226.41341373668 }, { "content": "pub fn decode_utf16_le(bytes: &[u8]) -> Result<String, String> {\n\n match UTF_16LE.decode(bytes, DecoderTrap::Strict) {\n\n Ok(v) => Ok(v),\n\n Err(e) => return Err(format!(\"Failed to read bytes as UTF_16LE: \\\"{}\\\"\", e))\n\n }\n\n}\n\n\n", "file_path": "src/decode.rs", "rank": 5, "score": 45891.39988546824 }, { "content": "pub fn get_args() -> ArgMatches<'static> {\n\n let matches = App::new(\"Audio dictionary walker\")\n\n .version(\"0.1.0\")\n\n .author(\"DT <[email protected]>\")\n\n .about(\"Walks the text <-> audio dictionary and produces a JSON with phonemes\")\n\n .arg(\n\n Arg::with_name(\"dictionary\")\n\n .short(\"i\")\n\n .long(\"dictionary\")\n\n .value_name(\"DIRECTORY\")\n\n .help(\"Path to the text <-> audio dictionary\")\n\n .takes_value(true)\n\n .required(true)\n\n .validator(valid_dir)\n\n )\n\n .arg(\n\n Arg::with_name(\"output\")\n\n .short(\"o\")\n\n .long(\"output\")\n\n .value_name(\"FILE\")\n", "file_path": "src/cli_api.rs", "rank": 6, "score": 44147.76485080278 }, { "content": "fn valid_dest_file(path_str: String) -> Result<(), String> {\n\n let path = Path::new(&path_str);\n\n if let Some(parent) = path.parent() {\n\n if parent.exists() {\n\n Ok(())\n\n } else {\n\n Err(format!(\"Directory to file \\\"{}\\\" does not exist.\", path_str))\n\n }\n\n\n\n } else {\n\n Err(format!(\"Cannot resolve parent directory of \\\"{}\\\".\", path_str))\n\n }\n\n}\n\n\n", "file_path": "src/cli_api.rs", "rank": 7, "score": 40329.045405459205 }, { "content": "fn valid_input_file(path_str: String) -> Result<(), String> {\n\n let path = Path::new(&path_str);\n\n if path.is_file() {\n\n Ok(())\n\n } else {\n\n Err(format!(\"Path {:?} does not exist or is not a file.\", path))\n\n }\n\n}\n\n\n", "file_path": "src/cli_api.rs", "rank": 8, "score": 40329.045405459205 }, { "content": "fn valid_s2s_model(path_str: String) -> 
Result<(), String> {\n\n valid_dir(path_str.clone())?;\n\n valid_input_file(format!(\"{}/model.json\", path_str))?;\n\n valid_input_file(format!(\"{}/encoder_inference_model.pb\", path_str))?;\n\n valid_input_file(format!(\"{}/decoder_inference_model.pb\", path_str))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cli_api.rs", "rank": 9, "score": 40277.055086025444 }, { "content": "fn valid_level(log_level_str: String) -> Result<(), String> {\n\n match level_from_string(log_level_str.as_ref()) {\n\n Ok(_) => Ok(()),\n\n Err(e) => Err(e)\n\n }\n\n}\n\n\n", "file_path": "src/cli_api.rs", "rank": 10, "score": 40225.58409310037 }, { "content": "pub fn level_from_string(level_str: &str) -> Result<LevelFilter, String> {\n\n match level_str.to_lowercase().as_ref() {\n\n \"trace\" => Ok(LevelFilter::Trace),\n\n \"debug\" => Ok(LevelFilter::Debug),\n\n \"info\" => Ok(LevelFilter::Info),\n\n \"warn\" => Ok(LevelFilter::Warn),\n\n \"error\" => Ok(LevelFilter::Error),\n\n _ => Err(format!(\"\\\"{}\\\" is not a valid LevelFilter value.\", level_str))\n\n }\n\n}", "file_path": "src/logging.rs", "rank": 11, "score": 37444.920388727216 }, { "content": "fn extension_list_valid(extension: String) -> Result<(), String> {\n\n lazy_static! 
{\n\n static ref ext_list_re: Regex = Regex::new(r\"[a-zA-Z0-9,]\").unwrap();\n\n }\n\n match ext_list_re.is_match(extension.as_ref()) {\n\n true => Ok(()),\n\n false => Err(format!(\"\\\"{}\\\" is not a valid extension list.\", extension))\n\n }\n\n}\n\n\n", "file_path": "src/cli_api.rs", "rank": 12, "score": 25359.568831405064 }, { "content": "fn valid_dir(path_str: String) -> Result<(), String> {\n\n let path = Path::new(&path_str);\n\n if path.is_dir() {\n\n Ok(())\n\n } else {\n\n Err(format!(\"Path \\\"{}\\\" does not exist or is not a directory.\", path_str))\n\n }\n\n}\n\n\n", "file_path": "src/cli_api.rs", "rank": 13, "score": 25359.568831405064 }, { "content": "use encoding::all::WINDOWS_1257;\n\nuse encoding::all::UTF_16BE;\n\nuse encoding::all::UTF_16LE;\n\nuse encoding::DecoderTrap;\n\nuse encoding::types::Encoding;\n\n\n", "file_path": "src/decode.rs", "rank": 14, "score": 24466.705130992992 }, { "content": "use log::LevelFilter;\n\nuse fern::colors::ColoredLevelConfig;\n\nuse fern::colors::Color;\n\n\n", "file_path": "src/logging.rs", "rank": 15, "score": 24466.06998431116 }, { "content": "pub trait PhonemeResolver {\n\n fn resolve(&self, graphemes: &str) -> Option<Vec<Phoneme>>;\n\n}\n\n\n\npub struct DictionaryPhonemeResolver {\n\n dict: HashMap<String, Vec<Phoneme>>\n\n}\n\n\n\nimpl DictionaryPhonemeResolver {\n\n pub fn load(path: &Path) -> io::Result<DictionaryPhonemeResolver> {\n\n let mut file = File::open(path)?;\n\n let mut dict_str = String::new();\n\n file.read_to_string(&mut dict_str)?;\n\n\n\n let mut dict: HashMap<String, Vec<Phoneme>> = HashMap::new();\n\n\n\n lazy_static! 
{\n\n static ref postfix_accent_re: Regex = Regex::new(r\"^(?P<graph>[^ ]+) +(?P<accent>.+)$\").unwrap(); // TODO fix var and named cap group names\n\n }\n\n\n", "file_path": "src/phoneme_resolvers.rs", "rank": 21, "score": 23942.44004604205 }, { "content": "use libc::c_void;\n\nuse libc::c_char;\n\nuse libc::size_t;\n\n\n\n#[link(name = \"model_runner\")]\n\nextern {\n\n pub fn getModelRunnerInstance(model_path: *const c_char) -> *const c_void;\n\n pub fn deleteModelRunnerInstance(ptr: *const c_void);\n\n pub fn modelRunnerInfer(ptr: *const c_void, input: *const *const c_char, input_n: size_t, result: *mut *const *const c_char, result_n: *mut size_t, max_len: size_t);\n\n}", "file_path": "src/model_runner.rs", "rank": 22, "score": 23070.17726388995 }, { "content": "#[derive(Serialize, Deserialize)]\n\npub struct ModelDef {\n\n pub name: String,\n\n pub in_tokens: Vec<String>,\n\n pub out_tokens: Vec<String>,\n\n pub max_in_length: usize,\n\n pub max_out_length: usize\n\n}", "file_path": "src/model_def.rs", "rank": 23, "score": 23062.747289903054 }, { "content": "use std::fmt;\n\nuse std::fmt::Formatter;\n\nuse std::fmt::Error;\n\nuse std::str::FromStr;\n\nuse serde::Serializer;\n\n\n\n#[derive(Debug)]\n\npub struct Phoneme {\n\n pub symbol: String,\n\n pub ordinal: i8,\n\n pub accented: bool,\n\n pub valid: bool\n\n}\n\n\n\nimpl Phoneme {\n\n pub fn from_symbol(symbol: &str, accent: bool) -> Phoneme {\n\n match symbol {\n\n \" \" => Phoneme::from_symbol(\"[PAUSE]\", accent),\n\n \"A\" => Phoneme {\n\n symbol: String::from(\"A\"),\n", "file_path": "src/phonemes.rs", "rank": 24, "score": 21897.744497413838 }, { "content": " Ok(Phoneme::from_symbol(s, false))\n\n }\n\n}\n\n\n\nimpl fmt::Display for Phoneme {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {\n\n if self.ordinal == 0 {\n\n write!(f, \"{}\", self.symbol)\n\n } else if self.accented {\n\n write!(f, \"{{{}}}\", self.symbol)\n\n } else {\n\n write!(f, \"[{}]\", self.symbol)\n\n }\n\n 
}\n\n}\n\n\n\nimpl Clone for Phoneme {\n\n fn clone(&self) -> Self {\n\n Phoneme {\n\n symbol: self.symbol.clone(),\n", "file_path": "src/phonemes.rs", "rank": 25, "score": 21896.822790285874 }, { "content": " \"DZ\" => Phoneme {\n\n symbol: String::from(\"DZ\"),\n\n ordinal: 8,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"DZ2\" => Phoneme {\n\n symbol: String::from(\"DZ2\"),\n\n ordinal: 9,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"E\" => Phoneme {\n\n symbol: String::from(\"E\"),\n\n ordinal: 10,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"E_\" => Phoneme {\n\n symbol: String::from(\"E_\"),\n", "file_path": "src/phonemes.rs", "rank": 26, "score": 21895.034848205294 }, { "content": " \"[NOISE]\" => Phoneme {\n\n symbol: String::from(\"NOISE\"),\n\n ordinal: 48,\n\n accented: accent,\n\n valid: true\n\n },\n\n _ => Phoneme {\n\n symbol: format!(\"ERR-{}\", symbol),\n\n ordinal: -1,\n\n accented: accent,\n\n valid: false\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for Phoneme {\n\n type Err = String;\n\n\n\n fn from_str(s: &str) -> Result<Self, <Self as FromStr>::Err> {\n", "file_path": "src/phonemes.rs", "rank": 27, "score": 21894.594215951034 }, { "content": " ordinal: 31,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"U\" => Phoneme {\n\n symbol: String::from(\"U\"),\n\n ordinal: 32,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"U_\" => Phoneme {\n\n symbol: String::from(\"U_\"),\n\n ordinal: 33,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"V\" => Phoneme {\n\n symbol: String::from(\"V\"),\n\n ordinal: 34,\n\n accented: accent,\n", "file_path": "src/phonemes.rs", "rank": 28, "score": 21894.339804245195 }, { "content": " ordinal: self.ordinal,\n\n accented: self.accented,\n\n valid: self.valid\n\n }\n\n }\n\n}\n\n\n\nimpl serde::Serialize for Phoneme {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where\n\n S: Serializer {\n\n serializer.serialize_str(&self.to_string())\n\n }\n\n}\n", "file_path": 
"src/phonemes.rs", "rank": 29, "score": 21893.120889920996 }, { "content": " \"R\" => Phoneme {\n\n symbol: String::from(\"R\"),\n\n ordinal: 28,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"S\" => Phoneme {\n\n symbol: String::from(\"S\"),\n\n ordinal: 29,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"S2\" => Phoneme {\n\n symbol: String::from(\"S2\"),\n\n ordinal: 30,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"T\" => Phoneme {\n\n symbol: String::from(\"T\"),\n", "file_path": "src/phonemes.rs", "rank": 30, "score": 21891.868845750916 }, { "content": " \"[INHALE]\" => Phoneme {\n\n symbol: String::from(\"INHALE\"),\n\n ordinal: 38,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"[EXHALE]\" => Phoneme {\n\n symbol: String::from(\"EXHALE\"),\n\n ordinal: 39,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"[SWALLOW]\" => Phoneme {\n\n symbol: String::from(\"SWALLOW\"),\n\n ordinal: 40,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"[SMACK]\" => Phoneme {\n\n symbol: String::from(\"SMACK\"),\n", "file_path": "src/phonemes.rs", "rank": 31, "score": 21891.868845750916 }, { "content": " \"IO_\" => Phoneme {\n\n symbol: String::from(\"IO_\"),\n\n ordinal: 18,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"IU\" => Phoneme {\n\n symbol: String::from(\"IU\"),\n\n ordinal: 19,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"IU_\" => Phoneme {\n\n symbol: String::from(\"IU_\"),\n\n ordinal: 20,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"J.\" => Phoneme {\n\n symbol: String::from(\"J.\"),\n", "file_path": "src/phonemes.rs", "rank": 32, "score": 21891.868845750916 }, { "content": " valid: true\n\n },\n\n \"H\" => Phoneme {\n\n symbol: String::from(\"H\"),\n\n ordinal: 15,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"I\" => Phoneme {\n\n symbol: String::from(\"I\"),\n\n ordinal: 16,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"I_\" => Phoneme {\n\n symbol: String::from(\"I_\"),\n\n ordinal: 17,\n\n accented: accent,\n\n 
valid: true\n\n },\n", "file_path": "src/phonemes.rs", "rank": 33, "score": 21891.794060159973 }, { "content": " ordinal: 1,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"A_\" => Phoneme {\n\n symbol: String::from(\"A_\"),\n\n ordinal: 2,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"B\" => Phoneme {\n\n symbol: String::from(\"B\"),\n\n ordinal: 3,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"C\" => Phoneme {\n\n symbol: String::from(\"C\"),\n\n ordinal: 4,\n\n accented: accent,\n", "file_path": "src/phonemes.rs", "rank": 34, "score": 21891.748919806814 }, { "content": " valid: true\n\n },\n\n \"C2\" => Phoneme {\n\n symbol: String::from(\"C2\"),\n\n ordinal: 5,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"CH\" => Phoneme {\n\n symbol: String::from(\"CH\"),\n\n ordinal: 6,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"D\" => Phoneme {\n\n symbol: String::from(\"D\"),\n\n ordinal: 7,\n\n accented: accent,\n\n valid: true\n\n },\n", "file_path": "src/phonemes.rs", "rank": 35, "score": 21891.734241665574 }, { "content": " valid: true\n\n },\n\n \"N\" => Phoneme {\n\n symbol: String::from(\"N\"),\n\n ordinal: 25,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"O_\" => Phoneme {\n\n symbol: String::from(\"O_\"),\n\n ordinal: 26,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"P\" => Phoneme {\n\n symbol: String::from(\"P\"),\n\n ordinal: 27,\n\n accented: accent,\n\n valid: true\n\n },\n", "file_path": "src/phonemes.rs", "rank": 36, "score": 21891.734241665574 }, { "content": " valid: true\n\n },\n\n \"Z\" => Phoneme {\n\n symbol: String::from(\"Z\"),\n\n ordinal: 35,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"Z2\" => Phoneme {\n\n symbol: String::from(\"Z2\"),\n\n ordinal: 36,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"[PAUSE]\" => Phoneme {\n\n symbol: String::from(\"PAUSE\"),\n\n ordinal: 37,\n\n accented: accent,\n\n valid: true\n\n },\n", "file_path": "src/phonemes.rs", "rank": 37, "score": 21891.734241665574 }, { 
"content": " valid: true\n\n },\n\n \"[DOOR]\" => Phoneme {\n\n symbol: String::from(\"DOOR\"),\n\n ordinal: 45,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"[EH]\" => Phoneme {\n\n symbol: String::from(\"EH\"),\n\n ordinal: 46,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"[MIDWORDPAUSE]\" => Phoneme {\n\n symbol: String::from(\"MIDWORDPAUSE\"),\n\n ordinal: 47,\n\n accented: accent,\n\n valid: true\n\n },\n", "file_path": "src/phonemes.rs", "rank": 38, "score": 21891.734241665574 }, { "content": " ordinal: 41,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"[CHAIR]\" => Phoneme {\n\n symbol: String::from(\"CHAIR\"),\n\n ordinal: 42,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"[STOMACH]\" => Phoneme {\n\n symbol: String::from(\"STOMACH\"),\n\n ordinal: 43,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"[PAGE]\" => Phoneme {\n\n symbol: String::from(\"PAGE\"),\n\n ordinal: 44,\n\n accented: accent,\n", "file_path": "src/phonemes.rs", "rank": 39, "score": 21891.719742235466 }, { "content": " ordinal: 11,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"E3_\" => Phoneme {\n\n symbol: String::from(\"E3_\"),\n\n ordinal: 12,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"F\" => Phoneme {\n\n symbol: String::from(\"F\"),\n\n ordinal: 13,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"G\" => Phoneme {\n\n symbol: String::from(\"G\"),\n\n ordinal: 14,\n\n accented: accent,\n", "file_path": "src/phonemes.rs", "rank": 40, "score": 21891.719742235466 }, { "content": " ordinal: 21,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"K\" => Phoneme {\n\n symbol: String::from(\"K\"),\n\n ordinal: 22,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"L\" => Phoneme {\n\n symbol: String::from(\"L\"),\n\n ordinal: 23,\n\n accented: accent,\n\n valid: true\n\n },\n\n \"M\" => Phoneme {\n\n symbol: String::from(\"M\"),\n\n ordinal: 24,\n\n accented: accent,\n", "file_path": "src/phonemes.rs", "rank": 41, "score": 21891.719742235466 }, { "content": 
"\n\nimpl PhonemeResolver for DummyPhonemeResolver {\n\n fn resolve(&self, _graphemes: &str) -> Option<Vec<Phoneme>> {\n\n None\n\n }\n\n}\n\n\n\npub struct TensorflowPhonemeResolver {\n\n ptr: *const c_void,\n\n allowed_tokens: HashSet<String>\n\n}\n\n\n\nimpl TensorflowPhonemeResolver {\n\n pub fn load(model_folder_path: &Path) -> Result<TensorflowPhonemeResolver, Box<dyn Error>> {\n\n let model_def: ModelDef = serde_json::from_reader(\n\n File::open(model_folder_path.join(\"model.json\")).unwrap()\n\n ).unwrap();\n\n let path: CString = CString::new(model_folder_path.as_os_str().to_str().unwrap())?;\n\n Ok(TensorflowPhonemeResolver {\n\n ptr: unsafe { model_runner::getModelRunnerInstance(path.as_ptr()) },\n", "file_path": "src/phoneme_resolvers.rs", "rank": 42, "score": 20582.12048829084 }, { "content": "use phonemes::Phoneme;\n\nuse std::collections::HashMap;\n\nuse std::path::Path;\n\nuse std::fs::File;\n\nuse std::io;\n\nuse std::io::Read;\n\nuse regex::Regex;\n\nuse std::str::FromStr;\n\nuse std::error::Error;\n\nuse libc::c_void;\n\nuse libc::c_char;\n\nuse libc::size_t;\n\nuse std::ffi::CString;\n\nuse model_runner;\n\nuse std::ffi::CStr;\n\nuse std::collections::HashSet;\n\nuse model_def::ModelDef;\n\nuse std::iter::FromIterator;\n\n\n", "file_path": "src/phoneme_resolvers.rs", "rank": 43, "score": 20581.823275169307 }, { "content": " allowed_tokens: HashSet::from_iter(model_def.in_tokens)\n\n })\n\n }\n\n}\n\n\n\nimpl Drop for TensorflowPhonemeResolver {\n\n fn drop(&mut self) {\n\n unsafe {\n\n model_runner::deleteModelRunnerInstance(self.ptr)\n\n }\n\n }\n\n}\n\n\n\nimpl PhonemeResolver for TensorflowPhonemeResolver {\n\n fn resolve(&self, graphemes: &str) -> Option<Vec<Phoneme>> {\n\n if graphemes.contains(\"[midwordpause]\") {\n\n let mut result: Vec<Phoneme> = vec![];\n\n for (idx, part) in graphemes.split(\"[midwordpause]\").enumerate() {\n\n match self.resolve(part) {\n\n Some(mut r) => {\n", "file_path": "src/phoneme_resolvers.rs", "rank": 44, 
"score": 20580.118898479093 }, { "content": " dict\n\n })\n\n }\n\n}\n\n\n\nimpl PhonemeResolver for DictionaryPhonemeResolver {\n\n fn resolve(&self, graphemes: &str) -> Option<Vec<Phoneme>> {\n\n match self.dict.get(graphemes) {\n\n Some(v) => Some(v.clone().to_vec()),\n\n None => None\n\n }\n\n }\n\n}\n\n\n\npub struct MarkerPhonemeResolver {}\n\n\n\nimpl MarkerPhonemeResolver {\n\n pub fn new() -> MarkerPhonemeResolver {\n\n MarkerPhonemeResolver{}\n\n }\n", "file_path": "src/phoneme_resolvers.rs", "rank": 45, "score": 20579.723734678268 }, { "content": " if idx != 0 {\n\n result.push(Phoneme::from_str(\"[MIDWORDPAUSE]\").unwrap());\n\n }\n\n r.drain(0..).for_each(|v|result.push(v));\n\n },\n\n None => return None\n\n }\n\n }\n\n return Some(result);\n\n }\n\n\n\n let mut phonemes: Vec<String> = Vec::new();\n\n let mut _graphemes: Vec<CString> = Vec::new();\n\n let mut last = 0;\n\n for c in graphemes.chars() {\n\n let len = c.len_utf8();\n\n let slice = &graphemes[last..last+len];\n\n if !self.allowed_tokens.contains(slice) {\n\n return None;\n\n }\n", "file_path": "src/phoneme_resolvers.rs", "rank": 46, "score": 20578.77380315073 }, { "content": " for entry in dict_str.split(\"\\n\") {\n\n let caps = match postfix_accent_re.captures(entry) {\n\n Some(v) => v,\n\n None => {\n\n warn!(\"Cannot parse dictionary line \\\"{}\\\" as a dictionary entry\", entry);\n\n continue;\n\n }\n\n };\n\n\n\n let gr = caps.get(1).unwrap().as_str();\n\n let ph: Vec<Phoneme> = caps.get(2).unwrap()\n\n .as_str().split_whitespace()\n\n .map(|v| Phoneme::from_str(v).unwrap())\n\n .collect();\n\n\n\n dict.insert(gr.to_string(), ph);\n\n }\n\n\n\n\n\n Ok(DictionaryPhonemeResolver {\n", "file_path": "src/phoneme_resolvers.rs", "rank": 47, "score": 20578.11505130178 }, { "content": "}\n\n\n\nimpl PhonemeResolver for MarkerPhonemeResolver {\n\n fn resolve(&self, graphemes: &str) -> Option<Vec<Phoneme>> {\n\n match graphemes.starts_with(\"[\") {\n\n true => {\n\n let ph = 
Phoneme::from_str(graphemes).unwrap();\n\n if ph.valid {\n\n Some(vec![ph])\n\n } else {\n\n None\n\n }\n\n },\n\n false => None\n\n }\n\n }\n\n}\n\n\n\npub struct DeadEndPhonemeResolver {}\n\n\n", "file_path": "src/phoneme_resolvers.rs", "rank": 48, "score": 20578.04520308844 }, { "content": " let c_str: &CStr = CStr::from_ptr(*result.offset(i as isize));\n\n phonemes.push(c_str.to_str().unwrap().clone().to_owned());\n\n libc::free(*result.offset(i as isize) as *mut c_void);\n\n }\n\n libc::free(result as *mut c_void);\n\n }\n\n\n\n let result: Vec<Phoneme> = phonemes.iter()\n\n .skip(1)\n\n .take(phonemes.len()-2)\n\n .map(|val| Phoneme::from_str(val).unwrap())\n\n .collect();\n\n\n\n Some(result)\n\n }\n\n}\n", "file_path": "src/phoneme_resolvers.rs", "rank": 49, "score": 20577.427115020095 }, { "content": " _graphemes.push(CString::new(slice).unwrap());\n\n last += len;\n\n }\n\n unsafe {\n\n let mut grphms: Vec<*const c_char> = vec![std::ptr::null(); graphemes.chars().count()];\n\n let mut result_size: size_t = 0;\n\n let mut result: *const *const c_char = std::ptr::null();\n\n for (idx, c) in _graphemes.iter().enumerate() {\n\n grphms[idx] = c.as_ptr();\n\n }\n\n model_runner::modelRunnerInfer(\n\n self.ptr,\n\n grphms.as_ptr(),\n\n grphms.len(),\n\n &mut result,\n\n &mut result_size,\n\n 255\n\n );\n\n\n\n for i in 0..result_size {\n", "file_path": "src/phoneme_resolvers.rs", "rank": 50, "score": 20575.813660378935 }, { "content": "impl DeadEndPhonemeResolver {\n\n pub fn new() -> DeadEndPhonemeResolver {\n\n DeadEndPhonemeResolver{}\n\n }\n\n}\n\n\n\nimpl PhonemeResolver for DeadEndPhonemeResolver {\n\n fn resolve(&self, graphemes: &str) -> Option<Vec<Phoneme>> {\n\n warn!(\"Failed to resolve phonemes for word \\\"{}\\\"\", graphemes);\n\n Some(vec![Phoneme::from_str(graphemes).unwrap()])\n\n }\n\n}\n\n\n\npub struct DummyPhonemeResolver {}\n\n\n\nimpl DummyPhonemeResolver {\n\n pub fn new() -> DummyPhonemeResolver {\n\n DummyPhonemeResolver{}\n\n 
}\n\n}\n", "file_path": "src/phoneme_resolvers.rs", "rank": 51, "score": 20573.638722098698 }, { "content": "use clap::App;\n\nuse clap::Arg;\n\nuse clap::ArgMatches;\n\nuse std::path::Path;\n\nuse logging::level_from_string;\n\nuse regex::Regex;\n\nuse logging::setup_logger;\n\n\n", "file_path": "src/cli_api.rs", "rank": 52, "score": 13.99178617290435 }, { "content": " let mut file = match File::open(path) {\n\n Ok(v) => v,\n\n Err(e) => return Err(String::from(e.description()))\n\n };\n\n let mut bytes: Vec<u8> = Vec::new();\n\n match file.read_to_end(&mut bytes) {\n\n Ok(_) => (),\n\n Err(e) => return Err(String::from(e.description()))\n\n };\n\n\n\n match String::from_utf8(bytes.clone()) {\n\n Ok(v) => Ok(v),\n\n Err(e) => {\n\n debug!(\"Failed to read {:?} as UTF-8 ({}), checking for 0xFF 0xFE bytes...\", path, e);\n\n if bytes.len() >= 2 && bytes[0] == 0xFF && bytes[1] == 0xFE {\n\n trace!(\"0xFF 0xFE bytes detected. Trying to decode as UTF-16LE...\");\n\n match decode_utf16_le(&bytes[2..]) {\n\n Ok(v) => Ok(v),\n\n Err(e) => {\n\n debug!(\"Failed to read as UTF-16LE: {}\", e);\n", "file_path": "src/dict_entry.rs", "rank": 53, "score": 13.957992742208644 }, { "content": " .long(\"model\")\n\n .value_name(\"FOLDER\")\n\n .help(\"Seq2Seq grapheme to phoneme model folder\")\n\n .takes_value(true)\n\n .required(false)\n\n .validator(valid_s2s_model)\n\n )\n\n .get_matches();\n\n\n\n\n\n setup_logger(\n\n level_from_string(\n\n matches.value_of(\"log level\").unwrap()\n\n ).unwrap()\n\n ).expect(\"Failed to initialize logging.\");\n\n\n\n if !matches.is_present(\"force\") {\n\n let path = Path::new(matches.value_of(\"output\").unwrap());\n\n if path.exists() {\n\n error!(\"Output file already exists! 
Use the --force (or -f) to force overwriting of the output file.\");\n\n panic!();\n\n }\n\n\n\n }\n\n\n\n matches\n\n}", "file_path": "src/cli_api.rs", "rank": 54, "score": 13.261676091284187 }, { "content": "use phonemes::Phoneme;\n\nuse dict_entry::DictEntry;\n\nuse regex::Regex;\n\nuse phoneme_resolvers::PhonemeResolver;\n\nuse std::str::FromStr;\n\nuse serde::Serializer;\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct TrainingEntry {\n\n pub transcript: String,\n\n #[serde(serialize_with = \"serialize_phoneme_vec\")]\n\n pub phonemes: Vec<Phoneme>,\n\n pub audio_path: String\n\n}\n\n\n", "file_path": "src/training_entry.rs", "rank": 55, "score": 12.75133680721951 }, { "content": " }\n\n }\n\n\n\n // TODO: maybe use the .? syntax to propagate Err up\n\n pub fn collect_entries(dir: &Path, audio_exts: &HashSet<String>, text_exts: &HashSet<String>) -> Result<Vec<DictEntry>, String> {\n\n let mut paths: Vec<String> = Vec::new();\n\n let mut files: Vec<String> = Vec::new();\n\n let mut entries: HashMap<String, DictEntry> = HashMap::new();\n\n\n\n let dir_str = match dir.to_str() {\n\n Some(v) => String::from(v),\n\n None => return Err(format!(\"Cannot get string representation of path \\\"{:?}\\\"\", dir))\n\n };\n\n if dir.is_dir() {\n\n paths.push(dir_str);\n\n } else {\n\n files.push(dir_str);\n\n }\n\n\n\n while !paths.is_empty() {\n", "file_path": "src/dict_entry.rs", "rank": 56, "score": 10.437896353517512 }, { "content": "\n\n {\n\n let mut entry = entries.entry(String::from(file_stem)).or_insert(DictEntry::new_empty());\n\n let extension = match file.extension() {\n\n Some(v) => match v.to_str() {\n\n Some(v) => v,\n\n None => return Err(format!(\"Cannot get &str from OsStr \\\"{:?}\\\"!\", v))\n\n },\n\n None => \"\"\n\n };\n\n\n\n entry.name = String::from(file_stem);\n\n entry.containing_dir = match file.parent() {\n\n Some(v) => match v.to_str() {\n\n Some(v) => String::from(v),\n\n None => return Err(format!(\"Cannot get &str from Path \\\"{:?}\\\"!\", 
v))\n\n },\n\n None => return Err(format!(\"Cannot resolve containing directory for {:?}!\", file))\n\n };\n\n\n", "file_path": "src/dict_entry.rs", "rank": 57, "score": 9.426272177831217 }, { "content": " .value_name(\"EXTS\")\n\n .help(\"comma delimited text extensions\")\n\n .takes_value(true)\n\n .required(false)\n\n .validator(extension_list_valid)\n\n .default_value(\"txt\")\n\n )\n\n .arg(\n\n Arg::with_name(\"phoneme dictionary\")\n\n .short(\"p\")\n\n .long(\"phonemes\")\n\n .value_name(\"DICTIONARY\")\n\n .help(\"path to grapheme-phoneme dictionary\")\n\n .takes_value(true)\n\n .required(false)\n\n .validator(valid_input_file)\n\n )\n\n .arg(\n\n Arg::with_name(\"Seq2Seq model folder\")\n\n .short(\"m\")\n", "file_path": "src/cli_api.rs", "rank": 58, "score": 9.245696680212607 }, { "content": " String::from(multi_space_re.replace_all(str.replace(\"\\r\", \" \")\n\n .replace(\"\\n\", \" \")\n\n .replace(\"\\t\", \" \")\n\n .trim(), \" \"))\n\n }\n\n\n\n fn convert_to_phonemes(str: &str, resolvers: &Vec<Box<PhonemeResolver>>) -> Vec<Phoneme> {\n\n let mut result: Vec<Phoneme> = Vec::new();\n\n let words: Vec<&str> = str.split_whitespace().collect();\n\n\n\n for i in 0..words.len() {\n\n let word = if words[i].starts_with(\"[\") {\n\n words[i].to_string()\n\n } else {\n\n words[i].to_lowercase()\n\n };\n\n\n\n for resolver in resolvers.iter() {\n\n match resolver.resolve(&word) {\n\n Some(mut v) => {\n", "file_path": "src/training_entry.rs", "rank": 59, "score": 8.845937391618875 }, { "content": " paths.push(String::from(path_str));\n\n } else {\n\n trace!(\"Adding file \\\"{}\\\".\", path_str);\n\n files.push(String::from(path_str));\n\n }\n\n }\n\n }\n\n\n\n for file_str in files {\n\n let file = Path::new(&file_str);\n\n\n\n let file_stem = match file.file_stem() {\n\n Some(v) => match v.to_str() {\n\n Some(v) => v,\n\n None => return Err(format!(\"Cannot get &str from OsStr \\\"{:?}\\\"!\", v))\n\n },\n\n None => return Err(format!(\"Cannot get file stem 
from {:?}!\", file))\n\n };\n\n\n\n let mut remove: Option<String> = None;\n", "file_path": "src/dict_entry.rs", "rank": 60, "score": 8.378607830748916 }, { "content": " v.drain(0..)\n\n .for_each(|v| result.push(v));\n\n if i != words.len()-1 {\n\n result.push(Phoneme::from_str(\" \").unwrap());\n\n }\n\n break;\n\n },\n\n None => continue\n\n }\n\n }\n\n }\n\n\n\n result\n\n }\n\n\n\n pub fn construct(de: DictEntry, resolvers: &Vec<Box<PhonemeResolver>>) -> Self {\n\n let mut t = TrainingEntry::fix_encoding_errors(de.transcript);\n\n t = TrainingEntry::fix_spelling_errors(t);\n\n t = TrainingEntry::process_markers(t);\n\n t = TrainingEntry::process_accents(t);\n\n t = TrainingEntry::perform_postprocessing(t);\n\n\n\n TrainingEntry {\n\n phonemes: TrainingEntry::convert_to_phonemes(&t, resolvers),\n\n audio_path: de.audio_path,\n\n transcript: t\n\n }\n\n }\n\n}", "file_path": "src/training_entry.rs", "rank": 61, "score": 7.6806375065415935 }, { "content": "use std::path::Path;\n\nuse std::fs::File;\n\nuse std::error::Error;\n\nuse std::io::Read;\n\nuse decode::decode_utf16_le;\n\nuse decode::decode_windows_1257;\n\nuse decode::decode_utf16_be;\n\nuse std::collections::HashMap;\n\nuse std::fs::read_dir;\n\nuse std::collections::HashSet;\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct DictEntry {\n\n pub name: String,\n\n pub transcript: String,\n\n pub containing_dir: String,\n\n pub audio_path: String,\n\n pub transcript_path: String\n\n}\n\n\n", "file_path": "src/dict_entry.rs", "rank": 62, "score": 7.577570213973795 }, { "content": " let p = paths.remove(0);\n\n debug!(\"Visiting path \\\"{}\\\".\", p);\n\n\n\n let dir_entries = match read_dir(p) {\n\n Ok(v) => v,\n\n Err(e) => return Err(String::from(e.description()))\n\n };\n\n\n\n for entry in dir_entries {\n\n let entry = match entry {\n\n Ok(v) => v,\n\n Err(e) => return Err(String::from(e.description()))\n\n };\n\n\n\n let path_str = match entry.path().to_str() {\n\n Some(v) => String::from(v),\n\n None 
=> return Err(format!(\"Cannot get string representation of path \\\"{:?}\\\"\", entry.path()))\n\n };\n\n if entry.path().is_dir() {\n\n trace!(\"Adding path \\\"{}\\\".\", path_str);\n", "file_path": "src/dict_entry.rs", "rank": 63, "score": 6.974558665098336 }, { "content": " trace!(\"Will try to read as windows 1257...\");\n\n decode_windows_1257(&bytes)\n\n }\n\n }\n\n } else if bytes.len() >= 2 && bytes[0] == 0xFE && bytes[1] == 0xFF {\n\n trace!(\"0xFE 0xFF bytes detected. Trying to decode as UTF-16BE...\");\n\n match decode_utf16_be(&bytes[2..]) {\n\n Ok(v) => Ok(v),\n\n Err(e) => {\n\n debug!(\"Failed to read as UTF-16BE: {}\", e);\n\n trace!(\"Will try to read as windows 1257...\");\n\n decode_windows_1257(&bytes)\n\n }\n\n }\n\n } else {\n\n debug!(\"No 0xFF 0xFE bytes.\");\n\n trace!(\"Will try to read as windows 1257...\");\n\n decode_windows_1257(&bytes)\n\n }\n\n }\n", "file_path": "src/dict_entry.rs", "rank": 64, "score": 6.072226657690804 }, { "content": " if audio_exts.contains(&extension.to_lowercase()) {\n\n if !entry.audio_path.is_empty() {\n\n return Err(format!(\"Naming collision: \\\"{}\\\" vs \\\"{}\\\"!\", entry.audio_path, file_str));\n\n }\n\n entry.audio_path = String::from(file_str.clone());\n\n } else if text_exts.contains(&extension.to_lowercase()) {\n\n if !entry.transcript_path.is_empty() {\n\n return Err(format!(\"Naming collision: \\\"{}\\\" vs \\\"{}\\\"!\", entry.transcript_path, file_str));\n\n }\n\n entry.transcript_path = String::from(file_str.clone());\n\n entry.transcript = DictEntry::read_transcript(file)?;\n\n } else {\n\n warn!(\"Unknown file extension \\\"{}\\\", file {:?}!\", extension, file);\n\n remove = Some(String::from(file_stem));\n\n }\n\n }\n\n\n\n match remove {\n\n Some(v) => { entries.remove(&v); },\n\n None => ()\n", "file_path": "src/dict_entry.rs", "rank": 65, "score": 4.266188710311047 }, { "content": " .help(\"Path to output json file\")\n\n .takes_value(true)\n\n .required(true)\n\n 
.validator(valid_dest_file)\n\n .default_value(\"./output.json\")\n\n )\n\n .arg(\n\n Arg::with_name(\"force\")\n\n .short(\"f\")\n\n .long(\"force\")\n\n .help(\"Allow overwriting of the output file\")\n\n .takes_value(false)\n\n .required(false)\n\n )\n\n .arg(\n\n Arg::with_name(\"log level\")\n\n .short(\"l\")\n\n .long(\"level\")\n\n .value_name(\"LEVEL\")\n\n .help(\"logging level\")\n", "file_path": "src/cli_api.rs", "rank": 66, "score": 3.838630813546995 }, { "content": " .replace(\"_pilvas\", \"[STOMACH]\")\n\n .replace(\"_garsas\", \"[NOISE]\")\n\n .replace(\"_puslapis\", \"[PAGE]\")\n\n .replace(\"_durys\", \"[DOOR]\")\n\n .replace(\"_eh\", \"[EH]\")\n\n .replace(\"-\", \"[MIDWORDPAUSE]\")\n\n }\n\n\n\n fn process_accents(str: String) -> String {\n\n lazy_static! {\n\n static ref postfix_accent_re: Regex = Regex::new(r\"(?P<last>[^ ])(?P<accent>_[^ ]+)\").unwrap();\n\n }\n\n postfix_accent_re.replace_all(&str, \"$last\")\n\n .replace(\"_\", \"\")\n\n }\n\n\n\n fn perform_postprocessing(str: String) -> String {\n\n lazy_static! {\n\n static ref multi_space_re: Regex = Regex::new(r\" {2,}\").unwrap();\n\n }\n", "file_path": "src/training_entry.rs", "rank": 67, "score": 2.773216510332427 }, { "content": "\n\n // --post cleaning ops:\n\n // remove \\r \\n and \\t\n\n // remove spaces from start of string\n\n // remove trailing spaces\n\n // replace multiple spaces with a single space\n\n\n\n fn fix_encoding_errors(str: String) -> String {\n\n str.replace(\"\\u{feff}\", \"\")\n\n .replace(\"\\u{9a}\", \"ž\")\n\n .replace(\"\\u{1f}\", \"\")\n\n }\n\n\n\n fn fix_spelling_errors(str: String) -> String {\n\n str.replace(\"_centrai centrai\", \"centrai\")\n\n .replace(\"indais _dais\", \"indais_dais\")\n\n .replace(\"_is kvepimas\", \"_iskvepimas\")\n\n .replace(\"_puslpais\", \"_puslapis\")\n\n .replace(\"_dutys\", \"_durys\")\n\n .replace(\"Simono-Petro\", \"Simono Petro\")\n", "file_path": "src/training_entry.rs", "rank": 68, "score": 1.507819169457083 } ]
Rust
src/pixel.rs
B-Reif/asefile
4bb33e1398461b3d153d99ab0830bb87f347dce2
use image::{Pixel, Rgba}; use crate::{reader::AseReader, AsepriteParseError, ColorPalette, PixelFormat, Result}; use std::{borrow::Cow, io::Read}; fn read_rgba(chunk: &[u8]) -> Result<Rgba<u8>> { let mut reader = AseReader::new(chunk); let red = reader.byte()?; let green = reader.byte()?; let blue = reader.byte()?; let alpha = reader.byte()?; Ok(Rgba::from_channels(red, green, blue, alpha)) } #[derive(Debug, Clone, Copy)] pub(crate) struct Grayscale { value: u8, alpha: u8, } impl Grayscale { fn new(chunk: &[u8]) -> Result<Self> { let mut reader = AseReader::new(chunk); let value = reader.byte()?; let alpha = reader.byte()?; Ok(Self { value, alpha }) } pub(crate) fn into_rgba(self) -> Rgba<u8> { let Self { value, alpha } = self; Rgba::from_channels(value, value, value, alpha) } } #[derive(Debug, Clone, Copy)] pub(crate) struct Indexed(u8); impl Indexed { pub(crate) fn value(&self) -> u8 { self.0 } pub(crate) fn as_rgba( &self, palette: &ColorPalette, transparent_color_index: u8, layer_is_background: bool, ) -> Option<Rgba<u8>> { let index = self.0; palette.color(index as u32).map(|c| { let alpha = if transparent_color_index == index && !layer_is_background { 0 } else { c.alpha() }; Rgba::from_channels(c.red(), c.green(), c.blue(), alpha) }) } } fn output_size(pixel_format: PixelFormat, expected_pixel_count: usize) -> usize { pixel_format.bytes_per_pixel() * expected_pixel_count } #[derive(Debug)] pub(crate) enum Pixels { Rgba(Vec<Rgba<u8>>), Grayscale(Vec<Grayscale>), Indexed(Vec<Indexed>), } impl Pixels { fn from_bytes(bytes: Vec<u8>, pixel_format: PixelFormat) -> Result<Self> { match pixel_format { PixelFormat::Indexed { .. 
} => { let pixels = bytes.iter().map(|byte| Indexed(*byte)).collect(); Ok(Self::Indexed(pixels)) } PixelFormat::Grayscale => { if bytes.len() % 2 != 0 { return Err(AsepriteParseError::InvalidInput( "Incorrect length of bytes for Grayscale image data".to_string(), )); } let pixels: Result<Vec<_>> = bytes.chunks_exact(2).map(Grayscale::new).collect(); pixels.map(Self::Grayscale) } PixelFormat::Rgba => { if bytes.len() % 4 != 0 { return Err(AsepriteParseError::InvalidInput( "Incorrect length of bytes for RGBA image data".to_string(), )); } let pixels: Result<Vec<_>> = bytes.chunks_exact(4).map(read_rgba).collect(); pixels.map(Self::Rgba) } } } pub(crate) fn from_raw<T: Read>( reader: AseReader<T>, pixel_format: PixelFormat, expected_pixel_count: usize, ) -> Result<Self> { let expected_output_size = output_size(pixel_format, expected_pixel_count); reader .take_bytes(expected_output_size) .and_then(|bytes| Self::from_bytes(bytes, pixel_format)) } pub(crate) fn from_compressed<T: Read>( reader: AseReader<T>, pixel_format: PixelFormat, expected_pixel_count: usize, ) -> Result<Self> { let expected_output_size = output_size(pixel_format, expected_pixel_count); reader .unzip(expected_output_size) .and_then(|bytes| Self::from_bytes(bytes, pixel_format)) } pub(crate) fn byte_count(&self) -> usize { match self { Pixels::Rgba(v) => v.len() * 4, Pixels::Grayscale(v) => v.len() * 2, Pixels::Indexed(v) => v.len(), } } pub(crate) fn clone_as_image_rgba( &self, index_resolver_data: IndexResolverData<'_>, ) -> Cow<Vec<image::Rgba<u8>>> { match self { Pixels::Rgba(rgba) => Cow::Borrowed(rgba), Pixels::Grayscale(grayscale) => { Cow::Owned(grayscale.iter().map(|gs| gs.into_rgba()).collect()) } Pixels::Indexed(indexed) => { let IndexResolverData { palette, transparent_color_index, layer_is_background, } = index_resolver_data; let palette = palette.expect("Expected a palette when resolving indexed pixels. 
Should have been caught in validation"); let transparent_color_index = transparent_color_index.expect( "Indexed tilemap pixels in non-indexed pixel format. Should have been caught in validation", ); let resolver = |px: &Indexed| { px.as_rgba(palette, transparent_color_index, layer_is_background) .expect("Indexed pixel out of range. Should have been caught in validation") }; Cow::Owned(indexed.iter().map(resolver).collect()) } } } } pub(crate) struct IndexResolverData<'a> { pub(crate) palette: Option<&'a ColorPalette>, pub(crate) transparent_color_index: Option<u8>, pub(crate) layer_is_background: bool, }
use image::{Pixel, Rgba}; use crate::{reader::AseReader, AsepriteParseError, ColorPalette, PixelFormat, Result}; use std::{borrow::Cow, io::Read}; fn read_rgba(chunk: &[u8]) -> Result<Rgba<u8>> { let mut reader = AseReader::new(chunk); let red = reader.byte()?; let green = reader.byte()?; let blue = reader.byte()?; let alpha = reader.byte()?; Ok(Rgba::from_channels(red, green, blue, alpha)) } #[derive(Debug, Clone, Copy)] pub(crate) struct Grayscale { value: u8, alpha: u8, } impl Grayscale { fn new(chunk: &[u8]) -> Result<Self> { let mut reader = AseReader::new(chunk); let value = reader.byte()?; let alpha = reader.byte()?; Ok(Self { value, alpha }) } pub(crate) fn into_rgba(self) -> Rgba<u8> { let Self { value, alpha } = self; Rgba::from_channels(value, value, value, alpha) } } #[derive(Debug, Clone, Copy)] pub(crate) struct Indexed(u8); impl Indexed { pub(crate) fn value(&self) -> u8 { self.0 } pub(crate) fn as_rgba( &self, palette: &ColorPalette, transparent_color_index: u8, layer_is_background: bool, ) -> Option<Rgba<u8>> { let index = self.0; palette.color(index as u32).map(|c| { let alpha = if transparent_color_index == index && !layer_is_background { 0 } else { c.alpha() }; Rgba::from_channels(c.red(), c.green(), c.blue(), alpha) }) } } fn output_size(pixel_format: PixelFormat, expected_pixel_count: usize) -> usize { pixel_format.bytes_per_pixel() * expected_pixel_count } #[derive(Debug)] pub(crate) enum Pixels { Rgba(Vec<Rgba<u8>>), Grayscale(Vec<Grayscale>), Indexed(Vec<Indexed>), }
let pixels: Result<Vec<_>> = bytes.chunks_exact(2).map(Grayscale::new).collect(); pixels.map(Self::Grayscale) } PixelFormat::Rgba => { if bytes.len() % 4 != 0 { return Err(AsepriteParseError::InvalidInput( "Incorrect length of bytes for RGBA image data".to_string(), )); } let pixels: Result<Vec<_>> = bytes.chunks_exact(4).map(read_rgba).collect(); pixels.map(Self::Rgba) } } } pub(crate) fn from_raw<T: Read>( reader: AseReader<T>, pixel_format: PixelFormat, expected_pixel_count: usize, ) -> Result<Self> { let expected_output_size = output_size(pixel_format, expected_pixel_count); reader .take_bytes(expected_output_size) .and_then(|bytes| Self::from_bytes(bytes, pixel_format)) } pub(crate) fn from_compressed<T: Read>( reader: AseReader<T>, pixel_format: PixelFormat, expected_pixel_count: usize, ) -> Result<Self> { let expected_output_size = output_size(pixel_format, expected_pixel_count); reader .unzip(expected_output_size) .and_then(|bytes| Self::from_bytes(bytes, pixel_format)) } pub(crate) fn byte_count(&self) -> usize { match self { Pixels::Rgba(v) => v.len() * 4, Pixels::Grayscale(v) => v.len() * 2, Pixels::Indexed(v) => v.len(), } } pub(crate) fn clone_as_image_rgba( &self, index_resolver_data: IndexResolverData<'_>, ) -> Cow<Vec<image::Rgba<u8>>> { match self { Pixels::Rgba(rgba) => Cow::Borrowed(rgba), Pixels::Grayscale(grayscale) => { Cow::Owned(grayscale.iter().map(|gs| gs.into_rgba()).collect()) } Pixels::Indexed(indexed) => { let IndexResolverData { palette, transparent_color_index, layer_is_background, } = index_resolver_data; let palette = palette.expect("Expected a palette when resolving indexed pixels. Should have been caught in validation"); let transparent_color_index = transparent_color_index.expect( "Indexed tilemap pixels in non-indexed pixel format. Should have been caught in validation", ); let resolver = |px: &Indexed| { px.as_rgba(palette, transparent_color_index, layer_is_background) .expect("Indexed pixel out of range. 
Should have been caught in validation") }; Cow::Owned(indexed.iter().map(resolver).collect()) } } } } pub(crate) struct IndexResolverData<'a> { pub(crate) palette: Option<&'a ColorPalette>, pub(crate) transparent_color_index: Option<u8>, pub(crate) layer_is_background: bool, }
impl Pixels { fn from_bytes(bytes: Vec<u8>, pixel_format: PixelFormat) -> Result<Self> { match pixel_format { PixelFormat::Indexed { .. } => { let pixels = bytes.iter().map(|byte| Indexed(*byte)).collect(); Ok(Self::Indexed(pixels)) } PixelFormat::Grayscale => { if bytes.len() % 2 != 0 { return Err(AsepriteParseError::InvalidInput( "Incorrect length of bytes for Grayscale image data".to_string(), )); }
random
[ { "content": "fn parse_pixel_format(color_depth: u16, transparent_color_index: u8) -> Result<PixelFormat> {\n\n match color_depth {\n\n 8 => Ok(PixelFormat::Indexed {\n\n transparent_color_index,\n\n }),\n\n 16 => Ok(PixelFormat::Grayscale),\n\n 32 => Ok(PixelFormat::Rgba),\n\n _ => Err(AsepriteParseError::InvalidInput(format!(\n\n \"Unknown pixel format. Color depth: {}\",\n\n color_depth\n\n ))),\n\n }\n\n}\n", "file_path": "src/parse.rs", "rank": 1, "score": 180532.49675134599 }, { "content": "fn is_transparent(col: &image::Rgba<u8>) -> bool {\n\n col.0[3] == 0\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 2, "score": 160905.42229431908 }, { "content": "fn parse_layer_type<R: Read>(id: u16, reader: &mut AseReader<R>) -> Result<LayerType> {\n\n match id {\n\n 0 => Ok(LayerType::Image),\n\n 1 => Ok(LayerType::Group),\n\n 2 => reader.dword().map(TilesetId::new).map(LayerType::Tilemap),\n\n _ => Err(AsepriteParseError::InvalidInput(format!(\n\n \"Invalid layer type: {}\",\n\n id\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "src/layer.rs", "rank": 4, "score": 127833.72667046392 }, { "content": "fn parse_animation_direction(id: u8) -> Result<AnimationDirection> {\n\n match id {\n\n 0 => Ok(AnimationDirection::Forward),\n\n 1 => Ok(AnimationDirection::Reverse),\n\n 2 => Ok(AnimationDirection::PingPong),\n\n _ => Err(AsepriteParseError::InvalidInput(format!(\n\n \"Unknown animation direction: {}\",\n\n id\n\n ))),\n\n }\n\n}\n", "file_path": "src/tags.rs", "rank": 5, "score": 113752.23651372896 }, { "content": "fn blend8(back: u8, src: u8, opacity: u8) -> u8 {\n\n let src_x = src as i32;\n\n let back_x = back as i32;\n\n let a = src_x - back_x;\n\n let b = opacity as i32;\n\n let t = a * b + 0x80;\n\n let r = ((t >> 8) + t) >> 8;\n\n (back as i32 + r) as u8\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 6, "score": 98606.66279075437 }, { "content": "fn as_bool(bitwise_and: u32) -> bool {\n\n bitwise_and != 0\n\n}\n", "file_path": "src/tile.rs", "rank": 7, "score": 
93082.81462244174 }, { "content": "fn clip_color(mut r: f64, mut g: f64, mut b: f64) -> (f64, f64, f64) {\n\n let lum = luminosity(r, g, b);\n\n let min = r.min(g.min(b));\n\n let max = r.max(g.max(b));\n\n\n\n if min < 0.0 {\n\n r = lum + (((r - lum) * lum) / (lum - min));\n\n g = lum + (((g - lum) * lum) / (lum - min));\n\n b = lum + (((b - lum) * lum) / (lum - min));\n\n }\n\n\n\n if max > 1.0 {\n\n r = lum + (((r - lum) * (1.0 - lum)) / (max - lum));\n\n g = lum + (((g - lum) * (1.0 - lum)) / (max - lum));\n\n b = lum + (((b - lum) * (1.0 - lum)) / (max - lum));\n\n }\n\n (r, g, b)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 8, "score": 84971.02965504325 }, { "content": "#[allow(dead_code)]\n\nfn dump_bytes(data: &[u8]) {\n\n let mut column = 0;\n\n for d in data {\n\n print!(\"{:02x} \", d);\n\n column += 1;\n\n if column >= 16 {\n\n column = 0;\n\n println!();\n\n }\n\n }\n\n}\n", "file_path": "src/cel.rs", "rank": 9, "score": 83337.04984372627 }, { "content": "fn blend_difference(b: i32, s: i32) -> u8 {\n\n (b - s).abs() as u8\n\n}\n\n\n\n// --- exclusion ---------------------------------------------------------------\n\n\n\npub(crate) fn exclusion(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blender(backdrop, src, opacity, exclusion_baseline)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 10, "score": 74455.59167032977 }, { "content": "fn blend_divide(b: i32, s: i32) -> u8 {\n\n if b == 0 {\n\n 0\n\n } else if b >= s {\n\n 255\n\n } else {\n\n div_un8(b, s)\n\n }\n\n}\n\n\n\n// --- difference ------------------------------------------------------------------\n\n\n\npub(crate) fn difference(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blender(backdrop, src, opacity, difference_baseline)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 11, "score": 74455.59167032977 }, { "content": "fn mul_un8(a: i32, b: i32) -> u8 {\n\n let t = a * b + 0x80;\n\n let r = ((t >> 8) + t) >> 8;\n\n r as u8\n\n}\n\n\n", "file_path": 
"src/blend.rs", "rank": 12, "score": 74455.59167032977 }, { "content": "fn blend_multiply(a: i32, b: i32) -> u8 {\n\n mul_un8(a, b)\n\n}\n\n\n\n// --- screen ------------------------------------------------------------------\n\n\n\npub(crate) fn screen(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blender(backdrop, src, opacity, screen_baseline)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 13, "score": 74455.59167032977 }, { "content": "fn blend_darken(b: i32, s: i32) -> u8 {\n\n b.min(s) as u8\n\n}\n\n\n\n// --- lighten -----------------------------------------------------------------\n\n\n\npub(crate) fn lighten(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blender(backdrop, src, opacity, lighten_baseline)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 14, "score": 74455.59167032977 }, { "content": "fn blend_overlay(b: i32, s: i32) -> u8 {\n\n blend_hard_light(s, b)\n\n}\n\n\n\n// --- darken ------------------------------------------------------------------\n\n\n\npub(crate) fn darken(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blender(backdrop, src, opacity, darken_baseline)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 15, "score": 74455.59167032977 }, { "content": "// blend_exclusion(b, s, t) ((t) = MUL_UN8((b), (s), (t)), ((b) + (s) - 2*(t)))\n\nfn blend_exclusion(b: i32, s: i32) -> u8 {\n\n let t = mul_un8(b, s) as i32;\n\n (b + s - 2 * t) as u8\n\n}\n\n\n\n// --- addition ----------------------------------------------------------------\n\n\n\npub(crate) fn addition(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blender(backdrop, src, opacity, addition_baseline)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 16, "score": 74455.59167032977 }, { "content": "fn blend_lighten(b: i32, s: i32) -> u8 {\n\n b.max(s) as u8\n\n}\n\n\n\n// --- color_dodge -------------------------------------------------------------\n\n\n\npub(crate) fn color_dodge(backdrop: Color8, src: Color8, opacity: u8) -> 
Color8 {\n\n blender(backdrop, src, opacity, color_dodge_baseline)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 17, "score": 74455.59167032977 }, { "content": "// blend_screen(b, s, t) ((b) + (s) - MUL_UN8((b), (s), (t)))\n\nfn blend_screen(a: i32, b: i32) -> u8 {\n\n (a + b - mul_un8(a, b) as i32) as u8\n\n}\n\n\n\n// --- overlay -----------------------------------------------------------------\n\n\n\npub(crate) fn overlay(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blender(backdrop, src, opacity, overlay_baseline)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 18, "score": 74455.59167032977 }, { "content": "// DIV_UN8(a, b) (((uint16_t) (a) * 0xff + ((b) / 2)) / (b))\n\nfn div_un8(a: i32, b: i32) -> u8 {\n\n let t = a * 0xff;\n\n let r = (t + (b / 2)) / b;\n\n r as u8\n\n}\n\n// fn mul_un8()\n\n\n\n/*\n\n\n\n67:#define MUL_UN8(a, b, t) \\\n\n68- ((t) = (a) * (uint16_t)(b) + ONE_HALF, ((((t) >> G_SHIFT ) + (t) ) >> G_SHIFT ))\n\n\n\n*/\n", "file_path": "src/blend.rs", "rank": 19, "score": 74455.59167032977 }, { "content": "type BlendFn = Box<dyn Fn(Color8, Color8, u8) -> Color8>;\n\n\n", "file_path": "src/file.rs", "rank": 20, "score": 72666.07020110203 }, { "content": "fn to_ase(e: std::io::Error) -> AsepriteParseError {\n\n e.into()\n\n}\n\n\n\npub(crate) struct AseReader<T: Read> {\n\n input: T,\n\n}\n\n\n\nimpl AseReader<Cursor<&[u8]>> {\n\n pub(crate) fn new(data: &[u8]) -> AseReader<Cursor<&[u8]>> {\n\n let input = Cursor::new(data);\n\n AseReader { input }\n\n }\n\n}\n\n\n\nimpl<T: Read> AseReader<T>\n\nwhere\n\n T: Read,\n\n{\n\n pub(crate) fn with(input: T) -> Self {\n", "file_path": "src/reader.rs", "rank": 21, "score": 72466.46276387345 }, { "content": "fn blend_color_dodge(b: i32, s: i32) -> u8 {\n\n if b == 0 {\n\n return 0;\n\n }\n\n let s = 255 - s;\n\n if b >= s {\n\n 255\n\n } else {\n\n // in floating point: b / (1-s)\n\n div_un8(b, s)\n\n }\n\n}\n\n\n\n// --- color_burn 
--------------------------------------------------------------\n\n\n\npub(crate) fn color_burn(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blender(backdrop, src, opacity, color_burn_baseline)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 22, "score": 72291.28715103964 }, { "content": "fn blend_color_burn(b: i32, s: i32) -> u8 {\n\n if b == 255 {\n\n return 255;\n\n }\n\n let b = 255 - b;\n\n if b >= s {\n\n 0\n\n } else {\n\n // in floating point: 1 - ((1-b)/s)\n\n 255 - div_un8(b, s)\n\n }\n\n}\n\n\n\n// --- hard_light --------------------------------------------------------------\n\n\n\npub(crate) fn hard_light(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blender(backdrop, src, opacity, hard_light_baseline)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 23, "score": 72291.28715103964 }, { "content": "fn blend_hard_light(b: i32, s: i32) -> u8 {\n\n if s < 128 {\n\n blend_multiply(b, s << 1)\n\n } else {\n\n blend_screen(b, (s << 1) - 255)\n\n }\n\n}\n\n\n\n// --- soft_light --------------------------------------------------------------\n\n\n\npub(crate) fn soft_light(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blender(backdrop, src, opacity, soft_light_baseline)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 24, "score": 72291.28715103964 }, { "content": "fn parse_blend_mode(id: u16) -> Result<BlendMode> {\n\n match id {\n\n 0 => Ok(BlendMode::Normal),\n\n 1 => Ok(BlendMode::Multiply),\n\n 2 => Ok(BlendMode::Screen),\n\n 3 => Ok(BlendMode::Overlay),\n\n 4 => Ok(BlendMode::Darken),\n\n 5 => Ok(BlendMode::Lighten),\n\n 6 => Ok(BlendMode::ColorDodge),\n\n 7 => Ok(BlendMode::ColorBurn),\n\n 8 => Ok(BlendMode::HardLight),\n\n 9 => Ok(BlendMode::SoftLight),\n\n 10 => Ok(BlendMode::Difference),\n\n 11 => Ok(BlendMode::Exclusion),\n\n 12 => Ok(BlendMode::Hue),\n\n 13 => Ok(BlendMode::Saturation),\n\n 14 => Ok(BlendMode::Color),\n\n 15 => Ok(BlendMode::Luminosity),\n\n 16 => Ok(BlendMode::Addition),\n\n 17 => 
Ok(BlendMode::Subtract),\n\n 18 => Ok(BlendMode::Divide),\n\n _ => Err(AsepriteParseError::InvalidInput(format!(\n\n \"Invalid/Unsupported blend mode: {}\",\n\n id\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "src/layer.rs", "rank": 25, "score": 72273.88005285992 }, { "content": "#[test]\n\nfn grayscale() {\n\n let f = load_test_file(\"grayscale\");\n\n assert_eq!(f.size(), (64, 64));\n\n\n\n compare_with_reference_image(f.frame(0).image(), \"grayscale\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 26, "score": 71674.30559855321 }, { "content": "#[test]\n\nfn indexed() {\n\n let f = load_test_file(\"indexed\");\n\n\n\n assert_eq!(f.size(), (64, 64));\n\n\n\n compare_with_reference_image(f.frame(0).image(), \"indexed_01\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 27, "score": 71655.64687939065 }, { "content": "#[test]\n\nfn palette() {\n\n let f = load_test_file(\"palette\");\n\n\n\n let pal = f.palette().unwrap();\n\n assert_eq!(pal.num_colors(), 85);\n\n assert_eq!(pal.color(0).unwrap().raw_rgba8(), [46, 34, 47, 255]);\n\n assert_eq!(pal.color(71).unwrap().raw_rgba8(), [0, 0, 0, 83]);\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 28, "score": 71565.00638038336 }, { "content": "fn parse_chunk_type(chunk_type: u16) -> Result<ChunkType> {\n\n match chunk_type {\n\n 0x0004 => Ok(ChunkType::OldPalette04),\n\n 0x0011 => Ok(ChunkType::OldPalette11),\n\n 0x2004 => Ok(ChunkType::Layer),\n\n 0x2005 => Ok(ChunkType::Cel),\n\n 0x2006 => Ok(ChunkType::CelExtra),\n\n 0x2007 => Ok(ChunkType::ColorProfile),\n\n 0x2008 => Ok(ChunkType::ExternalFiles),\n\n 0x2016 => Ok(ChunkType::Mask),\n\n 0x2017 => Ok(ChunkType::Path),\n\n 0x2018 => Ok(ChunkType::Tags),\n\n 0x2019 => Ok(ChunkType::Palette),\n\n 0x2020 => Ok(ChunkType::UserData),\n\n 0x2022 => Ok(ChunkType::Slice),\n\n 0x2023 => Ok(ChunkType::Tileset),\n\n _ => Err(AsepriteParseError::UnsupportedFeature(format!(\n\n \"Invalid or unsupported chunk type: 0x{:x}\",\n\n chunk_type\n\n ))),\n\n }\n\n}\n\n\n\nconst 
CHUNK_HEADER_SIZE: usize = 6;\n\nconst FRAME_HEADER_SIZE: i64 = 16;\n\n\n", "file_path": "src/parse.rs", "rank": 29, "score": 70589.39816544912 }, { "content": "fn test_user_data(s: &str, c: [u8; 4]) -> UserData {\n\n UserData {\n\n text: Some(s.to_string()),\n\n color: Some(image::Rgba::from_channels(c[0], c[1], c[2], c[3])),\n\n }\n\n}\n\n\n\nconst COLOR_GREEN: [u8; 4] = [0, 255, 0, 255];\n\nconst COLOR_RED: [u8; 4] = [255, 0, 0, 255];\n\n\n", "file_path": "src/tests.rs", "rank": 30, "score": 68677.31245484047 }, { "content": "#[test]\n\nfn tilemap_grayscale() {\n\n let f = load_test_file(\"tilemap_grayscale\");\n\n let img = f.frame(0).image();\n\n assert_eq!(f.size(), (32, 32));\n\n let ts = f\n\n .tilesets()\n\n .get(&tileset::TilesetId::new(0))\n\n .expect(\"No tileset found\");\n\n assert_eq!(ts.name(), \"test_tileset\");\n\n\n\n compare_with_reference_image(img, \"tilemap_grayscale\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 31, "score": 68364.78889686313 }, { "content": "#[test]\n\nfn tilemap_indexed() {\n\n let f = load_test_file(\"tilemap_indexed\");\n\n let img = f.frame(0).image();\n\n assert_eq!(f.size(), (32, 32));\n\n let ts = f\n\n .tilesets()\n\n .get(&tileset::TilesetId::new(0))\n\n .expect(\"No tileset found\");\n\n assert_eq!(ts.name(), \"test_tileset\");\n\n\n\n compare_with_reference_image(img, \"tilemap_indexed\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 32, "score": 68347.16652477448 }, { "content": "fn parse_color_profile_type(id: u16) -> Result<ColorProfileType> {\n\n match id {\n\n 0x0000 => Ok(ColorProfileType::None),\n\n 0x0001 => Ok(ColorProfileType::Srgb),\n\n 0x0002 => Ok(ColorProfileType::ICC),\n\n _ => Err(AsepriteParseError::UnsupportedFeature(format!(\n\n \"Unknown color profile type: {}\",\n\n id\n\n ))),\n\n }\n\n}\n", "file_path": "src/color_profile.rs", "rank": 33, "score": 67594.7038852058 }, { "content": "fn check_chunk_bytes(chunk_size: u32, bytes_available: i64) -> Result<()> {\n\n if (chunk_size as 
usize) < CHUNK_HEADER_SIZE {\n\n return Err(AsepriteParseError::InvalidInput(format!(\n\n \"Chunk size is too small {}, minimum_size: {}\",\n\n chunk_size, CHUNK_HEADER_SIZE\n\n )));\n\n }\n\n if chunk_size as i64 > bytes_available {\n\n return Err(AsepriteParseError::InvalidInput(format!(\n\n \"Trying to read chunk of size {}, but there are only {} bytes available in the frame\",\n\n chunk_size, bytes_available\n\n )));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 34, "score": 67074.13206108498 }, { "content": "#[test]\n\nfn gen_random_pixels() {\n\n use rand::Rng;\n\n use image::{Rgba};\n\n use std::path::Path;\n\n let mut rng = rand::thread_rng();\n\n\n\n let (width, height) = (256, 256);\n\n let mut img = image::RgbaImage::new(width, height);\n\n for y in 0..width {\n\n for x in 0..height {\n\n let r: u8 = rng.gen();\n\n let g: u8 = rng.gen();\n\n let b: u8 = rng.gen();\n\n let a: u8 = rng.gen();\n\n img.put_pixel(x, y, Rgba([r, g, b, a]));\n\n }\n\n }\n\n img.save(&Path::new(\"tests/data/random-256x256.png\")).unwrap();\n\n}\n\n// */\n", "file_path": "src/tests.rs", "rank": 35, "score": 65240.68558976756 }, { "content": "fn from_rgb_f64(r: f64, g: f64, b: f64, a: u8) -> Color8 {\n\n from_rgba_i32(\n\n (r * 255.0) as i32,\n\n (g * 255.0) as i32,\n\n (b * 255.0) as i32,\n\n a as i32,\n\n )\n\n}\n\n\n\n/*\n\ncolor_t rgba_blender_merge(color_t backdrop, color_t src, int opacity)\n\n{\n\n int Br, Bg, Bb, Ba;\n\n int Sr, Sg, Sb, Sa;\n\n int Rr, Rg, Rb, Ra;\n\n int t;\n\n\n\n Br = rgba_getr(backdrop);\n\n Bg = rgba_getg(backdrop);\n\n Bb = rgba_getb(backdrop);\n", "file_path": "src/blend.rs", "rank": 36, "score": 64777.82436809532 }, { "content": "// Returns (smallest, middle, highest) where smallest is the index\n\n// of the smallest element. I.e., 0 if it's `r`, 1 if it's `g`, etc.\n\n//\n\n// Implements this static sorting network. 
Vertical lines are swaps.\n\n//\n\n// r --*--*----- min\n\n// | |\n\n// g --*--|--*-- mid\n\n// | |\n\n// b -----*--*-- max\n\n//\n\nfn static_sort3(r: f64, g: f64, b: f64) -> (usize, usize, usize) {\n\n let (min0, mid0, max0) = ((r, 0), (g, 1), (b, 2));\n\n // dbg!(\"--------\");\n\n // dbg!(min0, mid0, max0);\n\n let (min1, mid1) = if min0.0 < mid0.0 {\n\n (min0, mid0)\n\n } else {\n\n (mid0, min0)\n\n };\n\n // dbg!(min1, mid1);\n\n let (min2, max1) = if min1.0 < max0.0 {\n\n (min1, max0)\n\n } else {\n\n (max0, min1)\n\n };\n\n // dbg!(min2, max1);\n\n let (mid2, max2) = if mid1.0 < max1.0 {\n\n (mid1, max1)\n\n } else {\n\n (max1, mid1)\n\n };\n\n // dbg!(mid2, max2);\n\n (min2.1, mid2.1, max2.1)\n\n}\n\n\n\n// Array based implementation as a reference for testing.\n", "file_path": "src/blend.rs", "rank": 37, "score": 63976.35391491804 }, { "content": "// file format docs: https://github.com/aseprite/aseprite/blob/master/docs/ase-file-specs.md\n\n// v1.3 spec diff doc: https://gist.github.com/dacap/35f3b54fbcd021d099e0166a4f295bab\n\npub fn read_aseprite<R: Read>(input: R) -> Result<AsepriteFile> {\n\n let mut reader = AseReader::with(input);\n\n let _size = reader.dword()?;\n\n let magic_number = reader.word()?;\n\n if magic_number != 0xA5E0 {\n\n return Err(AsepriteParseError::InvalidInput(format!(\n\n \"Invalid magic number for header: {:x} != {:x}\",\n\n magic_number, 0xA5E0\n\n )));\n\n }\n\n\n\n let num_frames = reader.word()?;\n\n let width = reader.word()?;\n\n let height = reader.word()?;\n\n let color_depth = reader.word()?;\n\n let _flags = reader.dword()?;\n\n let default_frame_time = reader.word()?;\n\n let _placeholder1 = reader.dword()?;\n\n let _placeholder2 = reader.dword()?;\n\n let transparent_color_index = reader.byte()?;\n", "file_path": "src/parse.rs", "rank": 38, "score": 63830.12154944558 }, { "content": "fn addition_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n let (back_r, back_g, back_b, _) = 
as_rgba_i32(backdrop);\n\n let (src_r, src_g, src_b, src_a) = as_rgba_i32(src);\n\n let r = back_r + src_r;\n\n let g = back_g + src_g;\n\n let b = back_b + src_b;\n\n\n\n let src = from_rgba_i32(r.min(255), g.min(255), b.min(255), src_a);\n\n\n\n normal(backdrop, src, opacity)\n\n}\n\n\n\n// --- subtract ----------------------------------------------------------------\n\n\n\npub(crate) fn subtract(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blender(backdrop, src, opacity, subtract_baseline)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 39, "score": 63749.83827841185 }, { "content": "fn difference_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blend_channel(backdrop, src, opacity, blend_difference)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 40, "score": 63749.83827841185 }, { "content": "fn darken_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blend_channel(backdrop, src, opacity, blend_darken)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 41, "score": 63749.83827841185 }, { "content": "fn multiply_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blend_channel(backdrop, src, opacity, blend_multiply)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 42, "score": 63749.83827841185 }, { "content": "fn divide_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blend_channel(backdrop, src, opacity, blend_divide)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 43, "score": 63749.83827841185 }, { "content": "fn overlay_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blend_channel(backdrop, src, opacity, blend_overlay)\n\n}\n\n\n\n// blend_overlay(b, s, t) (blend_hard_light(s, b, t))\n\n// blend_hard_light(b, s, t) ((s) < 128 ? 
\\\n\n// blend_multiply((b), (s)<<1, (t)): \\\n\n// blend_screen((b), ((s)<<1)-255, (t)))\n\n\n", "file_path": "src/blend.rs", "rank": 44, "score": 63749.83827841185 }, { "content": "fn exclusion_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blend_channel(backdrop, src, opacity, blend_exclusion)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 45, "score": 63749.83827841185 }, { "content": "fn lighten_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blend_channel(backdrop, src, opacity, blend_lighten)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 46, "score": 63749.83827841185 }, { "content": "fn screen_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blend_channel(backdrop, src, opacity, blend_screen)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 47, "score": 63749.83827841185 }, { "content": "fn subtract_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n let (back_r, back_g, back_b, _) = as_rgba_i32(backdrop);\n\n let (src_r, src_g, src_b, src_a) = as_rgba_i32(src);\n\n let r = back_r - src_r;\n\n let g = back_g - src_g;\n\n let b = back_b - src_b;\n\n\n\n let src = from_rgba_i32(r.max(0), g.max(0), b.max(0), src_a);\n\n\n\n normal(backdrop, src, opacity)\n\n}\n\n\n\n// --- hsl_hue -----------------------------------------------------------------\n\n\n\npub(crate) fn hsl_hue(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blender(backdrop, src, opacity, hsl_hue_baseline)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 48, "score": 63749.83827841185 }, { "content": "// implementation used in Aseprite, even though it uses a lot of compares and\n\n// is actually broken if r == g and g < b.\n\nfn static_sort3_orig(r: f64, g: f64, b: f64) -> (usize, usize, usize) {\n\n // min = MIN(r, MIN(g, b));\n\n // ((r) < (((g) < (b)) ? (g) : (b))) ? (r) : (((g) < (b)) ? (g) : (b));\n\n // max = MAX(r, MAX(g, b));\n\n // ((r) > (((g) > (b)) ? (g) : (b))) ? (r) : (((g) > (b)) ? 
(g) : (b))\n\n // mid = ((r) > (g) ?\n\n // ((g) > (b) ?\n\n // (g) :\n\n // ((r) > (b) ?\n\n // (b) :\n\n // (r)\n\n // )\n\n // ) :\n\n // ((g) > (b) ?\n\n // ((b) > (r) ?\n\n // (b) :\n\n // (r)\n\n // ) :\n\n // (g)))\n\n\n", "file_path": "src/blend.rs", "rank": 49, "score": 62312.40100088693 }, { "content": "#[cfg(test)]\n\nfn static_sort3_spec(r: f64, g: f64, b: f64) -> (usize, usize, usize) {\n\n let mut inp = [(r, 0), (g, 1), (b, 2)];\n\n inp.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());\n\n let res: Vec<usize> = inp.iter().map(|p| p.1).collect();\n\n //dbg!(r, g, b);\n\n (res[0], res[1], res[2])\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 50, "score": 62308.37702462974 }, { "content": "fn hsl_saturation_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n //dbg!(backdrop, src);\n\n let (r, g, b) = as_rgb_f64(src);\n\n //dbg!(\"src\", (r, g, b));\n\n let sat = saturation(r, g, b);\n\n //dbg!(sat);\n\n\n\n let (r, g, b) = as_rgb_f64(backdrop);\n\n //dbg!(\"back\", (r, g, b));\n\n let lum = luminosity(r, g, b);\n\n //dbg!(lum);\n\n\n\n let (r, g, b) = set_saturation(r, g, b, sat);\n\n //dbg!(\"sat\", (r, g, b));\n\n let (r, g, b) = set_luminocity(r, g, b, lum);\n\n\n\n //dbg!((r, g, b), saturation(r, g, b), luminosity(r, g, b));\n\n\n\n let src = from_rgb_f64(r, g, b, src[3]);\n\n // dbg!(src);\n\n normal(backdrop, src, opacity)\n\n}\n\n\n\n// --- hsl_color ---------------------------------------------------------------\n\n\n\npub(crate) fn hsl_color(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blender(backdrop, src, opacity, hsl_color_baseline)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 51, "score": 62211.42024240618 }, { "content": "fn hsl_hue_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n let (r, g, b) = as_rgb_f64(backdrop);\n\n let sat = saturation(r, g, b);\n\n let lum = luminosity(r, g, b);\n\n\n\n let (r, g, b) = as_rgb_f64(src);\n\n\n\n let (r, g, b) = set_saturation(r, g, b, 
sat);\n\n let (r, g, b) = set_luminocity(r, g, b, lum);\n\n\n\n let src = from_rgb_f64(r, g, b, src[3]);\n\n\n\n normal(backdrop, src, opacity)\n\n}\n\n\n\n// --- hsl_saturation ----------------------------------------------------------\n\n\n\npub(crate) fn hsl_saturation(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blender(backdrop, src, opacity, hsl_saturation_baseline)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 52, "score": 62211.42024240618 }, { "content": "fn soft_light_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n let (back_r, back_g, back_b, _) = as_rgba_i32(backdrop);\n\n let (src_r, src_g, src_b, src_a) = as_rgba_i32(src);\n\n let r = blend_soft_light(back_r, src_r);\n\n let g = blend_soft_light(back_g, src_g);\n\n let b = blend_soft_light(back_b, src_b);\n\n\n\n let src = from_rgba_i32(r, g, b, src_a);\n\n\n\n normal(backdrop, src, opacity)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 53, "score": 62211.42024240618 }, { "content": "fn color_burn_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blend_channel(backdrop, src, opacity, blend_color_burn)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 54, "score": 62211.42024240618 }, { "content": "fn hsl_color_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n let (r, g, b) = as_rgb_f64(backdrop);\n\n let lum = luminosity(r, g, b);\n\n\n\n let (r, g, b) = as_rgb_f64(src);\n\n\n\n let (r, g, b) = set_luminocity(r, g, b, lum);\n\n\n\n let src = from_rgb_f64(r, g, b, src[3]);\n\n normal(backdrop, src, opacity)\n\n}\n\n\n\n// --- hsl_luminosity ----------------------------------------------------------\n\n\n\npub(crate) fn hsl_luminosity(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blender(backdrop, src, opacity, hsl_luminosity_baseline)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 55, "score": 62211.42024240618 }, { "content": "fn hard_light_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 
{\n\n blend_channel(backdrop, src, opacity, blend_hard_light)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 56, "score": 62211.42024240618 }, { "content": "fn hsl_luminosity_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n let (r, g, b) = as_rgb_f64(src);\n\n let lum = luminosity(r, g, b);\n\n\n\n let (r, g, b) = as_rgb_f64(backdrop);\n\n\n\n let (r, g, b) = set_luminocity(r, g, b, lum);\n\n\n\n let src = from_rgb_f64(r, g, b, src[3]);\n\n\n\n normal(backdrop, src, opacity)\n\n}\n\n\n\n// --- Hue/Saturation/Luminance Utils ------------------------------------------\n\n\n", "file_path": "src/blend.rs", "rank": 57, "score": 62211.42024240618 }, { "content": "fn color_dodge_baseline(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blend_channel(backdrop, src, opacity, blend_color_dodge)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 58, "score": 62211.42024240618 }, { "content": "struct Chunk {\n\n chunk_type: ChunkType,\n\n data: Vec<u8>,\n\n}\n\n\n\nimpl Chunk {\n\n fn read<R: Read>(bytes_available: &mut i64, reader: &mut AseReader<R>) -> Result<Self> {\n\n let chunk_size = reader.dword()?;\n\n let chunk_type_code = reader.word()?;\n\n let chunk_type = parse_chunk_type(chunk_type_code)?;\n\n\n\n check_chunk_bytes(chunk_size, *bytes_available)?;\n\n\n\n let chunk_data_bytes = chunk_size as usize - CHUNK_HEADER_SIZE;\n\n let mut data = vec![0_u8; chunk_data_bytes];\n\n reader.read_exact(&mut data)?;\n\n *bytes_available -= chunk_size as i64;\n\n Ok(Chunk { chunk_type, data })\n\n }\n\n fn read_all<R: Read>(\n", "file_path": "src/parse.rs", "rank": 59, "score": 61095.1548136229 }, { "content": "#[derive(Debug, Clone, PartialEq)]\n\nenum ChunkType {\n\n OldPalette04, // deprecated\n\n OldPalette11, // deprecated\n\n Palette,\n\n Layer,\n\n Cel,\n\n CelExtra,\n\n ColorProfile,\n\n Mask, // deprecated\n\n Path,\n\n Tags,\n\n UserData,\n\n Slice,\n\n ExternalFiles,\n\n Tileset,\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 60, 
"score": 59441.2358677248 }, { "content": "struct ParseInfo {\n\n palette: Option<palette::ColorPalette>,\n\n color_profile: Option<color_profile::ColorProfile>,\n\n layers: Vec<LayerData>,\n\n framedata: cel::CelsData, // Vec<Vec<cel::RawCel>>,\n\n frame_times: Vec<u16>,\n\n tags: Option<Vec<Tag>>,\n\n external_files: ExternalFilesById,\n\n tilesets: TilesetsById,\n\n sprite_user_data: Option<UserData>,\n\n user_data_context: Option<UserDataContext>,\n\n slices: Vec<Slice>,\n\n}\n\n\n\nimpl ParseInfo {\n\n fn new(num_frames: u16, default_frame_time: u16) -> Self {\n\n Self {\n\n palette: None,\n\n color_profile: None,\n\n layers: Vec::new(),\n", "file_path": "src/parse.rs", "rank": 61, "score": 59367.69002974936 }, { "content": "fn blender<F>(backdrop: Color8, src: Color8, opacity: u8, f: F) -> Color8\n\nwhere\n\n F: Fn(Color8, Color8, u8) -> Color8,\n\n{\n\n if backdrop[3] != 0 {\n\n let norm = normal(backdrop, src, opacity);\n\n let blend = f(backdrop, src, opacity);\n\n let back_alpha = backdrop[3];\n\n let normal_to_blend_merge = merge(norm, blend, back_alpha);\n\n let src_total_alpha = mul_un8(src[3] as i32, opacity as i32);\n\n let composite_alpha = mul_un8(back_alpha as i32, src_total_alpha as i32);\n\n merge(normal_to_blend_merge, blend, composite_alpha)\n\n //todo!()\n\n } else {\n\n normal(backdrop, src, opacity)\n\n }\n\n}\n\n\n\n/*\n\n int t;\n\n int r = blend_multiply(rgba_getr(backdrop), rgba_getr(src), t);\n\n int g = blend_multiply(rgba_getg(backdrop), rgba_getg(src), t);\n\n int b = blend_multiply(rgba_getb(backdrop), rgba_getb(src), t);\n\n src = rgba(r, g, b, 0) | (src & rgba_a_mask);\n\n return rgba_blender_normal(backdrop, src, opacity);\n\n*/\n", "file_path": "src/blend.rs", "rank": 62, "score": 58716.57922158243 }, { "content": "#[derive(Clone, Copy)]\n\nenum UserDataContext {\n\n CelId(CelId),\n\n LayerIndex(u32),\n\n OldPalette,\n\n TagIndex(u16),\n\n SliceIndex(u32),\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 63, "score": 
57898.68514005719 }, { "content": "struct ValidatedParseInfo {\n\n layers: layer::LayersData,\n\n tilesets: TilesetsById,\n\n framedata: cel::CelsData,\n\n external_files: ExternalFilesById,\n\n palette: Option<palette::ColorPalette>,\n\n tags: Vec<Tag>,\n\n frame_times: Vec<u16>,\n\n sprite_user_data: Option<UserData>,\n\n slices: Vec<Slice>,\n\n}\n\n\n", "file_path": "src/parse.rs", "rank": 64, "score": 57822.02235714911 }, { "content": "fn blend_channel<F>(backdrop: Color8, src: Color8, opacity: u8, f: F) -> Color8\n\nwhere\n\n F: Fn(i32, i32) -> u8,\n\n{\n\n let (back_r, back_g, back_b, _) = as_rgba_i32(backdrop);\n\n let (src_r, src_g, src_b, _) = as_rgba_i32(src);\n\n let r = f(back_r, src_r);\n\n let g = f(back_g, src_g);\n\n let b = f(back_b, src_b);\n\n let src = Rgba([r, g, b, src[3]]);\n\n normal(backdrop, src, opacity)\n\n}\n\n\n\n// --- multiply ----------------------------------------------------------------\n\n\n\npub(crate) fn multiply(backdrop: Color8, src: Color8, opacity: u8) -> Color8 {\n\n blender(backdrop, src, opacity, multiply_baseline)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 65, "score": 57292.099868066056 }, { "content": "fn from_rgba_i32(r: i32, g: i32, b: i32, a: i32) -> Color8 {\n\n debug_assert!((0..=255).contains(&r));\n\n debug_assert!((0..=255).contains(&g));\n\n debug_assert!((0..=255).contains(&b));\n\n debug_assert!((0..=255).contains(&a));\n\n\n\n Rgba([r as u8, g as u8, b as u8, a as u8])\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 66, "score": 50479.17279568794 }, { "content": "fn as_rgba_i32(color: Color8) -> (i32, i32, i32, i32) {\n\n let [r, g, b, a] = color.0;\n\n (r as i32, g as i32, b as i32, a as i32)\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 67, "score": 49831.823010896456 }, { "content": "fn compare_with_reference_image(img: image::RgbaImage, filename: &str) {\n\n let mut reference_path = PathBuf::new();\n\n reference_path.push(\"tests\");\n\n reference_path.push(\"data\");\n\n let mut 
actual_path = reference_path.clone();\n\n reference_path.push(format!(\"{}.png\", filename));\n\n actual_path.push(format!(\"{}.actual.png\", filename));\n\n let ref_image = image::open(&reference_path).unwrap();\n\n let ref_rgba = ref_image.to_rgba8();\n\n\n\n assert_eq!(img.dimensions(), ref_rgba.dimensions());\n\n img.save(&actual_path).unwrap();\n\n\n\n for (x, y, expected_color) in ref_rgba.enumerate_pixels() {\n\n let actual_color = img.get_pixel(x, y);\n\n if actual_color == expected_color {\n\n continue;\n\n } else if is_transparent(expected_color) && is_transparent(actual_color) {\n\n continue;\n\n } else {\n", "file_path": "src/tests.rs", "rank": 68, "score": 48643.346436026804 }, { "content": "fn blend_mode_to_blend_fn(mode: BlendMode) -> BlendFn {\n\n // TODO: Make these statically allocated\n\n match mode {\n\n BlendMode::Normal => Box::new(blend::normal),\n\n BlendMode::Multiply => Box::new(blend::multiply),\n\n BlendMode::Screen => Box::new(blend::screen),\n\n BlendMode::Overlay => Box::new(blend::overlay),\n\n BlendMode::Darken => Box::new(blend::darken),\n\n BlendMode::Lighten => Box::new(blend::lighten),\n\n BlendMode::ColorDodge => Box::new(blend::color_dodge),\n\n BlendMode::ColorBurn => Box::new(blend::color_burn),\n\n BlendMode::HardLight => Box::new(blend::hard_light),\n\n BlendMode::SoftLight => Box::new(blend::soft_light),\n\n BlendMode::Difference => Box::new(blend::difference),\n\n BlendMode::Exclusion => Box::new(blend::exclusion),\n\n BlendMode::Hue => Box::new(blend::hsl_hue),\n\n BlendMode::Saturation => Box::new(blend::hsl_saturation),\n\n BlendMode::Color => Box::new(blend::hsl_color),\n\n BlendMode::Luminosity => Box::new(blend::hsl_luminosity),\n\n BlendMode::Addition => Box::new(blend::addition),\n\n BlendMode::Subtract => Box::new(blend::subtract),\n\n BlendMode::Divide => Box::new(blend::divide),\n\n }\n\n}\n\n\n", "file_path": "src/file.rs", "rank": 69, "score": 42781.19159305885 }, { "content": "#[test]\n\nfn transparency() 
{\n\n let f = load_test_file(\"transparency\");\n\n\n\n assert_eq!(f.num_frames(), 2);\n\n assert_eq!(f.size(), (16, 16));\n\n assert_eq!(f.num_layers(), 2);\n\n assert_eq!(f.pixel_format(), PixelFormat::Rgba);\n\n\n\n compare_with_reference_image(f.frame(0).image(), \"transparency_01\");\n\n compare_with_reference_image(f.frame(1).image(), \"transparency_02\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 70, "score": 40553.826768442006 }, { "content": "#[test]\n\nfn big() {\n\n let f = load_test_file(\"big\");\n\n\n\n assert_eq!(f.num_frames, 1);\n\n assert_eq!((f.width, f.height), (256, 256));\n\n assert_eq!(f.num_layers(), 1);\n\n assert_eq!(f.pixel_format, PixelFormat::Rgba);\n\n\n\n compare_with_reference_image(f.frame(0).image(), \"big\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 71, "score": 40553.826768442006 }, { "content": "#[test]\n\nfn tilemap() {\n\n let f = load_test_file(\"tilemap\");\n\n let img = f.frame(0).image();\n\n assert_eq!(f.size(), (32, 32));\n\n let ts = f\n\n .tilesets()\n\n .get(&tileset::TilesetId::new(0))\n\n .expect(\"No tileset found\");\n\n assert_eq!(ts.name(), \"test_tileset\");\n\n\n\n compare_with_reference_image(img, \"tilemap\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 72, "score": 40553.826768442006 }, { "content": "#[test]\n\nfn background() {\n\n let f = load_test_file(\"background\");\n\n\n\n assert_eq!(f.num_frames, 1);\n\n assert_eq!((f.width, f.height), (256, 256));\n\n assert_eq!(f.num_layers(), 1);\n\n assert_eq!(f.pixel_format, PixelFormat::Rgba);\n\n println!(\"{:#?}\", f.layers);\n\n\n\n compare_with_reference_image(f.frame(0).image(), \"background\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 73, "score": 40553.826768442006 }, { "content": "#[test]\n\nfn basic() {\n\n let f = load_test_file(\"basic-16x16\");\n\n assert_eq!(f.num_frames, 1);\n\n assert_eq!((f.width, f.height), (16, 16));\n\n assert_eq!(f.num_layers(), 1);\n\n assert_eq!(f.pixel_format, PixelFormat::Rgba);\n\n 
assert!(f.layer(0).flags().contains(LayerFlags::VISIBLE));\n\n\n\n compare_with_reference_image(f.frame(0).image(), \"basic-16x16\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 74, "score": 40553.826768442006 }, { "content": "fn tile_slice<'a, T>(pixels: &'a [T], tile_size: &TileSize, tile_id: &TileId) -> &'a [T] {\n\n let pixels_per_tile = tile_size.pixels_per_tile() as usize;\n\n let start = pixels_per_tile * (tile_id.0 as usize);\n\n let end = start + pixels_per_tile;\n\n &pixels[start..end]\n\n}\n\n\n", "file_path": "src/file.rs", "rank": 75, "score": 39535.46980284207 }, { "content": "#[test]\n\nfn blend_divide() {\n\n blend_test(\"blend_divide\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 76, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn blend_color() {\n\n blend_test(\"blend_color\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 77, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn layers_and_tags() {\n\n let f = load_test_file(\"layers_and_tags\");\n\n\n\n assert_eq!(f.num_frames, 4);\n\n assert_eq!((f.width, f.height), (16, 16));\n\n assert_eq!(f.num_layers(), 6);\n\n assert_eq!(f.pixel_format, PixelFormat::Rgba);\n\n assert_eq!(f.tags.len(), 3);\n\n\n\n compare_with_reference_image(f.frame(0).image(), \"layers_and_tags_01\");\n\n compare_with_reference_image(f.frame(1).image(), \"layers_and_tags_02\");\n\n compare_with_reference_image(f.frame(2).image(), \"layers_and_tags_03\");\n\n compare_with_reference_image(f.frame(3).image(), \"layers_and_tags_04\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 78, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn blend_hue() {\n\n blend_test(\"blend_hue\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 79, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn single_layer() {\n\n let f = load_test_file(\"layers_and_tags\");\n\n\n\n assert_eq!(f.num_frames, 4);\n\n assert_eq!(f.num_layers(), 6);\n\n assert_eq!(f.layer_by_name(\"Layer 1\").map(|l| l.id()), 
Some(1));\n\n\n\n compare_with_reference_image(f.layer_image(2, 1), \"single_layer\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 80, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn linked_cels() {\n\n let f = load_test_file(\"linked_cels\");\n\n\n\n assert_eq!(f.num_frames, 3);\n\n assert_eq!(f.num_layers(), 3);\n\n //assert_eq!(f.named_layer(\"Layer 1\").map(|l| l.id()), Some(1));\n\n\n\n compare_with_reference_image(f.frame(0).image(), \"linked_cels_01\");\n\n compare_with_reference_image(f.frame(1).image(), \"linked_cels_02\");\n\n compare_with_reference_image(f.frame(2).image(), \"linked_cels_03\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 81, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn test_normal() {\n\n let back = Rgba([0, 205, 249, 255]);\n\n let front = Rgba([237, 118, 20, 255]);\n\n let res = normal(back, front, 128);\n\n assert_eq!(Rgba([118, 162, 135, 255]), res);\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 82, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn blend_exclusion() {\n\n blend_test(\"blend_exclusion\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 83, "score": 38972.81119769938 }, { "content": "fn main() {\n\n let config = Config {\n\n width: 64,\n\n height: 64,\n\n border_padding: 0,\n\n rectangle_padding: 1,\n\n };\n\n\n\n let mut packer = Packer::new(config);\n\n\n\n let basedir = Path::new(\"examples\").join(\"atlas\");\n\n\n\n let mut sprites: Vec<SpriteInfo> = Vec::new();\n\n let mut images: Vec<ImageInfo> = Vec::new();\n\n\n\n // Place all the sprites\n\n for basename in &[\"big\", \"small\"] {\n\n let file = format!(\"{}.aseprite\", basename);\n\n let ase = AsepriteFile::read_file(&basedir.join(&file)).unwrap();\n\n let (width, height) = ase.size();\n", "file_path": "examples/atlas/main.rs", "rank": 84, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn tileset_export() {\n\n let f = load_test_file(\"tileset\");\n\n let tileset = f\n\n .tilesets()\n\n 
.get(&tileset::TilesetId::new(0))\n\n .expect(\"No tileset found\");\n\n let img = f.tileset_image(tileset.id()).unwrap();\n\n\n\n compare_with_reference_image(img, \"tileset\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 85, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn blend_saturation() {\n\n blend_test(\"blend_saturation\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 86, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn blend_subtract() {\n\n blend_test(\"blend_subtract\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 87, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn blend_screen() {\n\n let f = load_test_file(\"blend_screen\");\n\n\n\n assert_eq!(f.num_frames, 1);\n\n assert_eq!((f.width, f.height), (256, 256));\n\n assert_eq!(f.num_layers(), 2);\n\n assert_eq!(f.pixel_format, PixelFormat::Rgba);\n\n\n\n compare_with_reference_image(f.frame(0).image(), \"blend_screen\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 88, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn blend_normal() {\n\n let f = load_test_file(\"blend_normal\");\n\n\n\n assert_eq!(f.num_frames, 1);\n\n assert_eq!((f.width, f.height), (256, 256));\n\n assert_eq!(f.num_layers(), 2);\n\n assert_eq!(f.pixel_format, PixelFormat::Rgba);\n\n\n\n compare_with_reference_image(f.frame(0).image(), \"blend_normal\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 89, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn blend_luminosity() {\n\n blend_test(\"blend_luminosity\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 90, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn blend_overlay() {\n\n let f = load_test_file(\"blend_overlay\");\n\n\n\n assert_eq!(f.num_frames, 1);\n\n assert_eq!((f.width, f.height), (256, 256));\n\n assert_eq!(f.num_layers(), 2);\n\n assert_eq!(f.pixel_format, PixelFormat::Rgba);\n\n\n\n compare_with_reference_image(f.frame(0).image(), \"blend_overlay\");\n\n}\n\n\n", "file_path": 
"src/tests.rs", "rank": 91, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn blend_addition() {\n\n blend_test(\"blend_addition\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 92, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn blend_multiply() {\n\n let f = load_test_file(\"blend_multiply\");\n\n\n\n assert_eq!(f.num_frames, 1);\n\n assert_eq!((f.width, f.height), (256, 256));\n\n assert_eq!(f.num_layers(), 2);\n\n assert_eq!(f.pixel_format, PixelFormat::Rgba);\n\n\n\n compare_with_reference_image(f.frame(0).image(), \"blend_multiply\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 93, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn test_blend8() {\n\n assert_eq!(blend8(80, 50, 0), 80);\n\n assert_eq!(blend8(80, 50, 128), 65);\n\n assert_eq!(blend8(80, 50, 255), 50);\n\n assert_eq!(blend8(80, 150, 128), 80 + (70 / 2));\n\n assert_eq!(blend8(80, 150, 51), 80 + (70 / 5));\n\n assert_eq!(blend8(80, 150, 36), 80 + (70 / 7));\n\n\n\n //assert_eq!(blend8(0, 237, 128), 0);\n\n}\n\n\n", "file_path": "src/blend.rs", "rank": 94, "score": 38972.81119769938 }, { "content": "fn main() {\n\n let basedir = Path::new(\"examples\").join(\"basic\");\n\n let file = basedir.join(\"input.aseprite\");\n\n let ase = AsepriteFile::read_file(&file).unwrap();\n\n for frame in 0..ase.num_frames() {\n\n let output = format!(\"output_{}.png\", frame);\n\n let outpath = basedir.join(&output);\n\n let img = ase.frame(frame).image();\n\n img.save_with_format(outpath, ImageFormat::Png).unwrap();\n\n }\n\n}\n", "file_path": "examples/basic/main.rs", "rank": 95, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn cels_basic() {\n\n use std::path::Path;\n\n let path = Path::new(\"./tests/data/basic-16x16.aseprite\");\n\n let ase = AsepriteFile::read_file(&path).unwrap();\n\n\n\n let layer0 = ase.layer(0);\n\n let cel1 = layer0.frame(0);\n\n let _cel2 = ase.frame(0).layer(0);\n\n\n\n let _image = cel1.image();\n\n}\n\n\n", "file_path": "src/tests.rs", 
"rank": 96, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn blend_darken() {\n\n let f = load_test_file(\"blend_darken\");\n\n\n\n assert_eq!(f.num_frames, 1);\n\n assert_eq!((f.width, f.height), (256, 256));\n\n assert_eq!(f.num_layers(), 2);\n\n assert_eq!(f.pixel_format, PixelFormat::Rgba);\n\n\n\n compare_with_reference_image(f.frame(0).image(), \"blend_darken\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 97, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn blend_lighten() {\n\n let f = load_test_file(\"blend_lighten\");\n\n\n\n assert_eq!(f.num_frames, 1);\n\n assert_eq!((f.width, f.height), (256, 256));\n\n assert_eq!(f.num_layers(), 2);\n\n assert_eq!(f.pixel_format, PixelFormat::Rgba);\n\n\n\n compare_with_reference_image(f.frame(0).image(), \"blend_lighten\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 98, "score": 38972.81119769938 }, { "content": "#[test]\n\nfn blend_difference() {\n\n blend_test(\"blend_difference\");\n\n}\n\n\n", "file_path": "src/tests.rs", "rank": 99, "score": 38972.81119769938 } ]
Rust
modbus_server/src/server_status.rs
guozhaohui/modbus_simulator
3707d068a4ea10925a25f489516d9721b67f5ba0
use rand::Rng; use modbus_protocol::exception_code::{Result, Error, ExceptionCode}; use modbus_protocol::coils::Coil; use modbus_protocol::requests::Requests; enum ModbusRegisterType{ Coil = 0x01, DiscreteInput = 0x02, InputRegister = 0x04, HoldingRegister = 0x03, } pub struct StatusInfo { capacity: u16, coils : Vec<Coil>, discrete_inputs: Vec<Coil>, input_registers: Vec<u16>, holding_registers: Vec<u16>, } impl StatusInfo { pub fn create(size: usize) -> StatusInfo { let mut coils = vec![Coil::Off; size]; let discrete_inputs : Vec<Coil> = (0..size).map(|_| Coil::from(rand::thread_rng().gen_bool(0.5))).collect(); let input_registers: Vec<u16> = (0..size).map(|_| rand::thread_rng().gen_range(0..100)).collect(); let holding_registers = vec![0u16; size]; for i in 0..size { coils[i] = Coil::from(rand::thread_rng().gen_bool(0.5)); } StatusInfo{ capacity: size as u16, coils: coils, discrete_inputs: discrete_inputs, input_registers: input_registers, holding_registers: holding_registers} } fn convert_addr_to_index(self: &Self, register_type: ModbusRegisterType, addr: u16) -> Result<u16> { match register_type { ModbusRegisterType::Coil => { if addr > self.capacity { return Err(Error::Exception(ExceptionCode::IllegalDataAddress)); } return Ok(addr - (0 * self.capacity)); } ModbusRegisterType::DiscreteInput => { if addr < self.capacity || addr > 2 * self.capacity { return Err(Error::Exception(ExceptionCode::IllegalDataAddress)); } return Ok(addr - (1 * self.capacity)); } ModbusRegisterType::InputRegister => { if addr < 2 * self.capacity || addr > 3 * self.capacity { return Err(Error::Exception(ExceptionCode::IllegalDataAddress)); } return Ok(addr - (2 * self.capacity)); } ModbusRegisterType::HoldingRegister => { if addr < 3 * self.capacity { return Err(Error::Exception(ExceptionCode::IllegalDataAddress)); } return Ok(addr - (3 * self.capacity)); } } } fn check_range(self: &Self, register_type: ModbusRegisterType, addr: u16, count: u16) -> Result<()> { match register_type { 
ModbusRegisterType::Coil => { if (addr + count + 1) as usize > self.coils.len() { return Err(Error::Exception(ExceptionCode::IllegalDataAddress)); } return Ok(()); } ModbusRegisterType::DiscreteInput => { if (addr + count + 1) as usize > self.discrete_inputs.len() { return Err(Error::Exception(ExceptionCode::IllegalDataAddress)); } return Ok(()); } ModbusRegisterType::InputRegister => { if (addr + count + 1) as usize > self.input_registers.len() { return Err(Error::Exception(ExceptionCode::IllegalDataAddress)); } return Ok(()); } ModbusRegisterType::HoldingRegister => { if (addr + count + 1) as usize > self.holding_registers.len() { return Err(Error::Exception(ExceptionCode::IllegalDataAddress)); } return Ok(()); } } } } impl Requests for StatusInfo { fn read_coils(self: &mut Self, addr: u16, count: u16) -> Result<Vec<Coil>> { let address = self.convert_addr_to_index(ModbusRegisterType::Coil, addr)?; self.check_range(ModbusRegisterType::Coil, address, count)?; let mut coils: Vec<Coil> = vec![Coil::Off; count as usize]; coils.clone_from_slice(&self.coils[(address) as usize..(address+count) as usize]); Ok(coils) } fn read_discrete_inputs(self: &mut Self, addr: u16, count: u16) -> Result<Vec<Coil>> { let address = self.convert_addr_to_index(ModbusRegisterType::DiscreteInput, addr)?; self.check_range(ModbusRegisterType::DiscreteInput, address, count)?; let mut coils: Vec<Coil> = vec![Coil::Off; count as usize]; coils.clone_from_slice(&self.discrete_inputs[address as usize..(address+count) as usize]); Ok(coils) } fn read_holding_registers(self: &mut Self, addr: u16, count: u16) -> Result<Vec<u16>> { let address = self.convert_addr_to_index(ModbusRegisterType::HoldingRegister, addr)?; self.check_range(ModbusRegisterType::HoldingRegister, address, count)?; let mut registers: Vec<u16> = vec![0u16; count as usize]; registers.clone_from_slice(&self.holding_registers[address as usize..(address+count) as usize]); Ok(registers) } fn read_input_registers(self: &mut Self, addr: 
u16, count: u16) -> Result<Vec<u16>> { let address = self.convert_addr_to_index(ModbusRegisterType::InputRegister, addr)?; self.check_range(ModbusRegisterType::InputRegister, address, count)?; let mut registers: Vec<u16> = vec![0u16; count as usize]; registers.clone_from_slice(&self.input_registers[address as usize..(address+count) as usize]); Ok(registers) } fn write_single_coil(self: &mut Self, addr: u16, value: Coil) -> Result<()> { let address = self.convert_addr_to_index(ModbusRegisterType::Coil, addr)?; self.check_range(ModbusRegisterType::Coil, address, 1)?; self.coils[address as usize] = value; return Ok(()) } fn write_single_register(self: &mut Self, addr: u16, value: u16) -> Result<()> { let address = self.convert_addr_to_index(ModbusRegisterType::HoldingRegister, addr)?; self.check_range(ModbusRegisterType::HoldingRegister, address, 1)?; self.holding_registers[address as usize] = value; return Ok(()) } fn write_multiple_coils(self: &mut Self, addr: u16, values: &[Coil]) -> Result<()> { let address = self.convert_addr_to_index(ModbusRegisterType::Coil, addr)?; let n = values.len(); self.check_range(ModbusRegisterType::Coil, address, n as u16)?; for i in 0..n { self.coils[i + address as usize] = values[i]; } return Ok(()) } fn write_multiple_registers(self: &mut Self, addr: u16, values: &[u16]) -> Result<()> { let address = self.convert_addr_to_index(ModbusRegisterType::HoldingRegister, addr)?; let n = values.len(); self.check_range(ModbusRegisterType::HoldingRegister, address, n as u16)?; for i in 0..n { self.holding_registers[i + address as usize] = values[i]; } return Ok(()) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_server_initializaion(){ let mut status_info = StatusInfo::create(10usize); match status_info.read_coils(7u16, 2u16) { Ok(_coils) => { assert!(true); }, Err(_e) => { assert!(false); }, } match status_info.read_discrete_inputs(17u16, 2u16) { Ok(_coils) => { assert!(true); }, Err(_e) => { assert!(false); }, } match 
status_info.read_input_registers(26u16, 3u16) { Ok(_registers) => { assert!(true); }, Err(_e) => { assert!(false); }, } match status_info.read_holding_registers(36u16, 3u16) { Ok(registers) => { assert_eq!(registers, [0u16, 0u16, 0u16]); }, Err(_e) => { assert!(false); }, } } #[test] fn test_server_invalid_param1(){ let mut status_info = StatusInfo::create(10usize); match status_info.read_coils(7u16, 3u16) { Ok(_coils) => { assert!(false); }, Err(_e) => { assert!(true); }, } match status_info.read_holding_registers(37u16, 3u16) { Ok(_registers) => { assert!(false); }, Err(_e) => { assert!(true); }, } } #[test] fn test_server_writeread(){ let mut status_info = StatusInfo::create(10usize); let coils = vec![Coil::On, Coil::Off, Coil::On]; match status_info.write_multiple_coils(6u16, &coils) { Ok(()) => { assert!(true); }, Err(_e) => { assert!(false); }, } match status_info.read_coils(6u16, 3u16) { Ok(_coils) => { assert_eq!(_coils, coils); }, Err(_e) => { assert!(false); }, } let regs = vec![1u16, 2u16, 3u16]; match status_info.write_multiple_registers(36u16, &regs) { Ok(()) => { assert!(true); }, Err(_e) => { assert!(false); }, } match status_info.read_holding_registers(36u16, 3u16) { Ok(registers) => { assert_eq!(registers, regs); }, Err(_e) => { assert!(true); }, } } }
use rand::Rng; use modbus_protocol::exception_code::{Result, Error, ExceptionCode}; use modbus_protocol::coils::Coil; use modbus_protocol::requests::Requests; enum ModbusRegisterType{ Coil = 0x01, DiscreteInput = 0x02, InputRegister = 0x04, HoldingRegister = 0x03, } pub struct StatusInfo { capacity: u16, coils : Vec<Coil>, discrete_inputs: Vec<Coil>, input_registers: Vec<u16>, holding_registers: Vec<u16>, } impl StatusInfo { pub fn create(size: usize) -> StatusInfo { let mut coils = vec![Coil::Off; size]; let discrete_inputs : Vec<Coil> = (0..size).map(|_| Coil::from(ran
fn convert_addr_to_index(self: &Self, register_type: ModbusRegisterType, addr: u16) -> Result<u16> { match register_type { ModbusRegisterType::Coil => { if addr > self.capacity { return Err(Error::Exception(ExceptionCode::IllegalDataAddress)); } return Ok(addr - (0 * self.capacity)); } ModbusRegisterType::DiscreteInput => { if addr < self.capacity || addr > 2 * self.capacity { return Err(Error::Exception(ExceptionCode::IllegalDataAddress)); } return Ok(addr - (1 * self.capacity)); } ModbusRegisterType::InputRegister => { if addr < 2 * self.capacity || addr > 3 * self.capacity { return Err(Error::Exception(ExceptionCode::IllegalDataAddress)); } return Ok(addr - (2 * self.capacity)); } ModbusRegisterType::HoldingRegister => { if addr < 3 * self.capacity { return Err(Error::Exception(ExceptionCode::IllegalDataAddress)); } return Ok(addr - (3 * self.capacity)); } } } fn check_range(self: &Self, register_type: ModbusRegisterType, addr: u16, count: u16) -> Result<()> { match register_type { ModbusRegisterType::Coil => { if (addr + count + 1) as usize > self.coils.len() { return Err(Error::Exception(ExceptionCode::IllegalDataAddress)); } return Ok(()); } ModbusRegisterType::DiscreteInput => { if (addr + count + 1) as usize > self.discrete_inputs.len() { return Err(Error::Exception(ExceptionCode::IllegalDataAddress)); } return Ok(()); } ModbusRegisterType::InputRegister => { if (addr + count + 1) as usize > self.input_registers.len() { return Err(Error::Exception(ExceptionCode::IllegalDataAddress)); } return Ok(()); } ModbusRegisterType::HoldingRegister => { if (addr + count + 1) as usize > self.holding_registers.len() { return Err(Error::Exception(ExceptionCode::IllegalDataAddress)); } return Ok(()); } } } } impl Requests for StatusInfo { fn read_coils(self: &mut Self, addr: u16, count: u16) -> Result<Vec<Coil>> { let address = self.convert_addr_to_index(ModbusRegisterType::Coil, addr)?; self.check_range(ModbusRegisterType::Coil, address, count)?; let mut coils: Vec<Coil> 
= vec![Coil::Off; count as usize]; coils.clone_from_slice(&self.coils[(address) as usize..(address+count) as usize]); Ok(coils) } fn read_discrete_inputs(self: &mut Self, addr: u16, count: u16) -> Result<Vec<Coil>> { let address = self.convert_addr_to_index(ModbusRegisterType::DiscreteInput, addr)?; self.check_range(ModbusRegisterType::DiscreteInput, address, count)?; let mut coils: Vec<Coil> = vec![Coil::Off; count as usize]; coils.clone_from_slice(&self.discrete_inputs[address as usize..(address+count) as usize]); Ok(coils) } fn read_holding_registers(self: &mut Self, addr: u16, count: u16) -> Result<Vec<u16>> { let address = self.convert_addr_to_index(ModbusRegisterType::HoldingRegister, addr)?; self.check_range(ModbusRegisterType::HoldingRegister, address, count)?; let mut registers: Vec<u16> = vec![0u16; count as usize]; registers.clone_from_slice(&self.holding_registers[address as usize..(address+count) as usize]); Ok(registers) } fn read_input_registers(self: &mut Self, addr: u16, count: u16) -> Result<Vec<u16>> { let address = self.convert_addr_to_index(ModbusRegisterType::InputRegister, addr)?; self.check_range(ModbusRegisterType::InputRegister, address, count)?; let mut registers: Vec<u16> = vec![0u16; count as usize]; registers.clone_from_slice(&self.input_registers[address as usize..(address+count) as usize]); Ok(registers) } fn write_single_coil(self: &mut Self, addr: u16, value: Coil) -> Result<()> { let address = self.convert_addr_to_index(ModbusRegisterType::Coil, addr)?; self.check_range(ModbusRegisterType::Coil, address, 1)?; self.coils[address as usize] = value; return Ok(()) } fn write_single_register(self: &mut Self, addr: u16, value: u16) -> Result<()> { let address = self.convert_addr_to_index(ModbusRegisterType::HoldingRegister, addr)?; self.check_range(ModbusRegisterType::HoldingRegister, address, 1)?; self.holding_registers[address as usize] = value; return Ok(()) } fn write_multiple_coils(self: &mut Self, addr: u16, values: &[Coil]) -> 
Result<()> { let address = self.convert_addr_to_index(ModbusRegisterType::Coil, addr)?; let n = values.len(); self.check_range(ModbusRegisterType::Coil, address, n as u16)?; for i in 0..n { self.coils[i + address as usize] = values[i]; } return Ok(()) } fn write_multiple_registers(self: &mut Self, addr: u16, values: &[u16]) -> Result<()> { let address = self.convert_addr_to_index(ModbusRegisterType::HoldingRegister, addr)?; let n = values.len(); self.check_range(ModbusRegisterType::HoldingRegister, address, n as u16)?; for i in 0..n { self.holding_registers[i + address as usize] = values[i]; } return Ok(()) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_server_initializaion(){ let mut status_info = StatusInfo::create(10usize); match status_info.read_coils(7u16, 2u16) { Ok(_coils) => { assert!(true); }, Err(_e) => { assert!(false); }, } match status_info.read_discrete_inputs(17u16, 2u16) { Ok(_coils) => { assert!(true); }, Err(_e) => { assert!(false); }, } match status_info.read_input_registers(26u16, 3u16) { Ok(_registers) => { assert!(true); }, Err(_e) => { assert!(false); }, } match status_info.read_holding_registers(36u16, 3u16) { Ok(registers) => { assert_eq!(registers, [0u16, 0u16, 0u16]); }, Err(_e) => { assert!(false); }, } } #[test] fn test_server_invalid_param1(){ let mut status_info = StatusInfo::create(10usize); match status_info.read_coils(7u16, 3u16) { Ok(_coils) => { assert!(false); }, Err(_e) => { assert!(true); }, } match status_info.read_holding_registers(37u16, 3u16) { Ok(_registers) => { assert!(false); }, Err(_e) => { assert!(true); }, } } #[test] fn test_server_writeread(){ let mut status_info = StatusInfo::create(10usize); let coils = vec![Coil::On, Coil::Off, Coil::On]; match status_info.write_multiple_coils(6u16, &coils) { Ok(()) => { assert!(true); }, Err(_e) => { assert!(false); }, } match status_info.read_coils(6u16, 3u16) { Ok(_coils) => { assert_eq!(_coils, coils); }, Err(_e) => { assert!(false); }, } let regs = vec![1u16, 
2u16, 3u16]; match status_info.write_multiple_registers(36u16, &regs) { Ok(()) => { assert!(true); }, Err(_e) => { assert!(false); }, } match status_info.read_holding_registers(36u16, 3u16) { Ok(registers) => { assert_eq!(registers, regs); }, Err(_e) => { assert!(true); }, } } }
d::thread_rng().gen_bool(0.5))).collect(); let input_registers: Vec<u16> = (0..size).map(|_| rand::thread_rng().gen_range(0..100)).collect(); let holding_registers = vec![0u16; size]; for i in 0..size { coils[i] = Coil::from(rand::thread_rng().gen_bool(0.5)); } StatusInfo{ capacity: size as u16, coils: coils, discrete_inputs: discrete_inputs, input_registers: input_registers, holding_registers: holding_registers} }
function_block-function_prefixed
[ { "content": "pub fn unpack_bits(bytes: &[u8], count: u16) -> Vec<Coil> {\n\n let mut res = Vec::with_capacity(count as usize);\n\n for i in 0..count {\n\n if (bytes[(i / 8u16) as usize] >> (i % 8)) & 0b1 > 0 {\n\n res.push(Coil::On);\n\n } else {\n\n res.push(Coil::Off);\n\n }\n\n }\n\n res\n\n}\n\n\n", "file_path": "modbus_protocol/src/utils.rs", "rank": 0, "score": 128071.16418200437 }, { "content": "pub fn handle_client(mut stream: TcpStream, _tid: u16, _uid: u8,\n\n shared_status: Arc<Mutex<StatusInfo>>,\n\n peer_addr: &SocketAddr){\n\n let data = &mut [0 as u8; MODBUS_MAX_PACKET_SIZE];\n\n loop {\n\n match stream.read(data) {\n\n Err(_) => {\n\n log::info!(\"connection with {} terminated\", peer_addr.to_string());\n\n match stream.shutdown(Shutdown::Both) {\n\n Err(e) => {\n\n log::warn!(\"connection with {} shutdown failed, {}\",\n\n peer_addr.to_string(), e);\n\n },\n\n Ok(_) => {\n\n }\n\n }\n\n break;\n\n },\n\n Ok(size) => {\n\n if size > 0 {\n", "file_path": "modbus_server/src/tcp.rs", "rank": 1, "score": 120849.67555574648 }, { "content": "pub fn pack_bits(bits: &[Coil]) -> Vec<u8> {\n\n let bitcount = bits.len();\n\n let packed_size = bitcount / 8 + if bitcount % 8 > 0 { 1 } else { 0 };\n\n let mut res = vec![0; packed_size];\n\n for (i, b) in bits.iter().enumerate() {\n\n let v = match *b {\n\n Coil::On => 1u8,\n\n Coil::Off => 0u8,\n\n };\n\n res[(i / 8) as usize] |= v << (i % 8);\n\n }\n\n res\n\n}\n\n\n", "file_path": "modbus_protocol/src/utils.rs", "rank": 2, "score": 104618.12061120423 }, { "content": "pub fn unpack_bytes(data: &[u16]) -> Vec<u8> {\n\n let size = data.len();\n\n let mut res = Vec::with_capacity(size * 2);\n\n for b in data {\n\n res.push((*b >> 8 & 0xff) as u8);\n\n res.push((*b & 0xff) as u8);\n\n }\n\n res\n\n}\n\n\n", "file_path": "modbus_protocol/src/utils.rs", "rank": 3, "score": 104617.75043310375 }, { "content": "pub fn handle_pdu_data(stream: &mut TcpStream, status: &mut StatusInfo, mbap_header: Header, data: &mut 
[u8]){\n\n let mut pdu_data = Cursor::new(data.borrow_mut());\n\n let function_code = pdu_data.read_u8().unwrap();\n\n let mut buff = vec![0; MODBUS_HEADER_SIZE];\n\n match FromPrimitive::from_u8(function_code) {\n\n Some(FunctionCode::ReadCoils) =>{\n\n let addr= pdu_data.read_u16::<BigEndian>().unwrap();\n\n let count = pdu_data.read_u16::<BigEndian>().unwrap();\n\n log::info!(\"request ReadCoils addr: {}; count: {}\", addr, count);\n\n match status.read_coils(addr, count) {\n\n Ok(coils) => {\n\n buff.write_u8(function_code).unwrap();\n\n let bits = utils::pack_bits(&coils);\n\n buff.write_u8(bits.len() as u8).unwrap();\n\n for v in bits {\n\n buff.write_u8(v).unwrap();\n\n }\n\n },\n\n Err(e) => {\n\n log::info!(\"something wrong {}\", e);\n", "file_path": "modbus_server/src/tcp.rs", "rank": 4, "score": 102898.29971287717 }, { "content": "fn handle_status_error(function_code: u8, e: Error, buff: &mut [u8]) {\n\n let mut start = Cursor::new(buff.borrow_mut());\n\n start.write_u8(function_code + 0x80).unwrap();\n\n match e {\n\n Error::Exception(code) => {\n\n start.write_u8(code as u8).unwrap();\n\n },\n\n _ => (),\n\n }\n\n}\n", "file_path": "modbus_server/src/tcp.rs", "rank": 5, "score": 102752.55232308926 }, { "content": "pub fn pack_bytes(bytes: &[u8]) -> Result<Vec<u16>> {\n\n let size = bytes.len();\n\n // check if we can create u16s from bytes by packing two u8s together without rest\n\n if size % 2 != 0 {\n\n return Err(Error::InvalidData(Reason::BytecountNotEven));\n\n }\n\n\n\n let mut res = Vec::with_capacity(size / 2 + 1);\n\n let mut rdr = Cursor::new(bytes);\n\n for _ in 0..size / 2 {\n\n res.push(rdr.read_u16::<BigEndian>()?);\n\n }\n\n Ok(res)\n\n}\n\n\n", "file_path": "modbus_protocol/src/utils.rs", "rank": 6, "score": 99943.2513696251 }, { "content": "fn handle_error(e: Error) {\n\n log::info!(\"failed with {}\", e);\n\n}\n\n\n", "file_path": "modbus_client/src/main.rs", "rank": 7, "score": 92009.47219454589 }, { "content": "fn 
write_response(stream: &mut TcpStream, header: Header, buff: &mut [u8]) {\n\n if buff.is_empty() {\n\n return;\n\n }\n\n\n\n if buff.len() > MODBUS_MAX_PACKET_SIZE {\n\n return;\n\n }\n\n\n\n let reply_header = Header::new(header.tid,\n\n header.uid,\n\n buff.len() as u16);\n\n\n\n let head_buff = reply_header.pack();\n\n let mut start = Cursor::new(buff.borrow_mut());\n\n match start.write_all(&head_buff.unwrap()) {\n\n Ok(_s) => {\n\n },\n\n Err(_e) => {\n\n },\n\n }\n\n match stream.write_all(buff) {\n\n Ok(_s) => {\n\n log::debug!(\"send reply message\");\n\n },\n\n Err(_e) => {\n\n },\n\n }\n\n}\n\n\n", "file_path": "modbus_server/src/tcp.rs", "rank": 8, "score": 74077.54254896202 }, { "content": "fn handle_request(client: &mut Transport, fun: &ModbusFunction) {\n\n match *fun {\n\n ModbusFunction::ReadCoils(addr, qtty) => {\n\n match client.read_coils(addr, qtty) {\n\n Err(e) =>{\n\n handle_error(e);\n\n },\n\n Ok(_) => {\n\n log::info!(\"Succeeded\");\n\n }\n\n };\n\n }\n\n ModbusFunction::ReadDiscreteInputs(addr, qtty) => {\n\n match client.read_discrete_inputs(addr, qtty) {\n\n Err(e) =>{\n\n handle_error(e);\n\n },\n\n Ok(_) => {\n\n log::info!(\"Succeeded\");\n\n }\n", "file_path": "modbus_client/src/main.rs", "rank": 9, "score": 68224.73960402023 }, { "content": "#[derive(Debug, PartialEq)]\n\nstruct Header {\n\n tid: u16,\n\n pid: u16,\n\n len: u16,\n\n uid: u8,\n\n}\n\n\n\nimpl Header {\n\n fn new(transport: &mut Transport, len: u16) -> Header {\n\n Header {\n\n tid: transport.new_tid(),\n\n pid: MODBUS_PROTOCOL_TCP,\n\n len: len - MODBUS_HEADER_SIZE as u16,\n\n uid: transport.uid,\n\n }\n\n }\n\n\n\n fn pack(&self) -> Result<Vec<u8>> {\n\n let mut buff = vec![];\n\n buff.write_u16::<BigEndian>(self.tid)?;\n", "file_path": "modbus_client/src/tcp.rs", "rank": 10, "score": 50299.94633372931 }, { "content": "fn main() {\n\n\n\n #[cfg(feature=\"log4rs_yaml\")]\n\n log4rs::init_file(\"modbus_client_log.yaml\", Default::default()).unwrap();\n\n 
#[cfg(not(feature=\"log4rs_yaml\"))]\n\n {\n\n let logfile = log4rs::append::file::FileAppender::builder()\n\n .encoder(Box::new(log4rs::encode::pattern::PatternEncoder::new(\"{d} - {m}{n}\")))\n\n .build(\"log/modbus_client.log\").unwrap();\n\n\n\n let log4rs_config = log4rs::config::Config::builder()\n\n .appender(log4rs::config::Appender::builder().build(\"logfile\", Box::new(logfile)))\n\n .build(log4rs::config::Root::builder()\n\n .appender(\"logfile\")\n\n .build(log::LevelFilter::Info)).unwrap();\n\n\n\n log4rs::init_config(log4rs_config).unwrap();\n\n }\n\n let matches = App::new(\"client\")\n\n .author(\"Zhaohui GUO <[email protected]>\")\n", "file_path": "modbus_client/src/main.rs", "rank": 11, "score": 48552.88897334824 }, { "content": "fn main() {\n\n let mut children = vec![];\n\n let mut uid: u8 = 0;\n\n let mut tid: u16 = 0;\n\n let mut size: usize = 0;\n\n\n\n #[cfg(feature=\"log4rs_yaml\")]\n\n log4rs::init_file(\"modbus_server_log.yaml\", Default::default()).unwrap();\n\n #[cfg(not(feature=\"log4rs_yaml\"))]\n\n {\n\n let logfile = log4rs::append::file::FileAppender::builder()\n\n .encoder(Box::new(log4rs::encode::pattern::PatternEncoder::new(\"{d} - {T} - {m}{n}\")))\n\n .build(\"log/modbus_server.log\").unwrap();\n\n\n\n let log4rs_config = log4rs::config::Config::builder()\n\n .appender(log4rs::config::Appender::builder().build(\"logfile\", Box::new(logfile)))\n\n .build(log4rs::config::Root::builder()\n\n .appender(\"logfile\")\n\n .build(log::LevelFilter::Info)).unwrap();\n\n\n", "file_path": "modbus_server/src/main.rs", "rank": 12, "score": 48552.88897334824 }, { "content": "#[test]\n\nfn test_unpack_bits() {\n\n // assert_eq!(unpack_bits(, 0), &[]);\n\n assert_eq!(unpack_bits(&[0, 0], 0), &[]);\n\n assert_eq!(unpack_bits(&[0b1], 1), &[Coil::On]);\n\n assert_eq!(unpack_bits(&[0b01], 2), &[Coil::On, Coil::Off]);\n\n assert_eq!(unpack_bits(&[0b10], 2), &[Coil::Off, Coil::On]);\n\n assert_eq!(unpack_bits(&[0b101], 3), &[Coil::On, Coil::Off, 
Coil::On]);\n\n assert_eq!(unpack_bits(&[0xff, 0b11], 10), &[Coil::On; 10]);\n\n}\n\n\n", "file_path": "modbus_protocol/src/utils.rs", "rank": 14, "score": 46785.907096051276 }, { "content": "#[test]\n\nfn test_pack_bits() {\n\n assert_eq!(pack_bits(&[]), &[]);\n\n assert_eq!(pack_bits(&[Coil::On]), &[1]);\n\n assert_eq!(pack_bits(&[Coil::Off]), &[0]);\n\n assert_eq!(pack_bits(&[Coil::On, Coil::Off]), &[1]);\n\n assert_eq!(pack_bits(&[Coil::Off, Coil::On]), &[2]);\n\n assert_eq!(pack_bits(&[Coil::On, Coil::On]), &[3]);\n\n assert_eq!(pack_bits(&[Coil::On; 8]), &[255]);\n\n assert_eq!(pack_bits(&[Coil::On; 9]), &[255, 1]);\n\n assert_eq!(pack_bits(&[Coil::Off; 8]), &[0]);\n\n assert_eq!(pack_bits(&[Coil::Off; 9]), &[0, 0]);\n\n}\n\n\n", "file_path": "modbus_protocol/src/utils.rs", "rank": 15, "score": 46785.907096051276 }, { "content": "#[test]\n\nfn test_unpack_bytes() {\n\n assert_eq!(unpack_bytes(&[]), &[]);\n\n assert_eq!(unpack_bytes(&[0]), &[0, 0]);\n\n assert_eq!(unpack_bytes(&[1]), &[0, 1]);\n\n assert_eq!(unpack_bytes(&[0xffff]), &[0xff, 0xff]);\n\n assert_eq!(unpack_bytes(&[0xffff, 0x0001]), &[0xff, 0xff, 0x00, 0x01]);\n\n assert_eq!(unpack_bytes(&[0xffff, 0x1001]), &[0xff, 0xff, 0x10, 0x01]);\n\n}\n\n\n", "file_path": "modbus_protocol/src/utils.rs", "rank": 16, "score": 46785.907096051276 }, { "content": "#[test]\n\nfn test_pack_bytes() {\n\n assert_eq!(pack_bytes(&[]).unwrap(), &[]);\n\n assert_eq!(pack_bytes(&[0, 0]).unwrap(), &[0]);\n\n assert_eq!(pack_bytes(&[0, 1]).unwrap(), &[1]);\n\n assert_eq!(pack_bytes(&[1, 0]).unwrap(), &[256]);\n\n assert_eq!(pack_bytes(&[1, 1]).unwrap(), &[257]);\n\n assert_eq!(pack_bytes(&[0, 1, 0, 2]).unwrap(), &[1, 2]);\n\n assert_eq!(pack_bytes(&[1, 1, 1, 2]).unwrap(), &[257, 258]);\n\n assert!(pack_bytes(&[1]).is_err());\n\n assert!(pack_bytes(&[1, 2, 3]).is_err());\n\n}\n", "file_path": "modbus_protocol/src/utils.rs", "rank": 17, "score": 46785.907096051276 }, { "content": "pub trait Requests {\n\n fn 
read_discrete_inputs(&mut self, address: u16, quantity: u16) -> Result<Vec<Coil>>;\n\n\n\n fn read_coils(&mut self, address: u16, quantity: u16) -> Result<Vec<Coil>>;\n\n\n\n fn write_single_coil(&mut self, address: u16, value: Coil) -> Result<()>;\n\n\n\n fn write_multiple_coils(&mut self, address: u16, coils: &[Coil]) -> Result<()>;\n\n\n\n fn read_input_registers(&mut self, address: u16, quantity: u16) -> Result<Vec<u16>>;\n\n\n\n fn read_holding_registers(&mut self, address: u16, quantity: u16) -> Result<Vec<u16>>;\n\n\n\n fn write_single_register(&mut self, address: u16, value: u16) -> Result<()>;\n\n\n\n fn write_multiple_registers(&mut self, address: u16, values: &[u16]) -> Result<()>;\n\n}\n", "file_path": "modbus_protocol/src/requests.rs", "rank": 18, "score": 45485.48330513471 }, { "content": "/// Single bit status values, used in read or write coil functions\n\nuse super::exception_code::Error;\n\nuse super::exception_code::Result;\n\nuse std::str::FromStr;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum Coil {\n\n On,\n\n Off,\n\n}\n\n\n\nimpl Coil {\n\n pub fn code(self) -> u16 {\n\n match self {\n\n Coil::On => 0xff00,\n\n Coil::Off => 0x0000,\n\n }\n\n }\n\n pub fn from_u16(code: u16) -> Result<Coil> {\n\n match code {\n", "file_path": "modbus_protocol/src/coils.rs", "rank": 19, "score": 30545.414396018805 }, { "content": " } else {\n\n Err(Error::ParseCoilError)\n\n }\n\n }\n\n}\n\n\n\nimpl From<bool> for Coil {\n\n fn from(b: bool) -> Coil {\n\n if b {\n\n Coil::On\n\n } else {\n\n Coil::Off\n\n }\n\n }\n\n}\n\n\n\nimpl std::ops::Not for Coil {\n\n type Output = Coil;\n\n\n\n fn not(self) -> Coil {\n\n match self {\n\n Coil::On => Coil::Off,\n\n Coil::Off => Coil::On,\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "modbus_protocol/src/coils.rs", "rank": 20, "score": 30540.27205310455 }, { "content": " 0xff00 => {\n\n Ok(Coil::On)\n\n }\n\n 0x0000 => {\n\n Ok(Coil::Off)\n\n }\n\n _ => {\n\n Err(Error::ParseCoilError)\n\n }\n\n }\n\n 
}\n\n}\n\n\n\nimpl FromStr for Coil {\n\n type Err = Error;\n\n fn from_str(s: &str) -> Result<Coil> {\n\n if s == \"On\" {\n\n Ok(Coil::On)\n\n } else if s == \"Off\" {\n\n Ok(Coil::Off)\n", "file_path": "modbus_protocol/src/coils.rs", "rank": 21, "score": 30539.71467364912 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"modbus_protocol\"] = [{\"text\":\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/std/error/trait.Error.html\\\" title=\\\"trait std::error::Error\\\">Error</a> for <a class=\\\"enum\\\" href=\\\"modbus_protocol/exception_code/enum.Error.html\\\" title=\\\"enum modbus_protocol::exception_code::Error\\\">Error</a>\",\"synthetic\":false,\"types\":[\"modbus_protocol::exception_code::Error\"]}];\n", "file_path": "docs/implementors/std/error/trait.Error.js", "rank": 22, "score": 29446.018752229254 }, { "content": "initSidebarItems({\"enum\":[[\"Coil\",\"\"]]});", "file_path": "docs/modbus_protocol/coils/sidebar-items.js", "rank": 23, "score": 20805.815367238858 }, { "content": "use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};\n\nuse std::io::Cursor;\n\nuse modbus_protocol::exception_code::{Error, Reason, Result};\n\nconst MODBUS_PROTOCOL_TCP: u16 = 0x0000;\n\npub const MODBUS_HEADER_SIZE: usize = 7;\n\n#[derive(Debug, PartialEq)]\n\npub struct Header {\n\n pub tid: u16,\n\n pub pid: u16,\n\n pub len: u16,\n\n pub uid: u8,\n\n}\n\n\n\nimpl Header {\n\n pub fn new(tid: u16, uid: u8, len: u16) -> Header {\n\n Header {\n\n tid: tid,\n\n pid: MODBUS_PROTOCOL_TCP,\n\n len: len - MODBUS_HEADER_SIZE as u16,\n\n uid: uid,\n", "file_path": "modbus_server/src/mbap.rs", "rank": 24, "score": 13.892117262307387 }, { "content": "use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};\n\nuse std::borrow::BorrowMut;\n\nuse std::io::{self, Cursor, Read, Write};\n\nuse std::net::{Shutdown, TcpStream, ToSocketAddrs};\n\nuse std::time::Duration;\n\nuse modbus_protocol::exception_code::{Error, ExceptionCode, Reason, 
Result};\n\nuse modbus_protocol::coils::Coil;\n\nuse modbus_protocol::requests::Requests;\n\nuse modbus_protocol::utils;\n\nuse num_traits::FromPrimitive;\n\n\n\nconst MODBUS_PROTOCOL_TCP: u16 = 0x0000;\n\nconst MODBUS_TCP_DEFAULT_PORT: u16 = 502;\n\nconst MODBUS_HEADER_SIZE: usize = 7;\n\nconst MODBUS_MAX_PACKET_SIZE: usize = 260;\n\n\n\n/// ModbusConfig structure for more control over the tcp socket settings\n\n#[derive(Clone, Copy)]\n\npub struct ModbusConfig {\n\n /// The TCP port to use for communication (Default: `502`)\n", "file_path": "modbus_client/src/tcp.rs", "rank": 25, "score": 12.727768205250085 }, { "content": " Err(e) => Err(e),\n\n }\n\n }\n\n\n\n /// Set the unit identifier.\n\n pub fn _set_uid(&mut self, uid: u8) {\n\n self.uid = uid;\n\n }\n\n // Create a new transaction Id, incrementing the previous one.\n\n // The Id is wrapping around if the Id reaches `u16::MAX`.\n\n fn new_tid(&mut self) -> u16 {\n\n self.tid = self.tid.wrapping_add(1);\n\n self.tid\n\n }\n\n\n\n fn read(self: &mut Self, fun: &Function) -> Result<Vec<u8>> {\n\n let packed_size = |v: u16| v / 8 + if v % 8 > 0 { 1 } else { 0 };\n\n let (addr, count, expected_bytes) = match *fun {\n\n Function::ReadCoils(a, c) | Function::ReadDiscreteInputs(a, c) => {\n\n (a, c, packed_size(c) as usize)\n", "file_path": "modbus_client/src/tcp.rs", "rank": 26, "score": 12.343368157269373 }, { "content": " Err(e) => Err(Error::Io(e)),\n\n }\n\n }\n\n Err(e) => Err(Error::Io(e)),\n\n }\n\n }\n\n\n\n pub fn _close(self: &mut Self) -> Result<()> {\n\n self.stream.shutdown(Shutdown::Both).map_err(Error::Io)\n\n }\n\n}\n\n\n\nimpl Requests for Transport {\n\n /// Read `count` bits starting at address `addr`.\n\n fn read_coils(self: &mut Self, addr: u16, count: u16) -> Result<Vec<Coil>> {\n\n let bytes = self.read(&Function::ReadCoils(addr, count))?;\n\n Ok(utils::unpack_bits(&bytes, count))\n\n }\n\n\n\n /// Read `count` input bits starting at address `addr`.\n", "file_path": 
"modbus_client/src/tcp.rs", "rank": 28, "score": 11.436195417249795 }, { "content": "use num_derive::FromPrimitive;\n\nuse super::coils::Coil;\n\n\n\npub type Address = u16;\n\npub type Quantity = u16;\n\npub type Value = u16;\n\npub type Values= Vec<u16> ;\n\npub type Coils= Vec<Coil> ;\n\n\n\n#[derive(FromPrimitive)]\n\npub enum FunctionCode{\n\n ReadCoils = 0x01,\n\n ReadDiscreteInputs = 0x02,\n\n ReadHoldingRegisters = 0x03,\n\n ReadInputRegisters = 0x04,\n\n WriteSingleCoil = 0x05,\n\n WriteSingleRegister = 0x06,\n\n WriteMultipleCoils = 0x0f,\n\n WriteMultipleRegisters = 0x10,\n\n}\n", "file_path": "modbus_protocol/src/function_code.rs", "rank": 30, "score": 10.723038651460342 }, { "content": " }\n\n Function::ReadHoldingRegisters(a, c) | Function::ReadInputRegisters(a, c) => {\n\n (a, c, 2 * c as usize)\n\n }\n\n _ => return Err(Error::InvalidFunction),\n\n };\n\n\n\n if count < 1 {\n\n return Err(Error::InvalidData(Reason::RecvBufferEmpty));\n\n }\n\n\n\n if count as usize > MODBUS_MAX_PACKET_SIZE {\n\n return Err(Error::InvalidData(Reason::UnexpectedReplySize));\n\n }\n\n\n\n let header = Header::new(self, MODBUS_HEADER_SIZE as u16 + 6u16);\n\n let mut buff = header.pack()?;\n\n buff.write_u8(fun.code())?;\n\n buff.write_u16::<BigEndian>(addr)?;\n\n buff.write_u16::<BigEndian>(count)?;\n", "file_path": "modbus_client/src/tcp.rs", "rank": 31, "score": 10.691693685036784 }, { "content": " buff.write_u8(function_code).unwrap();\n\n },\n\n Err(e) => {\n\n log::info!(\"something wrong {}\", e);\n\n handle_status_error(function_code, e, &mut buff);\n\n }\n\n }\n\n },\n\n Some(FunctionCode::WriteMultipleCoils) => {\n\n let addr= pdu_data.read_u16::<BigEndian>().unwrap();\n\n let count = pdu_data.read_u16::<BigEndian>().unwrap();\n\n log::info!(\"request WriteMultipleCoils, addr: {}; count: {}\", addr, count);\n\n let mut values :Vec<Coil> = Vec::with_capacity(count as usize);\n\n for i in 0..count-1 {\n\n values[i as usize] = 
Coil::from_u16(pdu_data.read_u16::<BigEndian>().unwrap()).unwrap();\n\n }\n\n match status.write_multiple_coils(addr, &values[..]) {\n\n Ok(()) => {\n\n buff.write_u8(function_code).unwrap();\n\n },\n", "file_path": "modbus_server/src/tcp.rs", "rank": 33, "score": 10.611264590018438 }, { "content": "use num_traits::FromPrimitive;\n\nuse std::net::{Shutdown, TcpStream};\n\nuse std::net::{SocketAddr};\n\nuse std::io::{Cursor, Read, Write};\n\nuse std::sync::{Arc, Mutex};\n\nuse byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};\n\nuse std::borrow::BorrowMut;\n\n\n\nuse modbus_protocol::utils;\n\nuse modbus_protocol::coils::Coil;\n\nuse modbus_protocol::function_code::FunctionCode;\n\nuse modbus_protocol::requests::Requests;\n\nuse modbus_protocol::exception_code::{Error, ExceptionCode};\n\nuse super::server_status::StatusInfo;\n\nuse super::mbap::Header;\n\nuse super::mbap::MODBUS_HEADER_SIZE;\n\n\n\nconst MODBUS_MAX_PACKET_SIZE: usize = 260;\n\n\n", "file_path": "modbus_server/src/tcp.rs", "rank": 36, "score": 10.019102453895533 }, { "content": " }\n\n\n\n fn _get_data(reply: &[u8], expected_bytes: usize) -> Result<Vec<u8>> {\n\n if reply[8] as usize != expected_bytes\n\n || reply.len() != MODBUS_HEADER_SIZE + expected_bytes + 2\n\n {\n\n Err(Error::InvalidData(Reason::UnexpectedReplySize))\n\n } else {\n\n let mut d = Vec::new();\n\n d.extend_from_slice(&reply[MODBUS_HEADER_SIZE + 2..]);\n\n Ok(d)\n\n }\n\n }\n\n\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n", "file_path": "modbus_server/src/mbap.rs", "rank": 37, "score": 9.920700025957098 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl From<ExceptionCode> for Error {\n\n fn from(err: ExceptionCode) -> Error {\n\n Error::Exception(err)\n\n }\n\n}\n\n\n\nimpl From<io::Error> for Error {\n\n fn from(err: io::Error) -> Error {\n\n Error::Io(err)\n\n }\n\n}\n\n\n\n/// Result type used to nofify success or failure in communication\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\n\n", 
"file_path": "modbus_protocol/src/exception_code.rs", "rank": 38, "score": 9.675227135976975 }, { "content": " }\n\n}\n\n\n\nimpl ModbusConfig {\n\n pub fn set_port(self: &mut Self, port: u16) {\n\n self.tcp_port = port;\n\n }\n\n pub fn set_uid(self: &mut Self, uid: u8) {\n\n self.modbus_uid = uid;\n\n }\n\n}\n\n\n\n\n\n#[derive(Debug, PartialEq)]\n", "file_path": "modbus_client/src/tcp.rs", "rank": 40, "score": 9.24877478623141 }, { "content": " Io(io::Error),\n\n InvalidResponse,\n\n InvalidData(Reason),\n\n InvalidFunction,\n\n ParseCoilError,\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n use Error::*;\n\n\n\n match *self {\n\n Exception(ref code) => write!(f, \"modbus exception: {:?}\", code),\n\n Io(ref err) => write!(f, \"I/O error: {}\", err),\n\n InvalidResponse => write!(f, \"invalid response\"),\n\n InvalidData(ref reason) => write!(f, \"invalid data: {:?}\", reason),\n\n InvalidFunction => write!(f, \"invalid modbus function\"),\n\n ParseCoilError => write!(f, \"parse coil could not be parsed\"),\n\n }\n\n }\n", "file_path": "modbus_protocol/src/exception_code.rs", "rank": 42, "score": 9.075414427712007 }, { "content": " Ok(d)\n\n }\n\n }\n\n\n\n fn write_single(self: &mut Self, fun: &Function) -> Result<()> {\n\n let (addr, value) = match *fun {\n\n Function::WriteSingleCoil(a, v) | Function::WriteSingleRegister(a, v) => (a, v),\n\n _ => return Err(Error::InvalidFunction),\n\n };\n\n\n\n let mut buff = vec![0; MODBUS_HEADER_SIZE]; // Header gets filled in later\n\n buff.write_u8(fun.code())?;\n\n buff.write_u16::<BigEndian>(addr)?;\n\n buff.write_u16::<BigEndian>(value)?;\n\n self.write(&mut buff)\n\n }\n\n\n\n fn write_multiple(self: &mut Self, fun: &Function) -> Result<()> {\n\n let (addr, quantity, values) = match *fun {\n\n Function::WriteMultipleCoils(a, q, v) | Function::WriteMultipleRegisters(a, q, v) => {\n", "file_path": "modbus_client/src/tcp.rs", "rank": 44, "score": 
8.663704748802992 }, { "content": "}\n\n\n\nimpl std::error::Error for Error {\n\n fn description(&self) -> &str {\n\n use Error::*;\n\n\n\n match *self {\n\n Exception(_) => \"modbus exception\",\n\n Io(_) => \"I/O error\",\n\n InvalidResponse => \"invalid response\",\n\n InvalidData(_) => \"invalid data\",\n\n InvalidFunction => \"invalid modbus function\",\n\n ParseCoilError => \"parse coil could not be parsed\",\n\n }\n\n }\n\n\n\n fn cause(&self) -> Option<&dyn std::error::Error> {\n\n match *self {\n\n Error::Io(ref err) => Some(err),\n\n _ => None,\n", "file_path": "modbus_protocol/src/exception_code.rs", "rank": 45, "score": 8.626184476696361 }, { "content": " Err(e) => {\n\n log::info!(\"something wrong {}\", e);\n\n handle_status_error(function_code, e, &mut buff);\n\n }\n\n }\n\n },\n\n Some(FunctionCode::WriteMultipleRegisters) => {\n\n let addr= pdu_data.read_u16::<BigEndian>().unwrap();\n\n let count = pdu_data.read_u16::<BigEndian>().unwrap();\n\n log::info!(\"request WriteMultipleRegisters, addr: {}; count: {}\", addr, count);\n\n let mut values :Vec<u16> = Vec::with_capacity(count as usize);\n\n for i in 0..count-1 {\n\n values[i as usize] = pdu_data.read_u16::<BigEndian>().unwrap();\n\n }\n\n match status.write_multiple_registers(addr, &values[..]) {\n\n Ok(()) => {\n\n buff.write_u8(function_code).unwrap();\n\n },\n\n Err(e) => {\n\n log::info!(\"something wrong {}\", e);\n", "file_path": "modbus_server/src/tcp.rs", "rank": 47, "score": 8.319600961520734 }, { "content": " ReadHoldingRegisters(u16, u16),\n\n ReadInputRegisters(u16, u16),\n\n WriteSingleCoil(u16, u16),\n\n WriteSingleRegister(u16, u16),\n\n WriteMultipleCoils(u16, u16, &'a [u8]),\n\n WriteMultipleRegisters(u16, u16, &'a [u8]),\n\n}\n\n\n\nimpl<'a> Function<'a> {\n\n pub fn code(&self) -> u8 {\n\n match *self {\n\n Function::ReadCoils(_, _) => 0x01,\n\n Function::ReadDiscreteInputs(_, _) => 0x02,\n\n Function::ReadHoldingRegisters(_, _) => 0x03,\n\n Function::ReadInputRegisters(_, 
_) => 0x04,\n\n Function::WriteSingleCoil(_, _) => 0x05,\n\n Function::WriteSingleRegister(_, _) => 0x06,\n\n Function::WriteMultipleCoils(_, _, _) => 0x0f,\n\n Function::WriteMultipleRegisters(_, _, _) => 0x10,\n\n }\n", "file_path": "modbus_client/src/tcp.rs", "rank": 48, "score": 8.177936676955662 }, { "content": " (a, q, v)\n\n }\n\n _ => return Err(Error::InvalidFunction),\n\n };\n\n\n\n let mut buff = vec![0; MODBUS_HEADER_SIZE]; // Header gets filled in later\n\n buff.write_u8(fun.code())?;\n\n buff.write_u16::<BigEndian>(addr)?;\n\n buff.write_u16::<BigEndian>(quantity)?;\n\n buff.write_u8(values.len() as u8)?;\n\n for v in values {\n\n buff.write_u8(*v)?;\n\n }\n\n self.write(&mut buff)\n\n }\n\n\n\n fn write(self: &mut Self, buff: &mut [u8]) -> Result<()> {\n\n if buff.is_empty() {\n\n return Err(Error::InvalidData(Reason::SendBufferEmpty));\n\n }\n", "file_path": "modbus_client/src/tcp.rs", "rank": 50, "score": 7.934060926204323 }, { "content": "extern crate byteorder;\n\n\n\nuse byteorder::{BigEndian, ReadBytesExt};\n\nuse std::io::Cursor;\n\nuse super::coils::Coil;\n\nuse super::exception_code::{Error, Reason, Result};\n\n\n", "file_path": "modbus_protocol/src/utils.rs", "rank": 51, "score": 7.809734044376703 }, { "content": "mod tcp;\n\nconst MODBUS_TCP_DEFAULT_PORT: u16 = 502;\n\n\n\n/// Config structure for more control over the tcp socket settings\n\n#[derive(Clone, Copy)]\n\npub struct ModbusConfig {\n\n /// The TCP port to use for communication (Default: `502`)\n\n pub tcp_port: u16,\n\n /// Connection timeout for TCP socket (Default: `OS Default`)\n\n pub tcp_connect_timeout: Option<Duration>,\n\n /// Timeout when reading from the TCP socket (Default: `infinite`)\n\n pub tcp_read_timeout: Option<Duration>,\n\n /// Timeout when writing to the TCP socket (Default: `infinite`)\n\n pub tcp_write_timeout: Option<Duration>,\n\n /// The modbus Unit Identifier used in the modbus layer (Default: `1`)\n\n pub modbus_uid: u8,\n\n}\n\n\n\nimpl Default for 
ModbusConfig {\n\n fn default() -> ModbusConfig {\n", "file_path": "modbus_server/src/main.rs", "rank": 52, "score": 7.620244425804724 }, { "content": " } else {\n\n log::info!(\"invalid response code\");\n\n log::info!(\" expected: {}, result: {}\", req[7], resp[7]);\n\n Err(Error::InvalidResponse)\n\n }\n\n }\n\n\n\n fn get_reply_data(reply: &[u8], expected_bytes: usize) -> Result<Vec<u8>> {\n\n if reply[8] as usize != expected_bytes\n\n || reply.len() != MODBUS_HEADER_SIZE + expected_bytes + 2\n\n {\n\n log::info!(\"Unexpected reply size\");\n\n log::info!(\" length field, expected: {}, result: {}\",\n\n expected_bytes, reply[8]);\n\n log::info!(\" length expected: {}, result: {}\",\n\n MODBUS_HEADER_SIZE + expected_bytes + 2, reply.len());\n\n Err(Error::InvalidData(Reason::UnexpectedReplySize))\n\n } else {\n\n let mut d = Vec::new();\n\n d.extend_from_slice(&reply[MODBUS_HEADER_SIZE + 2..]);\n", "file_path": "modbus_client/src/tcp.rs", "rank": 53, "score": 7.582006598978881 }, { "content": " }\n\n\n\n /// Write a single 16bit register to address `addr`.\n\n fn write_single_register(self: &mut Self, addr: u16, value: u16) -> Result<()> {\n\n self.write_single(&Function::WriteSingleRegister(addr, value))\n\n }\n\n\n\n /// Write a multiple coils (bits) starting at address `addr`.\n\n fn write_multiple_coils(self: &mut Self, addr: u16, values: &[Coil]) -> Result<()> {\n\n let bytes = utils::pack_bits(values);\n\n self.write_multiple(&Function::WriteMultipleCoils(\n\n addr,\n\n values.len() as u16,\n\n &bytes,\n\n ))\n\n }\n\n\n\n /// Write a multiple 16bit registers starting at address `addr`.\n\n fn write_multiple_registers(self: &mut Self, addr: u16, values: &[u16]) -> Result<()> {\n\n let bytes = utils::unpack_bytes(values);\n", "file_path": "modbus_client/src/tcp.rs", "rank": 54, "score": 7.119124920539209 }, { "content": " fn read_discrete_inputs(self: &mut Self, addr: u16, count: u16) -> Result<Vec<Coil>> {\n\n let bytes = 
self.read(&Function::ReadDiscreteInputs(addr, count))?;\n\n Ok(utils::unpack_bits(&bytes, count))\n\n }\n\n\n\n /// Read `count` 16bit registers starting at address `addr`.\n\n fn read_holding_registers(self: &mut Self, addr: u16, count: u16) -> Result<Vec<u16>> {\n\n let bytes = self.read(&Function::ReadHoldingRegisters(addr, count))?;\n\n utils::pack_bytes(&bytes[..])\n\n }\n\n\n\n /// Read `count` 16bit input registers starting at address `addr`.\n\n fn read_input_registers(self: &mut Self, addr: u16, count: u16) -> Result<Vec<u16>> {\n\n let bytes = self.read(&Function::ReadInputRegisters(addr, count))?;\n\n utils::pack_bytes(&bytes[..])\n\n }\n\n\n\n /// Write a single coil (bit) to address `addr`.\n\n fn write_single_coil(self: &mut Self, addr: u16, value: Coil) -> Result<()> {\n\n self.write_single(&Function::WriteSingleCoil(addr, value.code()))\n", "file_path": "modbus_client/src/tcp.rs", "rank": 55, "score": 7.045302390219495 }, { "content": " buff.write_u16::<BigEndian>(self.pid)?;\n\n buff.write_u16::<BigEndian>(self.len)?;\n\n buff.write_u8(self.uid)?;\n\n Ok(buff)\n\n }\n\n\n\n fn unpack(buff: &[u8]) -> Result<Header> {\n\n let mut rdr = Cursor::new(buff);\n\n Ok(Header {\n\n tid: rdr.read_u16::<BigEndian>()?,\n\n pid: rdr.read_u16::<BigEndian>()?,\n\n len: rdr.read_u16::<BigEndian>()?,\n\n uid: rdr.read_u8()?,\n\n })\n\n }\n\n}\n\n\n\npub enum Function<'a> {\n\n ReadCoils(u16, u16),\n\n ReadDiscreteInputs(u16, u16),\n", "file_path": "modbus_client/src/tcp.rs", "rank": 56, "score": 6.996262913417439 }, { "content": "\n\n/// `InvalidData` reasons\n\n#[derive(Debug, PartialEq)]\n\npub enum Reason {\n\n UnexpectedReplySize,\n\n BytecountNotEven,\n\n SendBufferEmpty,\n\n RecvBufferEmpty,\n\n SendBufferTooBig,\n\n DecodingError,\n\n EncodingError,\n\n InvalidByteorder,\n\n InvalidRequestParameter,\n\n Custom(String),\n\n}\n\n\n\n/// Combination of Modbus, IO and data corruption errors\n\n#[derive(Debug)]\n\npub enum Error {\n\n Exception(ExceptionCode),\n", 
"file_path": "modbus_protocol/src/exception_code.rs", "rank": 57, "score": 6.80115766524432 }, { "content": " handle_status_error(function_code, e, &mut buff);\n\n }\n\n }\n\n },\n\n Some(FunctionCode::ReadDiscreteInputs) =>{\n\n let addr= pdu_data.read_u16::<BigEndian>().unwrap();\n\n let count = pdu_data.read_u16::<BigEndian>().unwrap();\n\n log::info!(\"request ReadDiscreteInputs, addr: {}; count: {}\", addr, count);\n\n match status.read_discrete_inputs(addr, count) {\n\n Ok(coils) => {\n\n buff.write_u8(function_code).unwrap();\n\n let bits = utils::pack_bits(&coils);\n\n buff.write_u8(bits.len() as u8).unwrap();\n\n for v in bits {\n\n buff.write_u8(v).unwrap();\n\n }\n\n },\n\n Err(e) => {\n\n log::info!(\"something wrong {}\", e);\n\n handle_status_error(function_code, e, &mut buff);\n", "file_path": "modbus_server/src/tcp.rs", "rank": 58, "score": 6.7530932686157215 }, { "content": "\n\n if buff.len() > MODBUS_MAX_PACKET_SIZE {\n\n return Err(Error::InvalidData(Reason::SendBufferTooBig));\n\n }\n\n\n\n let header = Header::new(self, buff.len() as u16 + 1u16);\n\n let head_buff = header.pack()?;\n\n {\n\n let mut start = Cursor::new(buff.borrow_mut());\n\n start.write_all(&head_buff)?;\n\n }\n\n match self.stream.write_all(buff) {\n\n Ok(_s) => {\n\n let reply = &mut [0; 12];\n\n match self.stream.read(reply) {\n\n Ok(_s) => {\n\n let resp_hd = Header::unpack(reply)?;\n\n Transport::validate_response_header(&header, &resp_hd)?;\n\n Transport::validate_response_code(buff, reply)\n\n }\n", "file_path": "modbus_client/src/tcp.rs", "rank": 59, "score": 6.732658574079838 }, { "content": " }\n\n }\n\n\n\n pub fn pack(&self) -> Result<Vec<u8>> {\n\n let mut buff = vec![];\n\n buff.write_u16::<BigEndian>(self.tid)?;\n\n buff.write_u16::<BigEndian>(self.pid)?;\n\n buff.write_u16::<BigEndian>(self.len)?;\n\n buff.write_u8(self.uid)?;\n\n Ok(buff)\n\n }\n\n\n\n pub fn unpack(buff: &[u8]) -> Result<Header> {\n\n let mut rdr = Cursor::new(buff);\n\n Ok(Header {\n\n tid: 
rdr.read_u16::<BigEndian>()?,\n\n pid: rdr.read_u16::<BigEndian>()?,\n\n len: rdr.read_u16::<BigEndian>()?,\n\n uid: rdr.read_u8()?,\n\n })\n", "file_path": "modbus_server/src/mbap.rs", "rank": 60, "score": 6.68289026333804 }, { "content": " Some(FunctionCode::WriteSingleCoil) => {\n\n let addr= pdu_data.read_u16::<BigEndian>().unwrap();\n\n log::info!(\"request WriteSingleCoil, addr: {}\", addr);\n\n let value = pdu_data.read_u16::<BigEndian>().unwrap();\n\n match status.write_single_coil(addr, Coil::from_u16(value).unwrap()) {\n\n Ok(()) => {\n\n buff.write_u8(function_code).unwrap();\n\n },\n\n Err(e) => {\n\n log::info!(\"something wrong {}\", e);\n\n handle_status_error(function_code, e, &mut buff);\n\n }\n\n }\n\n },\n\n Some(FunctionCode::WriteSingleRegister) => {\n\n let addr= pdu_data.read_u16::<BigEndian>().unwrap();\n\n log::info!(\"request WriteSingleRegisters, addr: {}\", addr);\n\n let value = pdu_data.read_u16::<BigEndian>().unwrap();\n\n match status.write_single_register(addr, value) {\n\n Ok(()) => {\n", "file_path": "modbus_server/src/tcp.rs", "rank": 61, "score": 6.57400188007348 }, { "content": "extern crate num_derive;\n\nextern crate num_traits;\n\nextern crate clap;\n\nextern crate modbus_protocol;\n\nextern crate log;\n\nextern crate log4rs;\n\n\n\nuse clap::App;\n\nuse clap::crate_version;\n\nuse modbus_protocol::coils::Coil;\n\nuse modbus_protocol::requests::Requests;\n\nuse modbus_protocol::exception_code::{Error};\n\nuse modbus_protocol::function_code::ModbusFunction;\n\nmod tcp;\n\nuse tcp::{ModbusConfig, Transport};\n\n\n\n\n", "file_path": "modbus_client/src/main.rs", "rank": 62, "score": 6.555885138686149 }, { "content": " // ReadExceptionStatus = 0x07,\n\n // ReportSlaveId = 0x11,\n\n // MaskWriteRegister = 0x16,\n\n // WriteAndReadRegisters = 0x17\n\n }\n\n}\n\n\n\n\n\n/// Context object which holds state for all modbus operations.\n\npub struct Transport {\n\n tid: u16,\n\n uid: u8,\n\n stream: TcpStream,\n\n}\n\n\n\nimpl 
Transport {\n\n /// Create a new context object and connect it to `addr` on port `port`\n\n pub fn new_with_cfg(addr: &str, cfg: ModbusConfig) -> io::Result<Transport> {\n\n let stream = match cfg.tcp_connect_timeout {\n\n Some(timeout) => {\n", "file_path": "modbus_client/src/tcp.rs", "rank": 63, "score": 6.534214283416943 }, { "content": " pub tcp_port: u16,\n\n /// Connection timeout for TCP socket (Default: `OS Default`)\n\n pub tcp_connect_timeout: Option<Duration>,\n\n /// Timeout when reading from the TCP socket (Default: `infinite`)\n\n pub tcp_read_timeout: Option<Duration>,\n\n /// Timeout when writing to the TCP socket (Default: `infinite`)\n\n pub tcp_write_timeout: Option<Duration>,\n\n /// The modbus Unit Identifier used in the modbus layer (Default: `1`)\n\n pub modbus_uid: u8,\n\n}\n\n\n\nimpl Default for ModbusConfig {\n\n fn default() -> ModbusConfig {\n\n ModbusConfig {\n\n tcp_port: MODBUS_TCP_DEFAULT_PORT,\n\n tcp_connect_timeout: None,\n\n tcp_read_timeout: None,\n\n tcp_write_timeout: None,\n\n modbus_uid: 1,\n\n }\n", "file_path": "modbus_client/src/tcp.rs", "rank": 65, "score": 6.286303378482842 }, { "content": " }\n\n }\n\n },\n\n Some(FunctionCode::ReadHoldingRegisters) =>{\n\n let addr= pdu_data.read_u16::<BigEndian>().unwrap();\n\n let count = pdu_data.read_u16::<BigEndian>().unwrap();\n\n log::info!(\"request ReadHoldingRegisters, addr: {}; count: {}\", addr, count);\n\n let mut buff = vec![0; MODBUS_HEADER_SIZE];\n\n match status.read_holding_registers(addr, count) {\n\n Ok(registers) => {\n\n buff.write_u8(function_code).unwrap();\n\n buff.write_u8(registers.len() as u8 * 2).unwrap();\n\n for v in registers {\n\n buff.write_u16::<BigEndian>(v).unwrap();\n\n }\n\n },\n\n Err(e) => {\n\n log::info!(\"something wrong {}\", e);\n\n handle_status_error(function_code, e, &mut buff);\n\n }\n", "file_path": "modbus_server/src/tcp.rs", "rank": 66, "score": 6.202942221721403 }, { "content": "use super::coils::Coil;\n\nuse 
super::exception_code::Result;\n\n\n", "file_path": "modbus_protocol/src/requests.rs", "rank": 67, "score": 6.193335345031878 }, { "content": " let addr: u16 = args[0].parse().expect(matches.usage());\n\n let value: Coil = args[1].parse().expect(matches.usage());\n\n handle_request(&mut client, &ModbusFunction::WriteSingleCoil(addr, value));\n\n } else if let Some(args) = matches.values_of(\"write-multiple-coils\") {\n\n let args: Vec<&str> = args.collect();\n\n let addr: u16 = args[0].parse().expect(matches.usage());\n\n let values: Vec<Coil> = args[1]\n\n .split(',')\n\n .map(|s| s.trim().parse().expect(matches.usage()))\n\n .collect();\n\n handle_request(&mut client, &ModbusFunction::WriteMultipleCoils(addr, &values));\n\n } else if let Some(args) = matches.values_of(\"read-holding-registers\") {\n\n let args: Vec<&str> = args.collect();\n\n let addr: u16 = args[0].parse().expect(matches.usage());\n\n let qtty: u16 = args[1].parse().expect(matches.usage());\n\n handle_request(&mut client, &ModbusFunction::ReadHoldingRegisters(addr, qtty));\n\n } else if let Some(args) = matches.values_of(\"write-single-register\") {\n\n let args: Vec<&str> = args.collect();\n\n let addr: u16 = args[0].parse().expect(matches.usage());\n\n let value: u16 = args[1].parse().expect(matches.usage());\n", "file_path": "modbus_client/src/main.rs", "rank": 69, "score": 6.174087003688114 }, { "content": "\n\n if let Some(args) = matches.values_of(\"unit_id\") {\n\n let args: Vec<&str> = args.collect();\n\n let uid = args[0].parse().expect(matches.usage());\n\n modbus_config.set_uid(uid);\n\n }\n\n let mut client = tcp::Transport::new_with_cfg(addr, modbus_config).unwrap();\n\n\n\n if let Some(args) = matches.values_of(\"read-coils\") {\n\n let args: Vec<&str> = args.collect();\n\n let addr: u16 = args[0].parse().expect(matches.usage());\n\n let qtty: u16 = args[1].parse().expect(matches.usage());\n\n handle_request(&mut client, &ModbusFunction::ReadCoils(addr, qtty));\n\n } else if let 
Some(args) = matches.values_of(\"read-discrete-inputs\") {\n\n let args: Vec<&str> = args.collect();\n\n let addr: u16 = args[0].parse().expect(matches.usage());\n\n let qtty: u16 = args[1].parse().expect(matches.usage());\n\n handle_request(&mut client, &ModbusFunction::ReadDiscreteInputs(addr, qtty));\n\n } else if let Some(args) = matches.values_of(\"write-single-coil\") {\n\n let args: Vec<&str> = args.collect();\n", "file_path": "modbus_client/src/main.rs", "rank": 71, "score": 5.649547127279337 }, { "content": "\n\n match self.stream.write_all(&buff) {\n\n Ok(_s) => {\n\n let mut reply = vec![0; MODBUS_HEADER_SIZE + expected_bytes + 2];\n\n match self.stream.read(&mut reply) {\n\n Ok(_s) => {\n\n let resp_hd = Header::unpack(&reply[..MODBUS_HEADER_SIZE])?;\n\n Transport::validate_response_header(&header, &resp_hd)?;\n\n Transport::validate_response_code(&buff, &reply)?;\n\n Transport::get_reply_data(&reply, expected_bytes)\n\n }\n\n Err(e) => Err(Error::Io(e)),\n\n }\n\n }\n\n Err(e) => Err(Error::Io(e)),\n\n }\n\n }\n\n\n\n fn validate_response_header(req: &Header, resp: &Header) -> Result<()> {\n\n if req.tid != resp.tid || resp.pid != MODBUS_PROTOCOL_TCP {\n", "file_path": "modbus_client/src/tcp.rs", "rank": 72, "score": 5.6345652954786996 }, { "content": " ModbusConfig {\n\n tcp_port: MODBUS_TCP_DEFAULT_PORT,\n\n tcp_connect_timeout: None,\n\n tcp_read_timeout: None,\n\n tcp_write_timeout: None,\n\n modbus_uid: 1,\n\n }\n\n }\n\n}\n\n\n\nimpl ModbusConfig {\n\n fn set_port(self: &mut Self, port: u16) {\n\n self.tcp_port = port;\n\n }\n\n fn set_uid(self: &mut Self, uid: u8) {\n\n self.modbus_uid = uid;\n\n }\n\n}\n\n\n", "file_path": "modbus_server/src/main.rs", "rank": 73, "score": 5.464479243171651 }, { "content": "extern crate num_derive;\n\nextern crate num_traits;\n\n\n\npub mod coils;\n\npub mod requests;\n\npub mod exception_code;\n\npub mod function_code;\n\npub mod utils;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn it_works() {\n\n 
assert_eq!(2 + 2, 4);\n\n }\n\n}\n\n\n", "file_path": "modbus_protocol/src/lib.rs", "rank": 74, "score": 5.116304655486729 }, { "content": " let mut status = shared_status.lock().unwrap();\n\n let mbap_header = Header::unpack(data).unwrap();\n\n let pdu_data = &mut data[MODBUS_HEADER_SIZE..];\n\n handle_pdu_data(&mut stream, &mut status, mbap_header, pdu_data);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "modbus_server/src/tcp.rs", "rank": 75, "score": 5.000704614062446 }, { "content": "\n\npub enum ModbusFunction<'a> {\n\n ReadCoils(Address, Quantity),\n\n ReadDiscreteInputs(Address, Quantity),\n\n ReadHoldingRegisters(Address, Quantity),\n\n ReadInputRegisters(Address, Quantity),\n\n WriteSingleCoil(Address, Coil),\n\n WriteSingleRegister(Address, Value),\n\n WriteMultipleCoils(Address, &'a Coils),\n\n WriteMultipleRegisters(Address, &'a Values),\n\n}\n\n\n", "file_path": "modbus_protocol/src/function_code.rs", "rank": 76, "score": 4.654535303590128 }, { "content": " }\n\n },\n\n Some(FunctionCode::ReadInputRegisters) =>{\n\n let addr= pdu_data.read_u16::<BigEndian>().unwrap();\n\n let count = pdu_data.read_u16::<BigEndian>().unwrap();\n\n log::info!(\"request ReadInputRegisters, addr: {}; count: {}\", addr, count);\n\n match status.read_input_registers(addr, count) {\n\n Ok(registers) => {\n\n buff.write_u8(function_code).unwrap();\n\n buff.write_u8(registers.len() as u8 * 2).unwrap();\n\n for v in registers {\n\n buff.write_u16::<BigEndian>(v).unwrap();\n\n }\n\n },\n\n Err(e) => {\n\n log::info!(\"something wrong {}\", e);\n\n handle_status_error(function_code, e, &mut buff);\n\n }\n\n }\n\n },\n", "file_path": "modbus_server/src/tcp.rs", "rank": 77, "score": 4.5248341693437935 }, { "content": " };\n\n }\n\n ModbusFunction::WriteMultipleCoils(addr, values) => {\n\n match client.write_multiple_coils(addr, &values) {\n\n Err(e) =>{\n\n handle_error(e);\n\n },\n\n Ok(_) => {\n\n log::info!(\"Succeeded\");\n\n }\n\n };\n\n }\n\n 
ModbusFunction::WriteMultipleRegisters(addr, values) => {\n\n match client.write_multiple_registers(addr, &values) {\n\n Err(e) =>{\n\n handle_error(e);\n\n },\n\n Ok(_) => {\n\n log::info!(\"Succeeded\");\n\n }\n\n };\n\n }\n\n }\n\n}\n\n\n", "file_path": "modbus_client/src/main.rs", "rank": 78, "score": 4.461724461302778 }, { "content": " };\n\n }\n\n ModbusFunction::WriteSingleCoil(addr, value) => {\n\n match client.write_single_coil(addr, value) {\n\n Err(e) =>{\n\n handle_error(e);\n\n },\n\n Ok(_) => {\n\n log::info!(\"Succeeded\");\n\n }\n\n };\n\n }\n\n ModbusFunction::WriteSingleRegister(addr, value) => {\n\n match client.write_single_register(addr, value){\n\n Err(e) =>{\n\n handle_error(e);\n\n },\n\n Ok(_) => {\n\n log::info!(\"Succeeded\");\n\n }\n", "file_path": "modbus_client/src/main.rs", "rank": 79, "score": 4.461724461302778 }, { "content": " log4rs::init_config(log4rs_config).unwrap();\n\n }\n\n\n\n let matches = App::new(\"Modbus Server\")\n\n .author(\"Zhaohui GUO <[email protected]>\")\n\n .version(&crate_version!()[..])\n\n .about(\"Modbus Tcp Server\")\n\n .args_from_usage(\n\n \"<SERVER> 'The IP address or hostname of the server'\n\n \\\n\n --port=[port] 'port number'\n\n \\\n\n --unit_id=[UID] 'unit identifier'\n\n \\\n\n --capacity=[size] 'register number'\",\n\n )\n\n .get_matches();\n\n\n\n let mut modbus_config = ModbusConfig::default();\n\n let addr = matches.value_of(\"SERVER\").unwrap();\n", "file_path": "modbus_server/src/main.rs", "rank": 80, "score": 4.376751622928528 }, { "content": " if let Some(args) = matches.values_of(\"port\") {\n\n let args: Vec<&str> = args.collect();\n\n let port = args[0].parse().expect(matches.usage());\n\n modbus_config.set_port(port);\n\n }\n\n\n\n if let Some(args) = matches.values_of(\"unit_id\") {\n\n let args: Vec<&str> = args.collect();\n\n uid = args[0].parse().expect(matches.usage());\n\n modbus_config.set_uid(uid);\n\n }\n\n if let Some(args) = matches.values_of(\"capacity\") {\n\n let args: 
Vec<&str> = args.collect();\n\n size = args[0].parse().expect(matches.usage());\n\n }\n\n\n\n let status_info = Arc::new(Mutex::new(StatusInfo::create(size)));\n\n let listener = TcpListener::bind((addr, modbus_config.tcp_port)).unwrap();\n\n for stream in listener.incoming() {\n\n match stream {\n", "file_path": "modbus_server/src/main.rs", "rank": 81, "score": 4.3760351917699225 }, { "content": " handle_status_error(function_code, e, &mut buff);\n\n }\n\n }\n\n },\n\n _ => {\n\n buff.write_u8(function_code + 0x80).unwrap();\n\n buff.write_u8(ExceptionCode::IllegalFunction as u8).unwrap();\n\n },\n\n }\n\n write_response(stream, mbap_header, &mut buff);\n\n}\n\n\n", "file_path": "modbus_server/src/tcp.rs", "rank": 82, "score": 4.310770911607604 }, { "content": " .version(&crate_version!()[..])\n\n .about(\"Modbus Tcp client\")\n\n .args_from_usage(\n\n \"<SERVER> 'The IP address or hostname of the server'\n\n \\\n\n --port=[port] 'port number'\n\n \\\n\n --unit_id=[UID] 'unit identifier'\n\n \\\n\n --read-coils=[ADDR] [QUANTITY] 'Read QUANTITY coils from ADDR'\n\n \\\n\n --read-discrete-inputs=[ADDR] [QUANTITY] 'Read QUANTITY inputs from \\\n\n ADDR'\n\n --write-single-coil=[ADDR] [On,Off] \\\n\n 'Write the coil value (On or Off) to ADDR'\n\n \\\n\n --write-multiple-coils=[ADDR] [On,Off..] 'Write multiple coil values \\\n\n (On or Off) to ADDR (use \\\"..\\\" without spaces to group them e.g. 
\\\n\n \\\"On, Off, On, Off\\\")'\n\n \\\n", "file_path": "modbus_client/src/main.rs", "rank": 83, "score": 4.2614398577092745 }, { "content": "[![Rust](https://github.com/guozhaohui/modbus_simulator/actions/workflows/rust.yml/badge.svg)](https://github.com/guozhaohui/modbus_simulator/actions/workflows/rust.yml)\n\n[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT)\n\n# modbus_simulator\n\nA modbus simulator written by rust.\n\n\n\n## DESCRIPTION\n\nThis project is derived from https://github.com/hirschenberger/modbus-rs, with the following modifications aimed to provide full-set function simulation of modbus protocol.\n\n\n\n* Retrieve common implementation of Server/Client side to form a new modbus_protocol crate.\n\n* Create modbus_client crate which link with modbus_protocol crate.\n\n* Add new modbus_server crate to provide server side implementation.\n\n\n\n## USAGE\n\n* launch modbus server\n\n\n\n > $ ./modbus_server 127.0.0.1 --port=1234 --unit_id=5 --capacity=64\n\n\n\n* launch modbus client\n\n\n\n > $ ./modbus_client 127.0.0.1 --port=1234 --read-coils 5 3\n\n \n\n \n\n You can use *--help* option for detailed usage and more other command options.\n\n \n\n## Explanation\n\n1) The default Modbus TCP port number is 502, but the TCP/IP port numbers below 1024 are special in that normal users are not allowed to run servers on them, so you maybe encounter a permission denied error when you use the default port. *--port* is provided to enable the modbus server to bind on other port for test without root privilege.\n\n\n\n2) Multiple clients can connect to the same server simultaneously. \n\n\n\n3) Crate _log4rs_ is used to provide logging function on both server side and client side. 
Two .yaml files in config_samples folder are example files for log4rs configuration, you can modify them to fulfill your needs.\n", "file_path": "README.md", "rank": 84, "score": 4.121542461300604 }, { "content": "\n\n #[test]\n\n fn serialize_header() {\n\n let header = Header {\n\n tid: 12816,\n\n pid: 3930,\n\n len: 99,\n\n uid: 68,\n\n };\n\n let serialized = header.pack().unwrap();\n\n let deserialized = Header::unpack(&vec![50, 16, 15, 90, 0, 99, 68]).unwrap();\n\n let re_deserialized = Header::unpack(&serialized).unwrap();\n\n assert_eq!(serialized, vec![50, 16, 15, 90, 0, 99, 68]);\n\n assert_eq!(deserialized, header);\n\n assert_eq!(re_deserialized, header);\n\n }\n\n\n\n #[test]\n\n fn create_header() {\n\n let header = Header::new(1u16, 10u8, 100u16);\n\n assert_eq!(header.pid, 0u16);\n\n assert_eq!(header.len, (100-7) as u16);\n\n }\n\n}\n\n\n", "file_path": "modbus_server/src/mbap.rs", "rank": 85, "score": 4.112439029479509 }, { "content": " handle_request(&mut client, &ModbusFunction::WriteSingleRegister(addr, value));\n\n } else if let Some(args) = matches.values_of(\"write-multiple-registers\") {\n\n let args: Vec<&str> = args.collect();\n\n let addr: u16 = args[0].parse().expect(matches.usage());\n\n let values: Vec<u16> = args[1]\n\n .split(',')\n\n .map(|s| s.trim().parse().expect(matches.usage()))\n\n .collect();\n\n handle_request(&mut client, &ModbusFunction::WriteMultipleRegisters(addr, &values));\n\n };\n\n}\n", "file_path": "modbus_client/src/main.rs", "rank": 86, "score": 4.090194481637781 }, { "content": "use num_derive::FromPrimitive;\n\n\n\nuse std::fmt;\n\nuse std::io;\n\n\n\n#[derive(Debug, PartialEq, FromPrimitive)]\n\n/// Modbus exception codes returned from the server.\n\npub enum ExceptionCode {\n\n IllegalFunction = 0x01,\n\n IllegalDataAddress = 0x02,\n\n IllegalDataValue = 0x03,\n\n SlaveOrServerFailure = 0x04,\n\n Acknowledge = 0x05,\n\n SlaveOrServerBusy = 0x06,\n\n NegativeAcknowledge = 0x07,\n\n MemoryParity = 0x08,\n\n 
NotDefined = 0x09,\n\n GatewayPath = 0x0a,\n\n GatewayTarget = 0x0b\n\n}\n", "file_path": "modbus_protocol/src/exception_code.rs", "rank": 87, "score": 4.006502617529534 }, { "content": " self.write_multiple(&Function::WriteMultipleRegisters(\n\n addr,\n\n values.len() as u16,\n\n &bytes,\n\n ))\n\n }\n\n\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn serialize_header() {\n\n let header = Header {\n\n tid: 12816,\n\n pid: 3930,\n\n len: 99,\n", "file_path": "modbus_client/src/tcp.rs", "rank": 88, "score": 3.978127527619658 }, { "content": "extern crate num_derive;\n\nextern crate num_traits;\n\nextern crate clap;\n\nextern crate modbus_protocol;\n\nextern crate log;\n\nextern crate log4rs;\n\n\n\nuse clap::App;\n\nuse clap::crate_version;\n\nuse std::net::{TcpListener};\n\nuse std::{\n\n thread,\n\n sync::{Arc, Mutex},\n\n};\n\nuse std::time::Duration;\n\n\n\nmod server_status;\n\nmod mbap;\n\nuse server_status::StatusInfo;\n\n\n", "file_path": "modbus_server/src/main.rs", "rank": 89, "score": 3.1923768315119987 }, { "content": "new environment.\n\n\n\n\"Author\" refers to any designer, engineer, programmer, technical\n\nwriter or other person who contributed to the Font Software.\n\n\n\nPERMISSION & CONDITIONS\n\nPermission is hereby granted, free of charge, to any person obtaining\n\na copy of the Font Software, to use, study, copy, merge, embed, modify,\n\nredistribute, and sell modified and unmodified copies of the Font\n\nSoftware, subject to the following conditions:\n\n\n\n1) Neither the Font Software nor any of its individual components,\n\nin Original or Modified Versions, may be sold by itself.\n\n\n\n2) Original or Modified Versions of the Font Software may be bundled,\n\nredistributed and/or sold with any software, provided that each copy\n\ncontains the above copyright notice and this license. 
These can be\n\nincluded either as stand-alone text files, human-readable headers or\n\nin the appropriate machine-readable metadata fields within text or\n\nbinary files as long as those fields can be easily viewed by the user.\n\n\n\n3) No Modified Version of the Font Software may use the Reserved Font\n\nName(s) unless explicit written permission is granted by the corresponding\n\nCopyright Holder. This restriction only applies to the primary font name as\n\npresented to the users.\n\n\n\n4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font\n\nSoftware shall not be used to promote, endorse or advertise any\n\nModified Version, except to acknowledge the contribution(s) of the\n\nCopyright Holder(s) and the Author(s) or with their explicit written\n\npermission.\n\n\n\n5) The Font Software, modified or unmodified, in part or in whole,\n\nmust be distributed entirely under this license, and must not be\n\ndistributed under any other license. The requirement for fonts to\n\nremain under this license does not apply to any document created\n\nusing the Font Software.\n\n\n\nTERMINATION\n\nThis license becomes null and void if any of the above conditions are\n\nnot met.\n\n\n\nDISCLAIMER\n\nTHE FONT SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\n\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF\n\nMERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT\n\nOF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. 
IN NO EVENT SHALL THE\n\nCOPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,\n\nINCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL\n\nDAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n\nFROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM\n\nOTHER DEALINGS IN THE FONT SOFTWARE.\n", "file_path": "docs/SourceSerif4-LICENSE.md", "rank": 90, "score": 2.642898318458307 }, { "content": " --read-input-registers=[ADDR], [QUANTITY] 'Read QUANTITY input \\\n\n registersfrom ADDR'\n\n \\\n\n --read-holding-registers=[ADDR], [QUANTITY] 'Read QUANTITY holding \\\n\n registers from ADDR'\n\n \\\n\n --write-single-register=[ADDR] [VALUE] 'Write VALUE to register ADDR'\n\n \\\n\n --write-multiple-registers=[ADDR] [V1,V2...] 'Write multiple register \\\n\n values to ADDR (use \\\"..\\\" to group them e.g. \\\"23, 24, 25\\\")'\",\n\n )\n\n .get_matches();\n\n\n\n let mut modbus_config = ModbusConfig::default();\n\n let addr = matches.value_of(\"SERVER\").unwrap();\n\n if let Some(args) = matches.values_of(\"port\") {\n\n let args: Vec<&str> = args.collect();\n\n let port = args[0].parse().expect(matches.usage());\n\n modbus_config.set_port(port);\n\n }\n", "file_path": "modbus_client/src/main.rs", "rank": 92, "score": 2.3921581407895984 }, { "content": " log::info!(\"Invalid response header:\");\n\n log::info!(\" tid: expected: {}, result: {}\", req.tid, resp.tid);\n\n log::info!(\" pid: expected: {}, result: {}\", MODBUS_PROTOCOL_TCP, resp.pid);\n\n Err(Error::InvalidResponse)\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n\n\n fn validate_response_code(req: &[u8], resp: &[u8]) -> Result<()> {\n\n if req[7] + 0x80 == resp[7] {\n\n match ExceptionCode::from_u8(resp[8]) {\n\n Some(code) => Err(Error::Exception(code)),\n\n None => {\n\n log::info!(\"Invalid Exception code: {}\", resp[8]);\n\n Err(Error::InvalidResponse)\n\n }\n\n }\n\n } else if req[7] == resp[7] {\n\n Ok(())\n", "file_path": "modbus_client/src/tcp.rs", 
"rank": 93, "score": 2.3245312878084325 }, { "content": "Copyright 2014-2021 Adobe (http://www.adobe.com/), with Reserved Font Name 'Source'. All Rights Reserved. Source is a trademark of Adobe in the United States and/or other countries.\n\n\n\nThis Font Software is licensed under the SIL Open Font License, Version 1.1.\n\n\n\nThis license is copied below, and is also available with a FAQ at: http://scripts.sil.org/OFL\n\n\n\n\n\n-----------------------------------------------------------\n\nSIL OPEN FONT LICENSE Version 1.1 - 26 February 2007\n\n-----------------------------------------------------------\n\n\n\nPREAMBLE\n\nThe goals of the Open Font License (OFL) are to stimulate worldwide\n\ndevelopment of collaborative font projects, to support the font creation\n\nefforts of academic and linguistic communities, and to provide a free and\n\nopen framework in which fonts may be shared and improved in partnership\n\nwith others.\n\n\n\nThe OFL allows the licensed fonts to be used, studied, modified and\n\nredistributed freely as long as they are not sold by themselves. The\n\nfonts, including any derivative works, can be bundled, embedded,\n\nredistributed and/or sold with any software provided that any reserved\n\nnames are not used by derivative works. The fonts and derivatives,\n\nhowever, cannot be released under any other type of license. The\n\nrequirement for fonts to remain under this license does not apply\n\nto any document created using the fonts or their derivatives.\n\n\n\nDEFINITIONS\n\n\"Font Software\" refers to the set of files released by the Copyright\n\nHolder(s) under this license and clearly marked as such. 
This may\n\ninclude source files, build scripts and documentation.\n\n\n\n\"Reserved Font Name\" refers to any names specified as such after the\n\ncopyright statement(s).\n\n\n\n\"Original Version\" refers to the collection of Font Software components as\n\ndistributed by the Copyright Holder(s).\n\n\n\n\"Modified Version\" refers to any derivative made by adding to, deleting,\n\nor substituting -- in part or in whole -- any of the components of the\n\nOriginal Version, by changing formats or by porting the Font Software to a\n", "file_path": "docs/SourceSerif4-LICENSE.md", "rank": 94, "score": 2.291867078419428 }, { "content": " };\n\n }\n\n ModbusFunction::ReadHoldingRegisters(addr, qtty) => {\n\n match client.read_holding_registers(addr, qtty) {\n\n Err(e) =>{\n\n handle_error(e);\n\n },\n\n Ok(_) => {\n\n log::info!(\"Succeeded\");\n\n }\n\n };\n\n }\n\n ModbusFunction::ReadInputRegisters(addr, qtty) => {\n\n match client.read_input_registers(addr, qtty) {\n\n Err(e) =>{\n\n handle_error(e);\n\n },\n\n Ok(_) => {\n\n log::info!(\"Succeeded\");\n\n }\n", "file_path": "modbus_client/src/main.rs", "rank": 95, "score": 2.2689371165006076 }, { "content": " // Call to connect_timeout needs to be done on a single address\n\n let mut socket_addrs = (addr, cfg.tcp_port).to_socket_addrs()?;\n\n TcpStream::connect_timeout(&socket_addrs.next().unwrap(), timeout)\n\n }\n\n None => TcpStream::connect((addr, cfg.tcp_port)),\n\n };\n\n\n\n match stream {\n\n Ok(socket) => {\n\n let peer_addr = socket.peer_addr().unwrap();\n\n log::info!(\"connected server: {:?}\", peer_addr);\n\n socket.set_read_timeout(cfg.tcp_read_timeout)?;\n\n socket.set_write_timeout(cfg.tcp_write_timeout)?;\n\n socket.set_nodelay(true)?;\n\n Ok(Transport {\n\n tid: 0,\n\n uid: cfg.modbus_uid,\n\n stream: socket,\n\n })\n\n }\n", "file_path": "modbus_client/src/tcp.rs", "rank": 96, "score": 1.1261519227942935 } ]
Rust
src/n64/cpu/cpu.rs
Protowalker/rustendo64
2e6405bfadd923a3ab6c5ace29d2bdf85b0ae3c4
use super::super::Interconnect; use super::opcode::Opcode::*; use super::opcode::RegImmOpcode::*; use super::opcode::SpecialOpcode::*; use super::{cp0, Instruction}; use std::fmt; const NUM_GPR: usize = 32; enum SignExtendResult { Yes, No, } enum WriteLink { Yes, No, } pub struct Cpu { reg_gpr: [u64; NUM_GPR], reg_fpr: [f64; NUM_GPR], reg_pc: u64, reg_hi: u64, reg_lo: u64, reg_llbit: bool, reg_fcr0: u32, reg_fcr31: u32, cp0: cp0::Cp0, delay_slot_pc: Option<u64>, } impl Cpu { pub fn new() -> Cpu { Cpu { reg_gpr: [0; NUM_GPR], reg_fpr: [0.0; NUM_GPR], reg_pc: 0xffff_ffff_bfc0_0000, reg_hi: 0, reg_lo: 0, reg_llbit: false, reg_fcr0: 0, reg_fcr31: 0, cp0: cp0::Cp0::default(), delay_slot_pc: None, } } pub fn current_pc_virt(&self) -> u64 { self.delay_slot_pc.unwrap_or(self.reg_pc) } pub fn current_pc_phys(&self) -> u64 { self.virt_addr_to_phys_addr(self.current_pc_virt()) } pub fn will_execute_from_delay_slot(&self) -> bool { self.delay_slot_pc.is_some() } pub fn step(&mut self, interconnect: &mut Interconnect) { if let Some(pc) = self.delay_slot_pc { let instr = self.read_instruction(interconnect, pc); self.delay_slot_pc = None; self.execute_instruction(interconnect, instr); } else { let instr = self.read_instruction(interconnect, self.reg_pc); self.reg_pc += 4; self.execute_instruction(interconnect, instr); } } fn read_instruction(&self, interconnect: &mut Interconnect, addr: u64) -> Instruction { Instruction(self.read_word(interconnect, addr)) } fn execute_instruction(&mut self, interconnect: &mut Interconnect, instr: Instruction) { match instr.opcode() { Special => { match instr.special_op() { Sll => self.reg_instr(instr, |_, rt, sa| rt << sa), Srl => self.reg_instr(instr, |_, rt, sa| { let rt = rt as u32; (rt >> sa) as u64 }), Sllv => self.reg_instr(instr, |rs, rt, _| { let shift = rs & 0b11111; rt << shift }), Srlv => self.reg_instr(instr, |rs, rt, _| { let rs = rs as u32; let rt = rt as u32; let shift = rs & 0b11111; (rt >> shift) as u64 }), Jr => { let 
delay_slot_pc = self.reg_pc; self.reg_pc = self.read_reg_gpr(instr.rs()); self.delay_slot_pc = Some(delay_slot_pc); } Multu => { let rs = self.read_reg_gpr(instr.rs()) as u32; let rt = self.read_reg_gpr(instr.rt()) as u32; let res = (rs as u64) * (rt as u64); self.reg_lo = (res as i32) as u64; self.reg_hi = ((res >> 32) as i32) as u64; } Mfhi => { let value = self.reg_hi; self.write_reg_gpr(instr.rd() as usize, value); } Mflo => { let value = self.reg_lo; self.write_reg_gpr(instr.rd() as usize, value); } Addu => self.reg_instr(instr, |rs, rt, _| rs.wrapping_add(rt)), Subu => self.reg_instr(instr, |rs, rt, _| rs.wrapping_sub(rt)), And => self.reg_instr(instr, |rs, rt, _| rs & rt), Or => self.reg_instr(instr, |rs, rt, _| rs | rt), Xor => self.reg_instr(instr, |rs, rt, _| rs ^ rt), Sltu => self.reg_instr(instr, |rs, rt, _| if rs < rt { 1 } else { 0 }), } } RegImm => match instr.reg_imm_op() { Bgezal => { self.branch(instr, WriteLink::Yes, |rs, _| (rs as i64) >= 0); } }, Addi => self.imm_instr(instr, SignExtendResult::Yes, |rs, _, imm_sign_extended| { rs + imm_sign_extended }), Addiu => self.imm_instr(instr, SignExtendResult::Yes, |rs, _, imm_sign_extended| { rs.wrapping_add(imm_sign_extended) }), Andi => self.imm_instr(instr, SignExtendResult::No, |rs, imm, _| rs & imm), Ori => self.imm_instr(instr, SignExtendResult::No, |rs, imm, _| rs | imm), Lui => self.imm_instr(instr, SignExtendResult::Yes, |_, imm, _| imm << 16), Mtc0 => { let data = self.read_reg_gpr(instr.rt()); self.cp0.write_reg(instr.rd(), data); } Beq => { self.branch(instr, WriteLink::No, |rs, rt| rs == rt); } Bne => { self.branch(instr, WriteLink::No, |rs, rt| rs != rt); } Beql => self.branch_likely(instr, |rs, rt| rs == rt), Bnel => self.branch_likely(instr, |rs, rt| rs != rt), Lw => { let base = instr.rs(); let sign_extended_offset = instr.offset_sign_extended(); let virt_addr = self.read_reg_gpr(base).wrapping_add(sign_extended_offset); let mem = (self.read_word(interconnect, virt_addr) as i32) as 
u64; self.write_reg_gpr(instr.rt(), mem); } Sw => { let base = instr.rs(); let sign_extended_offset = instr.offset_sign_extended(); let virt_addr = self.read_reg_gpr(base).wrapping_add(sign_extended_offset); let mem = self.read_reg_gpr(instr.rt()) as u32; self.write_word(interconnect, virt_addr, mem); } } } fn imm_instr<F>(&mut self, instr: Instruction, sign_extend_result: SignExtendResult, f: F) where F: FnOnce(u64, u64, u64) -> u64, { let rs = self.read_reg_gpr(instr.rs()); let imm = instr.imm() as u64; let imm_sign_extended = instr.imm_sign_extended(); let value = f(rs, imm, imm_sign_extended); let sign_extended_value = (value as i32) as u64; let value = match sign_extend_result { SignExtendResult::Yes => sign_extended_value, _ => value, }; self.write_reg_gpr(instr.rt(), value); } fn reg_instr<F>(&mut self, instr: Instruction, f: F) where F: FnOnce(u64, u64, u32) -> u64, { let rs = self.read_reg_gpr(instr.rs()); let rt = self.read_reg_gpr(instr.rt()); let sa = instr.sa(); let value = f(rs, rt, sa); let sign_extended_value = (value as i32) as u64; self.write_reg_gpr(instr.rd() as usize, sign_extended_value); } fn branch<F>(&mut self, instr: Instruction, write_link: WriteLink, f: F) -> bool where F: FnOnce(u64, u64) -> bool, { let rs = self.read_reg_gpr(instr.rs()); let rt = self.read_reg_gpr(instr.rt()); let is_taken = f(rs, rt); let delay_slot_pc = self.reg_pc; if let WriteLink::Yes = write_link { let link_address = delay_slot_pc + 4; self.write_reg_gpr(31, link_address); } if is_taken { let sign_extended_offset = instr.offset_sign_extended() << 2; self.reg_pc = self.reg_pc.wrapping_add(sign_extended_offset); self.delay_slot_pc = Some(delay_slot_pc); } is_taken } fn branch_likely<F>(&mut self, instr: Instruction, f: F) where F: FnOnce(u64, u64) -> bool, { if !self.branch(instr, WriteLink::No, f) { self.reg_pc = self.reg_pc.wrapping_add(4); } } fn read_word(&self, interconnect: &mut Interconnect, virt_addr: u64) -> u32 { let phys_addr = 
self.virt_addr_to_phys_addr(virt_addr); interconnect.read_word(phys_addr as u32) } fn write_word(&mut self, interconnect: &mut Interconnect, virt_addr: u64, value: u32) { let phys_addr = self.virt_addr_to_phys_addr(virt_addr); interconnect.write_word(phys_addr as u32, value); } fn virt_addr_to_phys_addr(&self, virt_addr: u64) -> u64 { let addr_bit_values = (virt_addr >> 29) & 0b111; if addr_bit_values == 0b101 { virt_addr - 0xffff_ffff_a000_0000 } else { panic!("Unrecognized virtual address: {:#x}", virt_addr); } } fn write_reg_gpr(&mut self, index: usize, value: u64) { if index != 0 { self.reg_gpr[index] = value; } } fn read_reg_gpr(&self, index: usize) -> u64 { match index { 0 => 0, _ => self.reg_gpr[index], } } } impl fmt::Debug for Cpu { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { const REGS_PER_LINE: usize = 2; const REG_NAMES: [&'static str; NUM_GPR] = [ "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3", "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra", ]; write!(f, "\nCPU General Purpose Registers:")?; for reg_num in 0..NUM_GPR { if (reg_num % REGS_PER_LINE) == 0 { writeln!(f, "")?; } write!( f, "{reg_name}/gpr{num:02}: {value:#018X} ", num = reg_num, reg_name = REG_NAMES[reg_num], value = self.reg_gpr[reg_num], )?; } write!(f, "\n\nCPU Floating Point Registers:")?; for reg_num in 0..NUM_GPR { if (reg_num % REGS_PER_LINE) == 0 { writeln!(f, "")?; } write!( f, "fpr{num:02}: {value:21} ", num = reg_num, value = self.reg_fpr[reg_num] )?; } writeln!(f, "\n\nCPU Special Registers:")?; writeln!( f, "\ reg_pc: {:#018X}\n\ reg_hi: {:#018X}\n\ reg_lo: {:#018X}\n\ reg_llbit: {}\n\ reg_fcr0: {:#010X}\n\ reg_fcr31: {:#010X}\n\ ", self.reg_pc, self.reg_hi, self.reg_lo, self.reg_llbit, self.reg_fcr0, self.reg_fcr31 )?; writeln!(f, "{:#?}", self.cp0) } }
use super::super::Interconnect; use super::opcode::Opcode::*; use super::opcode::RegImmOpcode::*; use super::opcode::SpecialOpcode::*; use super::{cp0, Instruction}; use std::fmt; const NUM_GPR: usize = 32; enum SignExtendResult { Yes, No, } enum WriteLink { Yes, No, } pub struct Cpu { reg_gpr: [u64; NUM_GPR], reg_fpr: [f64; NUM_GPR], reg_pc: u64, reg_hi: u64, reg_lo: u64, reg_llbit: bool, reg_fcr0: u32, reg_fcr31: u32, cp0: cp0::Cp0, delay_slot_pc: Option<u64>, } impl Cpu { pub fn new() -> Cpu { Cpu { reg_gpr: [0; NUM_GPR], reg_fpr: [0.0; NUM_GPR], reg_pc: 0xffff_ffff_bfc0_0000, reg_hi: 0, reg_lo: 0, reg_llbit: false, reg_fcr0: 0, reg_fcr31: 0, cp0: cp0::Cp0::default(), delay_slot_pc: None, } } pub fn current_pc_virt(&self) -> u64 { self.delay_slot_pc.unwrap_or(self.reg_pc) } pub fn current_pc_phys(&self) -> u64 { self.virt_addr_to_phys_addr(self.current_pc_virt()) } pub fn will_execute_from_delay_slot(&self) -> bool { self.delay_slot_pc.is_some() } pub fn step(&mut self, interconnect: &mut Interconnect) {
} fn read_instruction(&self, interconnect: &mut Interconnect, addr: u64) -> Instruction { Instruction(self.read_word(interconnect, addr)) } fn execute_instruction(&mut self, interconnect: &mut Interconnect, instr: Instruction) { match instr.opcode() { Special => { match instr.special_op() { Sll => self.reg_instr(instr, |_, rt, sa| rt << sa), Srl => self.reg_instr(instr, |_, rt, sa| { let rt = rt as u32; (rt >> sa) as u64 }), Sllv => self.reg_instr(instr, |rs, rt, _| { let shift = rs & 0b11111; rt << shift }), Srlv => self.reg_instr(instr, |rs, rt, _| { let rs = rs as u32; let rt = rt as u32; let shift = rs & 0b11111; (rt >> shift) as u64 }), Jr => { let delay_slot_pc = self.reg_pc; self.reg_pc = self.read_reg_gpr(instr.rs()); self.delay_slot_pc = Some(delay_slot_pc); } Multu => { let rs = self.read_reg_gpr(instr.rs()) as u32; let rt = self.read_reg_gpr(instr.rt()) as u32; let res = (rs as u64) * (rt as u64); self.reg_lo = (res as i32) as u64; self.reg_hi = ((res >> 32) as i32) as u64; } Mfhi => { let value = self.reg_hi; self.write_reg_gpr(instr.rd() as usize, value); } Mflo => { let value = self.reg_lo; self.write_reg_gpr(instr.rd() as usize, value); } Addu => self.reg_instr(instr, |rs, rt, _| rs.wrapping_add(rt)), Subu => self.reg_instr(instr, |rs, rt, _| rs.wrapping_sub(rt)), And => self.reg_instr(instr, |rs, rt, _| rs & rt), Or => self.reg_instr(instr, |rs, rt, _| rs | rt), Xor => self.reg_instr(instr, |rs, rt, _| rs ^ rt), Sltu => self.reg_instr(instr, |rs, rt, _| if rs < rt { 1 } else { 0 }), } } RegImm => match instr.reg_imm_op() { Bgezal => { self.branch(instr, WriteLink::Yes, |rs, _| (rs as i64) >= 0); } }, Addi => self.imm_instr(instr, SignExtendResult::Yes, |rs, _, imm_sign_extended| { rs + imm_sign_extended }), Addiu => self.imm_instr(instr, SignExtendResult::Yes, |rs, _, imm_sign_extended| { rs.wrapping_add(imm_sign_extended) }), Andi => self.imm_instr(instr, SignExtendResult::No, |rs, imm, _| rs & imm), Ori => self.imm_instr(instr, 
SignExtendResult::No, |rs, imm, _| rs | imm), Lui => self.imm_instr(instr, SignExtendResult::Yes, |_, imm, _| imm << 16), Mtc0 => { let data = self.read_reg_gpr(instr.rt()); self.cp0.write_reg(instr.rd(), data); } Beq => { self.branch(instr, WriteLink::No, |rs, rt| rs == rt); } Bne => { self.branch(instr, WriteLink::No, |rs, rt| rs != rt); } Beql => self.branch_likely(instr, |rs, rt| rs == rt), Bnel => self.branch_likely(instr, |rs, rt| rs != rt), Lw => { let base = instr.rs(); let sign_extended_offset = instr.offset_sign_extended(); let virt_addr = self.read_reg_gpr(base).wrapping_add(sign_extended_offset); let mem = (self.read_word(interconnect, virt_addr) as i32) as u64; self.write_reg_gpr(instr.rt(), mem); } Sw => { let base = instr.rs(); let sign_extended_offset = instr.offset_sign_extended(); let virt_addr = self.read_reg_gpr(base).wrapping_add(sign_extended_offset); let mem = self.read_reg_gpr(instr.rt()) as u32; self.write_word(interconnect, virt_addr, mem); } } } fn imm_instr<F>(&mut self, instr: Instruction, sign_extend_result: SignExtendResult, f: F) where F: FnOnce(u64, u64, u64) -> u64, { let rs = self.read_reg_gpr(instr.rs()); let imm = instr.imm() as u64; let imm_sign_extended = instr.imm_sign_extended(); let value = f(rs, imm, imm_sign_extended); let sign_extended_value = (value as i32) as u64; let value = match sign_extend_result { SignExtendResult::Yes => sign_extended_value, _ => value, }; self.write_reg_gpr(instr.rt(), value); } fn reg_instr<F>(&mut self, instr: Instruction, f: F) where F: FnOnce(u64, u64, u32) -> u64, { let rs = self.read_reg_gpr(instr.rs()); let rt = self.read_reg_gpr(instr.rt()); let sa = instr.sa(); let value = f(rs, rt, sa); let sign_extended_value = (value as i32) as u64; self.write_reg_gpr(instr.rd() as usize, sign_extended_value); } fn branch<F>(&mut self, instr: Instruction, write_link: WriteLink, f: F) -> bool where F: FnOnce(u64, u64) -> bool, { let rs = self.read_reg_gpr(instr.rs()); let rt = 
self.read_reg_gpr(instr.rt()); let is_taken = f(rs, rt); let delay_slot_pc = self.reg_pc; if let WriteLink::Yes = write_link { let link_address = delay_slot_pc + 4; self.write_reg_gpr(31, link_address); } if is_taken { let sign_extended_offset = instr.offset_sign_extended() << 2; self.reg_pc = self.reg_pc.wrapping_add(sign_extended_offset); self.delay_slot_pc = Some(delay_slot_pc); } is_taken } fn branch_likely<F>(&mut self, instr: Instruction, f: F) where F: FnOnce(u64, u64) -> bool, { if !self.branch(instr, WriteLink::No, f) { self.reg_pc = self.reg_pc.wrapping_add(4); } } fn read_word(&self, interconnect: &mut Interconnect, virt_addr: u64) -> u32 { let phys_addr = self.virt_addr_to_phys_addr(virt_addr); interconnect.read_word(phys_addr as u32) } fn write_word(&mut self, interconnect: &mut Interconnect, virt_addr: u64, value: u32) { let phys_addr = self.virt_addr_to_phys_addr(virt_addr); interconnect.write_word(phys_addr as u32, value); } fn virt_addr_to_phys_addr(&self, virt_addr: u64) -> u64 { let addr_bit_values = (virt_addr >> 29) & 0b111; if addr_bit_values == 0b101 { virt_addr - 0xffff_ffff_a000_0000 } else { panic!("Unrecognized virtual address: {:#x}", virt_addr); } } fn write_reg_gpr(&mut self, index: usize, value: u64) { if index != 0 { self.reg_gpr[index] = value; } } fn read_reg_gpr(&self, index: usize) -> u64 { match index { 0 => 0, _ => self.reg_gpr[index], } } } impl fmt::Debug for Cpu { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { const REGS_PER_LINE: usize = 2; const REG_NAMES: [&'static str; NUM_GPR] = [ "r0", "at", "v0", "v1", "a0", "a1", "a2", "a3", "t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7", "s0", "s1", "s2", "s3", "s4", "s5", "s6", "s7", "t8", "t9", "k0", "k1", "gp", "sp", "s8", "ra", ]; write!(f, "\nCPU General Purpose Registers:")?; for reg_num in 0..NUM_GPR { if (reg_num % REGS_PER_LINE) == 0 { writeln!(f, "")?; } write!( f, "{reg_name}/gpr{num:02}: {value:#018X} ", num = reg_num, reg_name = REG_NAMES[reg_num], value = 
self.reg_gpr[reg_num], )?; } write!(f, "\n\nCPU Floating Point Registers:")?; for reg_num in 0..NUM_GPR { if (reg_num % REGS_PER_LINE) == 0 { writeln!(f, "")?; } write!( f, "fpr{num:02}: {value:21} ", num = reg_num, value = self.reg_fpr[reg_num] )?; } writeln!(f, "\n\nCPU Special Registers:")?; writeln!( f, "\ reg_pc: {:#018X}\n\ reg_hi: {:#018X}\n\ reg_lo: {:#018X}\n\ reg_llbit: {}\n\ reg_fcr0: {:#010X}\n\ reg_fcr31: {:#010X}\n\ ", self.reg_pc, self.reg_hi, self.reg_lo, self.reg_llbit, self.reg_fcr0, self.reg_fcr31 )?; writeln!(f, "{:#?}", self.cp0) } }
if let Some(pc) = self.delay_slot_pc { let instr = self.read_instruction(interconnect, pc); self.delay_slot_pc = None; self.execute_instruction(interconnect, instr); } else { let instr = self.read_instruction(interconnect, self.reg_pc); self.reg_pc += 4; self.execute_instruction(interconnect, instr); }
if_condition
[ { "content": "pub fn map_addr(addr: u32) -> Addr {\n\n match addr {\n\n PIF_ROM_START..=PIF_ROM_END => Addr::PifRom(addr - PIF_ROM_START),\n\n PIF_RAM_START..=PIF_RAM_END => Addr::PifRam(addr - PIF_RAM_START),\n\n\n\n CART_DOM1_ADDR2_START..=CART_DOM1_ADDR2_END => Addr::CartDom1(addr - CART_DOM1_ADDR2_START),\n\n\n\n SP_DMEM_START..=SP_DMEM_END => Addr::SpDmem(addr - SP_DMEM_START),\n\n\n\n SP_IMEM_START..=SP_IMEM_END => Addr::SpImem(addr - SP_IMEM_START),\n\n\n\n SP_STATUS_REG => Addr::SpStatusReg,\n\n SP_DMA_BUSY_REG => Addr::SpDmaBusyReg,\n\n\n\n DPC_STATUS_REG => Addr::DpcStatusReg,\n\n\n\n AI_DRAM_ADDR_REG => Addr::AiDramAddrReg,\n\n AI_LEN_REG => Addr::AiLenReg,\n\n\n\n VI_INTR_REG => Addr::ViIntrReg,\n", "file_path": "src/n64/mem_map.rs", "rank": 0, "score": 83953.29748948086 }, { "content": "#[derive(Debug, Default)]\n\nstruct DiagnosticStatus {\n\n // ITS\n\n instruction_trace_support: bool,\n\n\n\n // BEV\n\n // TODO: Better name?\n\n tlb_general_exception_vector_location: TLBGeneralExceptionVectorLocation,\n\n\n\n // TS\n\n tlb_shutdown: bool,\n\n\n\n // SR\n\n soft_reset_or_nmi_occurred: bool,\n\n\n\n // CH\n\n condition_bit: bool,\n\n}\n\n\n\nimpl From<u32> for DiagnosticStatus {\n\n fn from(value: u32) -> Self {\n", "file_path": "src/n64/cpu/cp0/reg_status.rs", "rank": 1, "score": 73652.6332554233 }, { "content": "#[derive(Debug, Default)]\n\nstruct InterruptMask {\n\n // IM(7)\n\n timer_interrupt: bool,\n\n\n\n // IM(6:2)\n\n external_interrupt_write_req: [bool; 5],\n\n\n\n // IM(1:0)\n\n software_interrupt_cause_reg: [bool; 2],\n\n}\n\n\n\nimpl From<u32> for InterruptMask {\n\n fn from(value: u32) -> Self {\n\n InterruptMask {\n\n timer_interrupt: (value & (1 << 15)) != 0,\n\n\n\n external_interrupt_write_req: [\n\n (value & (1 << 10)) != 0,\n\n (value & (1 << 11)) != 0,\n\n (value & (1 << 12)) != 0,\n\n (value & (1 << 13)) != 0,\n\n (value & (1 << 14)) != 0],\n\n\n\n software_interrupt_cause_reg: [\n\n (value & (1 << 8)) != 0,\n\n (value & (1 << 
9)) != 0]\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/n64/cpu/cp0/reg_status.rs", "rank": 2, "score": 73652.6332554233 }, { "content": "#[derive(Debug)]\n\nenum Mode {\n\n Kernel,\n\n Supervisor,\n\n User,\n\n}\n\n\n\nimpl Default for Mode {\n\n fn default() -> Self {\n\n Mode::Kernel\n\n }\n\n}\n\n\n\nimpl From<u32> for Mode {\n\n fn from(value: u32) -> Self {\n\n match (value >> 3) & 0b11 {\n\n 0b00 => Mode::Kernel,\n\n 0b01 => Mode::Supervisor,\n\n 0b10 => Mode::User,\n\n _ => panic!(\"Invalid cp0 KSU bits: {:#b}\", value),\n\n }\n\n }\n\n}\n", "file_path": "src/n64/cpu/cp0/reg_status.rs", "rank": 3, "score": 73567.33831775599 }, { "content": "#[derive(Debug)]\n\nenum Endianness {\n\n Little,\n\n Big,\n\n}\n\n\n\nimpl Default for Endianness {\n\n fn default() -> Self {\n\n Endianness::Big\n\n }\n\n}\n\n\n\nimpl From<u32> for Endianness {\n\n fn from(value: u32) -> Self {\n\n match (value >> 15) & 0b1 {\n\n 0 => Endianness::Little,\n\n 1 => Endianness::Big,\n\n _ => unreachable!(),\n\n }\n\n }\n\n}\n", "file_path": "src/n64/cpu/cp0/reg_config.rs", "rank": 4, "score": 73567.33831775599 }, { "content": "#[derive(Debug)]\n\nenum DataTransferPattern {\n\n Normal, // D\n\n DxxDxx,\n\n}\n\n\n\nimpl Default for DataTransferPattern {\n\n fn default() -> Self {\n\n DataTransferPattern::Normal\n\n }\n\n}\n\n\n\nimpl From<u32> for DataTransferPattern {\n\n fn from(value: u32) -> Self {\n\n match (value >> 24) & 0b1111 {\n\n 0 => DataTransferPattern::Normal,\n\n 6 => DataTransferPattern::DxxDxx,\n\n _ => panic!(\"Invalid data transfer pattern (EP): {:#x}\", value),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/n64/cpu/cp0/reg_config.rs", "rank": 7, "score": 69109.2970785417 }, { "content": "#[derive(Debug)]\n\nenum TLBGeneralExceptionVectorLocation {\n\n Normal,\n\n Bootstrap,\n\n}\n\n\n\nimpl Default for TLBGeneralExceptionVectorLocation {\n\n fn default() -> Self {\n\n TLBGeneralExceptionVectorLocation::Normal\n\n }\n\n}\n\n\n\nimpl From<u32> for 
TLBGeneralExceptionVectorLocation {\n\n fn from(value: u32) -> Self {\n\n match (value >> 22) & 0b1 {\n\n 0 => TLBGeneralExceptionVectorLocation::Normal,\n\n 1 => TLBGeneralExceptionVectorLocation::Bootstrap,\n\n _ => unreachable!(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/n64/cpu/cp0/reg_status.rs", "rank": 8, "score": 65243.44225155156 }, { "content": "use super::{reg_config, reg_status};\n\n\n\n#[derive(Debug, Default)]\n\npub struct Cp0 {\n\n reg_status: reg_status::RegStatus,\n\n reg_config: reg_config::RegConfig,\n\n}\n\n\n\nimpl Cp0 {\n\n pub fn write_reg(&mut self, index: u32, data: u64) {\n\n match index {\n\n 12 => self.reg_status = (data as u32).into(),\n\n 16 => self.reg_config = (data as u32).into(),\n\n _ => panic!(\"Unrecognized Cp0 reg: {}, {:#018x}\", index, data),\n\n }\n\n }\n\n}\n", "file_path": "src/n64/cpu/cp0/cp0.rs", "rank": 9, "score": 50782.78275437573 }, { "content": "use std::fmt;\n\n\n\nuse super::opcode::{Opcode, RegImmOpcode, SpecialOpcode};\n\n\n\nuse num::FromPrimitive;\n\n\n\n#[derive(Clone, Copy)]\n\npub struct Instruction(pub u32);\n\n\n\nimpl Instruction {\n\n #[inline(always)]\n\n pub fn opcode(&self) -> Opcode {\n\n let value = (self.0 >> 26) & 0b111111;\n\n Opcode::from_u32(value).unwrap_or_else(|| {\n\n panic!(\"Unrecognized instruction: {:#010x} (op: {:#08b})\", self.0, value)\n\n })\n\n }\n\n\n\n #[inline(always)]\n\n pub fn rs(&self) -> usize {\n", "file_path": "src/n64/cpu/instruction.rs", "rank": 10, "score": 46673.12406943106 }, { "content": " ((self.0 >> 21) & 0b11111) as usize\n\n }\n\n\n\n #[inline(always)]\n\n pub fn rt(&self) -> usize {\n\n ((self.0 >> 16) & 0b11111) as usize\n\n }\n\n\n\n #[inline(always)]\n\n pub fn rd(&self) -> u32 {\n\n (self.0 >> 11) & 0b11111\n\n }\n\n\n\n #[inline(always)]\n\n pub fn sa(&self) -> u32 {\n\n (self.0 >> 6) & 0b11111\n\n }\n\n\n\n #[inline(always)]\n\n pub fn imm(&self) -> u32 {\n", "file_path": "src/n64/cpu/instruction.rs", "rank": 11, "score": 46668.516185367014 }, { 
"content": " self.0 & 0xffff\n\n }\n\n\n\n #[inline(always)]\n\n pub fn imm_sign_extended(&self) -> u64 {\n\n (self.imm() as i16) as u64\n\n }\n\n\n\n #[inline(always)]\n\n pub fn offset(&self) -> u32 {\n\n self.imm()\n\n }\n\n\n\n #[inline(always)]\n\n pub fn offset_sign_extended(&self) -> u64 {\n\n (self.offset() as i16) as u64\n\n }\n\n\n\n #[inline(always)]\n\n pub fn special_op(&self) -> SpecialOpcode {\n", "file_path": "src/n64/cpu/instruction.rs", "rank": 12, "score": 46667.32702897751 }, { "content": " let value = self.0 & 0b111111;\n\n SpecialOpcode::from_u32(value).unwrap_or_else(|| {\n\n panic!(\"Unrecognized special opcode: {:#010x} (op: {:#08b})\", self.0, value)\n\n })\n\n }\n\n\n\n #[inline(always)]\n\n pub fn reg_imm_op(&self) -> RegImmOpcode {\n\n let value = (self.0 >> 16) & 0b11111;\n\n RegImmOpcode::from_u32(value).unwrap_or_else(|| {\n\n panic!(\"Unrecognized reg imm opcode: {:#010x} (op: {:#08b})\", self.0, value)\n\n })\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Instruction {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self.opcode() {\n\n Opcode::Special => write!(f, \"{:?}\", self.special_op()),\n\n _ => write!(f, \"{:?}\", self.opcode()),\n\n }\n\n }\n\n}\n", "file_path": "src/n64/cpu/instruction.rs", "rank": 13, "score": 46667.07217500289 }, { "content": "mod cp0;\n\nmod reg_config;\n\nmod reg_status;\n\n\n\npub use self::cp0::Cp0;\n", "file_path": "src/n64/cpu/cp0/mod.rs", "rank": 14, "score": 43941.61921860792 }, { "content": "impl From<u32> for RegConfig {\n\n fn from(value: u32) -> Self {\n\n RegConfig {\n\n data_transfer_pattern: value.into(),\n\n\n\n endianness: value.into(),\n\n\n\n cu: (value & (1 << 3)) != 0,\n\n kseg0_cache_enable_bits: [\n\n (value & (1 << 0)) != 0,\n\n (value & (1 << 1)) != 0,\n\n (value & (1 << 2)) != 0,\n\n ]\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/n64/cpu/cp0/reg_config.rs", "rank": 15, "score": 42190.51088007266 }, { "content": "#[derive(Debug, Default)]\n\npub 
struct RegConfig {\n\n // EP\n\n data_transfer_pattern: DataTransferPattern,\n\n\n\n // BE\n\n endianness: Endianness,\n\n\n\n cu: bool,\n\n kseg0_cache_enable_bits: [bool; 3],\n\n}\n\n\n\nimpl RegConfig {\n\n fn kseg0_cache_enabled(&self) -> bool {\n\n !(!self.kseg0_cache_enable_bits[0] &&\n\n self.kseg0_cache_enable_bits[1] &&\n\n !self.kseg0_cache_enable_bits[2])\n\n }\n\n}\n\n\n", "file_path": "src/n64/cpu/cp0/reg_config.rs", "rank": 16, "score": 42189.873120591146 }, { "content": "}\n\n\n\nimpl From<u32> for RegStatus {\n\n fn from(value: u32) -> Self {\n\n RegStatus {\n\n coprocessor_usability: [\n\n (value & (1 << 28)) != 0,\n\n (value & (1 << 29)) != 0,\n\n (value & (1 << 30)) != 0,\n\n (value & (1 << 31)) != 0],\n\n\n\n low_power: (value & (1 << 27)) != 0,\n\n additional_fp_regs: (value & (1 << 26)) != 0,\n\n reverse_endian: (value & (1 << 25)) != 0,\n\n\n\n diagnostic_status: value.into(),\n\n interrupt_mask: value.into(),\n\n\n\n kernel_mode_64bit_addressing: (value & (1 << 7)) != 0,\n\n supervisor_mode_64bit_addressing: (value & (1 << 6)) != 0,\n", "file_path": "src/n64/cpu/cp0/reg_status.rs", "rank": 17, "score": 42189.49540576071 }, { "content": "#[derive(Debug, Default)]\n\npub struct RegStatus {\n\n // CU\n\n coprocessor_usability: [bool; 4],\n\n\n\n // RP\n\n low_power: bool,\n\n\n\n // FR\n\n additional_fp_regs: bool,\n\n\n\n // RE\n\n reverse_endian: bool,\n\n\n\n // DS\n\n diagnostic_status: DiagnosticStatus,\n\n\n\n // IM(7:0)\n\n interrupt_mask: InterruptMask,\n\n\n", "file_path": "src/n64/cpu/cp0/reg_status.rs", "rank": 18, "score": 42188.74354192566 }, { "content": " // KX\n\n kernel_mode_64bit_addressing: bool,\n\n\n\n // SX\n\n supervisor_mode_64bit_addressing: bool,\n\n\n\n // UX\n\n user_mode_64bit_addressing: bool,\n\n\n\n // KSU\n\n mode: Mode,\n\n\n\n // ERL\n\n error_level: bool,\n\n\n\n // EXL\n\n exception_level: bool,\n\n\n\n // IE\n\n interrupts_enabled: bool,\n", "file_path": "src/n64/cpu/cp0/reg_status.rs", "rank": 19, "score": 
42187.34515401058 }, { "content": " DiagnosticStatus {\n\n instruction_trace_support: (value & (1 << 24)) != 0,\n\n\n\n tlb_general_exception_vector_location: value.into(),\n\n\n\n tlb_shutdown: (value & (1 << 21)) != 0,\n\n soft_reset_or_nmi_occurred: (value & (1 << 20)) != 0,\n\n condition_bit: (value & (1 << 18)) != 0,\n\n }\n\n }\n\n}\n\n\n\n// TODO: Better name?\n", "file_path": "src/n64/cpu/cp0/reg_status.rs", "rank": 20, "score": 42185.86088649838 }, { "content": " user_mode_64bit_addressing: (value & (1 << 5)) != 0,\n\n\n\n mode: value.into(),\n\n\n\n error_level: (value & (1 << 2)) != 0,\n\n exception_level: (value & (1 << 1)) != 0,\n\n interrupts_enabled: (value & (1 << 0)) != 0,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Default)]\n", "file_path": "src/n64/cpu/cp0/reg_status.rs", "rank": 21, "score": 42183.770903596356 }, { "content": "fn main() {\n\n let pif_file_name = env::args().nth(1).unwrap();\n\n let rom_file_name = env::args().nth(2).unwrap();\n\n\n\n let pif = read_bin(pif_file_name);\n\n let rom = read_bin(rom_file_name);\n\n\n\n let n64 = N64::new(pif, rom);\n\n let mut debugger = Debugger::new(n64);\n\n debugger.run();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 22, "score": 38317.67941809198 }, { "content": "fn read_stdin() -> String {\n\n let mut input = String::new();\n\n stdin().read_line(&mut input).unwrap();\n\n input.trim().into()\n\n}\n", "file_path": "src/debugger/mod.rs", "rank": 23, "score": 33510.700525741544 }, { "content": " si: SerialInterface,\n\n\n\n cart_rom: Box<[u8]>,\n\n\n\n rdram: Box<[u16]>,\n\n}\n\n\n\nimpl Interconnect {\n\n pub fn new(boot_rom: Box<[u8]>, cart_rom: Box<[u8]>) -> Interconnect {\n\n Interconnect {\n\n pif: Pif::new(boot_rom),\n\n\n\n rdp: Rdp,\n\n rsp: Rsp::new(),\n\n\n\n ai: AudioInterface::default(),\n\n vi: VideoInterface::default(),\n\n\n\n pi: PeripheralInterface::default(),\n\n\n", "file_path": "src/n64/interconnect.rs", "rank": 41, "score": 26100.290980123827 }, { "content": "use 
byteorder::{BigEndian, ByteOrder};\n\n\n\nuse super::mem_map::{self, Addr};\n\nuse super::{AudioInterface, PeripheralInterface, Pif, Rdp, Rsp, SerialInterface, VideoInterface};\n\n\n\nuse std::fmt;\n\n\n\nconst RDRAM_SIZE: usize = 4 * 1024 * 1024;\n\n\n\npub struct Interconnect {\n\n pif: Pif,\n\n\n\n rsp: Rsp,\n\n rdp: Rdp,\n\n\n\n ai: AudioInterface,\n\n vi: VideoInterface,\n\n\n\n pi: PeripheralInterface,\n\n\n", "file_path": "src/n64/interconnect.rs", "rank": 42, "score": 26099.338948842436 }, { "content": " si: SerialInterface::default(),\n\n\n\n cart_rom: cart_rom,\n\n\n\n rdram: vec![0; RDRAM_SIZE].into_boxed_slice(),\n\n }\n\n }\n\n\n\n pub fn pif(&self) -> &Pif {\n\n &self.pif\n\n }\n\n\n\n pub fn read_word(&self, addr: u32) -> u32 {\n\n match mem_map::map_addr(addr) {\n\n Addr::PifRom(offset) => self.pif.read_boot_rom(offset),\n\n Addr::PifRam(offset) => self.pif.read_ram(offset),\n\n\n\n Addr::CartDom1(offset) => BigEndian::read_u32(&self.cart_rom[offset as usize..]),\n\n\n\n Addr::SpDmem(offset) => self.rsp.read_dmem(offset),\n", "file_path": "src/n64/interconnect.rs", "rank": 43, "score": 26097.053157895687 }, { "content": " Addr::AiLenReg => self.ai.write_len_reg(value),\n\n\n\n Addr::ViIntrReg => self.vi.write_intr_reg(value),\n\n Addr::ViCurrentReg => self.vi.write_current_reg(value),\n\n Addr::ViHStartReg => self.vi.write_h_start_reg(value),\n\n\n\n Addr::PiStatusReg => self.pi.write_status_reg(value),\n\n Addr::PiBsdDom1LatReg => self.pi.write_bsd_dom1_lat_reg(value),\n\n Addr::PiBsdDom1PwdReg => self.pi.write_bsd_dom1_pwd_reg(value),\n\n Addr::PiBsdDom1PgsReg => self.pi.write_bsd_dom1_pgs_reg(value),\n\n Addr::PiBsdDom1RlsReg => self.pi.write_bsd_dom1_rls_reg(value),\n\n\n\n Addr::SiStatusReg => self.si.write_status_reg(value),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Debug for Interconnect {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"TODO: Impl Debug for Interconnect\")\n\n }\n\n}\n", "file_path": 
"src/n64/interconnect.rs", "rank": 44, "score": 26095.464080021295 }, { "content": " Addr::SiStatusReg => self.si.read_status_reg(),\n\n }\n\n }\n\n\n\n pub fn write_word(&mut self, addr: u32, value: u32) {\n\n match mem_map::map_addr(addr) {\n\n Addr::PifRom(_) => panic!(\"Cannot write to PIF ROM\"),\n\n Addr::PifRam(offset) => self.pif.write_ram(offset, value),\n\n\n\n Addr::CartDom1(_) => panic!(\"Cannot write to cart ROM\"),\n\n\n\n Addr::SpDmem(offset) => self.rsp.write_dmem(offset, value),\n\n Addr::SpImem(offset) => self.rsp.write_imem(offset, value),\n\n\n\n Addr::SpStatusReg => self.rsp.write_status_reg(value),\n\n Addr::SpDmaBusyReg => self.rsp.write_dma_busy_reg(value),\n\n\n\n Addr::DpcStatusReg => self.rdp.write_status_reg(value),\n\n\n\n Addr::AiDramAddrReg => self.ai.write_dram_addr_reg(value),\n", "file_path": "src/n64/interconnect.rs", "rank": 45, "score": 26095.05550368859 }, { "content": " Addr::SpImem(offset) => self.rsp.read_imem(offset),\n\n\n\n Addr::SpStatusReg => self.rsp.read_status_reg(),\n\n Addr::SpDmaBusyReg => self.rsp.read_dma_busy_reg(),\n\n\n\n Addr::DpcStatusReg => self.rdp.read_status_reg(),\n\n\n\n Addr::AiDramAddrReg => self.ai.read_dram_addr_reg(),\n\n Addr::AiLenReg => self.ai.read_len_reg(),\n\n\n\n Addr::ViIntrReg => self.vi.read_intr_reg(),\n\n Addr::ViCurrentReg => self.vi.read_current_reg(),\n\n Addr::ViHStartReg => self.vi.read_h_start_reg(),\n\n\n\n Addr::PiStatusReg => self.pi.read_status_reg(),\n\n Addr::PiBsdDom1LatReg => self.pi.read_bsd_dom1_lat_reg(),\n\n Addr::PiBsdDom1PwdReg => self.pi.read_bsd_dom1_pwd_reg(),\n\n Addr::PiBsdDom1PgsReg => self.pi.read_bsd_dom1_pgs_reg(),\n\n Addr::PiBsdDom1RlsReg => self.pi.read_bsd_dom1_rls_reg(),\n\n\n", "file_path": "src/n64/interconnect.rs", "rank": 46, "score": 26091.680214775988 }, { "content": "fn read_bin<P: AsRef<Path>>(path: P) -> Box<[u8]> {\n\n let mut file = File::open(path).unwrap();\n\n let mut file_buf = Vec::new();\n\n file.read_to_end(&mut 
file_buf).unwrap();\n\n file_buf.into_boxed_slice()\n\n}\n", "file_path": "src/main.rs", "rank": 47, "score": 24952.670886211046 }, { "content": "mod cpu;\n\nmod cp0;\n\npub mod opcode;\n\nmod instruction;\n\n\n\npub use self::cpu::Cpu;\n\npub use self::instruction::Instruction;\n", "file_path": "src/n64/cpu/mod.rs", "rank": 48, "score": 21569.041523627657 }, { "content": "\n\n Lw = 0b100011,\n\n\n\n Sw = 0b101011,\n\n }\n\n}\n\n\n\nenum_from_primitive! {\n\n #[derive(Debug)]\n\n pub enum SpecialOpcode {\n\n Sll = 0b000000,\n\n\n\n Srl = 0b000010,\n\n\n\n Sllv = 0b000100,\n\n\n\n Srlv = 0b000110,\n\n\n\n Jr = 0b001000,\n\n\n", "file_path": "src/n64/cpu/opcode.rs", "rank": 49, "score": 21555.91419998262 }, { "content": " Multu = 0b011001,\n\n\n\n Mfhi = 0b010000,\n\n Mflo = 0b010010,\n\n\n\n Addu = 0b100001,\n\n\n\n Subu = 0b100011,\n\n And = 0b100100,\n\n Or = 0b100101,\n\n Xor = 0b100110,\n\n\n\n Sltu = 0b101011,\n\n }\n\n}\n\n\n\nenum_from_primitive! {\n\n #[derive(Debug)]\n\n pub enum RegImmOpcode {\n\n Bgezal = 0b10001,\n\n }\n\n}\n", "file_path": "src/n64/cpu/opcode.rs", "rank": 50, "score": 21555.776440236878 }, { "content": "enum_from_primitive! 
{\n\n #[derive(Debug)]\n\n pub enum Opcode {\n\n Special = 0b000000,\n\n RegImm = 0b000001,\n\n\n\n Addi = 0b001000,\n\n Addiu = 0b001001,\n\n\n\n Andi = 0b001100,\n\n Ori = 0b001101,\n\n\n\n Lui = 0b001111,\n\n Mtc0 = 0b010000,\n\n\n\n Beq = 0b000100,\n\n Bne = 0b000101,\n\n\n\n Beql = 0b010100,\n\n Bnel = 0b010101,\n", "file_path": "src/n64/cpu/opcode.rs", "rank": 51, "score": 21555.656463849144 }, { "content": "use super::{Cpu, Interconnect};\n\n\n\n#[derive(Debug)]\n\npub struct N64 {\n\n cpu: Cpu,\n\n interconnect: Interconnect,\n\n}\n\n\n\nimpl N64 {\n\n pub fn new(boot_rom: Box<[u8]>, cart_rom: Box<[u8]>) -> N64 {\n\n N64 {\n\n cpu: Cpu::new(),\n\n interconnect: Interconnect::new(boot_rom, cart_rom),\n\n }\n\n }\n\n\n\n pub fn cpu(&self) -> &Cpu {\n\n &self.cpu\n\n }\n\n\n\n pub fn interconnect(&self) -> &Interconnect {\n\n &self.interconnect\n\n }\n\n\n\n pub fn step(&mut self) {\n\n self.cpu.step(&mut self.interconnect);\n\n }\n\n}\n", "file_path": "src/n64/n64.rs", "rank": 52, "score": 17.808008937140634 }, { "content": "use byteorder::{BigEndian, ByteOrder};\n\n\n\nuse super::mem_map::PIF_RAM_LENGTH;\n\n\n\npub struct Pif {\n\n boot_rom: Box<[u8]>,\n\n ram: Box<[u8]>,\n\n}\n\n\n\nimpl Pif {\n\n pub fn new(boot_rom: Box<[u8]>) -> Pif {\n\n Pif {\n\n boot_rom: boot_rom,\n\n\n\n ram: vec![0; PIF_RAM_LENGTH as usize].into_boxed_slice(),\n\n }\n\n }\n\n\n\n pub fn read_boot_rom(&self, addr: u32) -> u32 {\n\n BigEndian::read_u32(&self.boot_rom[addr as usize..])\n", "file_path": "src/n64/pif.rs", "rank": 53, "score": 11.73682868926477 }, { "content": "use byteorder::{BigEndian, ByteOrder};\n\n\n\nuse super::mem_map::{SP_DMEM_LENGTH, SP_IMEM_LENGTH};\n\n\n\npub struct Rsp {\n\n dmem: Box<[u8]>,\n\n imem: Box<[u8]>,\n\n\n\n halt: bool,\n\n broke: bool,\n\n interrupt_enable: bool,\n\n}\n\n\n\nimpl Rsp {\n\n pub fn new() -> Rsp {\n\n // TODO: Check for correct init hw state\n\n Rsp {\n\n dmem: vec![0; SP_DMEM_LENGTH as usize].into_boxed_slice(),\n\n imem: vec![0; 
SP_IMEM_LENGTH as usize].into_boxed_slice(),\n\n\n", "file_path": "src/n64/rsp.rs", "rank": 54, "score": 11.625876574213502 }, { "content": "mod audio_interface;\n\npub mod cpu;\n\nmod interconnect;\n\npub mod mem_map;\n\nmod n64;\n\nmod peripheral_interface;\n\nmod pif;\n\nmod rdp;\n\nmod rsp;\n\nmod serial_interface;\n\nmod video_interface;\n\n\n\npub use self::audio_interface::AudioInterface;\n\npub use self::cpu::Cpu;\n\npub use self::interconnect::Interconnect;\n\npub use self::n64::N64;\n\npub use self::peripheral_interface::PeripheralInterface;\n\npub use self::pif::Pif;\n\npub use self::rdp::Rdp;\n\npub use self::rsp::Rsp;\n\npub use self::serial_interface::SerialInterface;\n\npub use self::video_interface::VideoInterface;\n", "file_path": "src/n64/mod.rs", "rank": 55, "score": 11.130341618415628 }, { "content": " halt: true,\n\n broke: false,\n\n interrupt_enable: false,\n\n }\n\n }\n\n\n\n pub fn read_dmem(&self, offset: u32) -> u32 {\n\n BigEndian::read_u32(&self.dmem[offset as usize..])\n\n }\n\n\n\n pub fn write_dmem(&mut self, offset: u32, value: u32) {\n\n BigEndian::write_u32(&mut self.dmem[offset as usize..], value);\n\n }\n\n\n\n pub fn read_imem(&self, offset: u32) -> u32 {\n\n BigEndian::read_u32(&self.imem[offset as usize..])\n\n }\n\n\n\n pub fn write_imem(&mut self, offset: u32, value: u32) {\n\n BigEndian::write_u32(&mut self.imem[offset as usize..], value);\n", "file_path": "src/n64/rsp.rs", "rank": 56, "score": 11.004988113428809 }, { "content": " }\n\n\n\n pub fn read_ram(&self, addr: u32) -> u32 {\n\n BigEndian::read_u32(&self.ram[addr as usize..])\n\n }\n\n\n\n pub fn write_ram(&mut self, addr: u32, value: u32) {\n\n BigEndian::write_u32(&mut self.ram[addr as usize..], value);\n\n }\n\n}\n", "file_path": "src/n64/pif.rs", "rank": 57, "score": 10.846138291370158 }, { "content": "mod command;\n\n\n\nuse crate::n64;\n\n\n\nuse self::command::Command;\n\nuse n64::cpu::opcode::Opcode::*;\n\nuse n64::cpu::Instruction;\n\nuse 
n64::mem_map;\n\nuse n64::mem_map::Addr::*;\n\nuse n64::N64;\n\nuse std::io::prelude::*;\n\nuse std::io::{stdin, stdout};\n\n\n\npub struct Debugger {\n\n n64: N64,\n\n\n\n last_command: Option<Command>,\n\n}\n\n\n\nimpl Debugger {\n", "file_path": "src/debugger/mod.rs", "rank": 58, "score": 10.461674025481805 }, { "content": "pub struct Rdp;\n\n\n\nimpl Rdp {\n\n pub fn read_status_reg(&self) -> u32 {\n\n // TODO: Proper impl\n\n 0\n\n }\n\n\n\n pub fn write_status_reg(&mut self, value: u32) {\n\n // TODO\n\n panic!(\"Write to rdp status reg: {:#?}\", value);\n\n }\n\n}\n", "file_path": "src/n64/rdp.rs", "rank": 59, "score": 9.996666951600133 }, { "content": "#[derive(Default)]\n\npub struct SerialInterface;\n\n\n\nimpl SerialInterface {\n\n pub fn read_status_reg(&self) -> u32 {\n\n // TODO: Proper impl\n\n 0\n\n }\n\n\n\n pub fn write_status_reg(&mut self, value: u32) {\n\n panic!(\"Writes to SI status reg not yet implemented\");\n\n }\n\n}\n", "file_path": "src/n64/serial_interface.rs", "rank": 60, "score": 9.636529836933503 }, { "content": "use std::borrow::Cow;\n\nuse std::str::{self, FromStr};\n\n\n\nuse nom::{IResult, eof, space, digit};\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum Command {\n\n Step(usize),\n\n Exit,\n\n Repeat,\n\n}\n\n\n\nimpl FromStr for Command {\n\n type Err = Cow<'static, str>;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match command(s.as_bytes()) {\n\n IResult::Done(_, c) => Ok(c),\n\n err => Err(format!(\"Unable to parse command: {:?}\", err).into())\n\n }\n", "file_path": "src/debugger/command.rs", "rank": 61, "score": 9.515582423884386 }, { "content": " pub fn read_bsd_dom1_pgs_reg(&self) -> u32 {\n\n // TODO: Proper impl (probably not necessary)\n\n 0\n\n }\n\n\n\n pub fn write_bsd_dom1_pgs_reg(&mut self, value: u32) {\n\n // TODO: Proper impl (probably not necessary)\n\n println!(\"PI_BSD_DOM1_PGS_REG written: {:#x}\", value);\n\n }\n\n\n\n pub fn read_bsd_dom1_rls_reg(&self) -> u32 {\n\n // TODO: Proper 
impl (probably not necessary)\n\n 0\n\n }\n\n\n\n pub fn write_bsd_dom1_rls_reg(&mut self, value: u32) {\n\n // TODO: Proper impl (probably not necessary)\n\n println!(\"PI_BSD_DOM1_RLS_REG written: {:#x}\", value);\n\n }\n\n}\n", "file_path": "src/n64/peripheral_interface.rs", "rank": 62, "score": 9.44905396521056 }, { "content": " pub fn read_bsd_dom1_lat_reg(&self) -> u32 {\n\n // TODO: Proper impl (probably not necessary)\n\n 0\n\n }\n\n\n\n pub fn write_bsd_dom1_lat_reg(&mut self, value: u32) {\n\n // TODO: Proper impl (probably not necessary)\n\n println!(\"PI_BSD_DOM1_LAT_REG written: {:#x}\", value);\n\n }\n\n\n\n pub fn read_bsd_dom1_pwd_reg(&self) -> u32 {\n\n // TODO: Proper impl (probably not necessary)\n\n 0\n\n }\n\n\n\n pub fn write_bsd_dom1_pwd_reg(&mut self, value: u32) {\n\n // TODO: Proper impl (probably not necessary)\n\n println!(\"PI_BSD_DOM1_PWD_REG written: {:#x}\", value);\n\n }\n\n\n", "file_path": "src/n64/peripheral_interface.rs", "rank": 63, "score": 9.44905396521056 }, { "content": "#[derive(Default)]\n\npub struct AudioInterface {\n\n dram_addr: u32,\n\n length: u32,\n\n}\n\n\n\nimpl AudioInterface {\n\n pub fn read_dram_addr_reg(&self) -> u32 {\n\n self.dram_addr\n\n }\n\n\n\n pub fn write_dram_addr_reg(&mut self, value: u32) {\n\n self.dram_addr = value & 0x00ff_ffff;\n\n }\n\n\n\n pub fn read_len_reg(&self) -> u32 {\n\n self.length\n\n }\n\n\n\n pub fn write_len_reg(&mut self, value: u32) {\n\n self.length = value & 0x0003_fff8;\n\n }\n\n}\n", "file_path": "src/n64/audio_interface.rs", "rank": 64, "score": 9.43006444640174 }, { "content": "#[derive(Default)]\n\npub struct VideoInterface {\n\n interrupt_half_line: u32,\n\n\n\n active_video_start: u32,\n\n active_video_end: u32,\n\n}\n\n\n\nimpl VideoInterface {\n\n pub fn read_intr_reg(&self) -> u32 {\n\n self.interrupt_half_line\n\n }\n\n\n\n pub fn write_intr_reg(&mut self, value: u32) {\n\n self.interrupt_half_line = value & 0x0000_03ff;\n\n }\n\n\n\n pub fn 
read_current_reg(&self) -> u32 {\n\n // TODO: Proper impl\n\n 0\n", "file_path": "src/n64/video_interface.rs", "rank": 65, "score": 9.38553959212932 }, { "content": " match command {\n\n Ok(Command::Step(count)) => self.step(count),\n\n Ok(Command::Exit) => break,\n\n Ok(Command::Repeat) => unreachable!(),\n\n Err(ref e) => println!(\"{}\", e),\n\n }\n\n\n\n self.last_command = command.ok();\n\n }\n\n }\n\n\n\n pub fn step(&mut self, count: usize) {\n\n for _ in 0..count {\n\n let current_pc = self.n64.cpu().current_pc_phys();\n\n let addr = mem_map::map_addr(current_pc as u32);\n\n let instr = Instruction(match addr {\n\n PifRom(offset) => self.n64.interconnect().pif().read_boot_rom(offset),\n\n _ => panic!(\"Debugger can't inspect address: {:?}\", addr),\n\n });\n\n\n", "file_path": "src/debugger/mod.rs", "rank": 66, "score": 9.287918273756024 }, { "content": "#[derive(Default)]\n\npub struct PeripheralInterface;\n\n\n\nimpl PeripheralInterface {\n\n pub fn read_status_reg(&self) -> u32 {\n\n // TODO: Proper impl\n\n 0\n\n }\n\n\n\n pub fn write_status_reg(&mut self, value: u32) {\n\n if (value & (1 << 0)) != 0 {\n\n println!(\"WARNING: PI reset controller bit written but not yet implemented\");\n\n }\n\n\n\n if (value & (1 << 1)) != 0 {\n\n // TODO: Affect MI_INTR_REG\n\n println!(\"WARNING: PI clear intr bit written but not yet implemented\");\n\n }\n\n }\n\n\n", "file_path": "src/n64/peripheral_interface.rs", "rank": 67, "score": 8.30351446981376 }, { "content": " pub fn new(n64: N64) -> Debugger {\n\n Debugger {\n\n n64: n64,\n\n\n\n last_command: None,\n\n }\n\n }\n\n\n\n pub fn run(&mut self) {\n\n loop {\n\n print!(\"r64> \");\n\n stdout().flush().unwrap();\n\n\n\n let command = match (read_stdin().parse(), self.last_command) {\n\n (Ok(Command::Repeat), Some(c)) => Ok(c),\n\n (Ok(Command::Repeat), None) => Err(\"No last command\".into()),\n\n (Ok(c), _) => Ok(c),\n\n (Err(e), _) => Err(e),\n\n };\n\n\n", "file_path": "src/debugger/mod.rs", "rank": 68, 
"score": 8.036921104891636 }, { "content": " }\n\n\n\n pub fn write_current_reg(&mut self, value: u32) {\n\n // TODO: Clear interrupt\n\n }\n\n\n\n pub fn read_h_start_reg(&self) -> u32 {\n\n (self.active_video_start << 16) | self.active_video_end\n\n }\n\n\n\n pub fn write_h_start_reg(&mut self, value: u32) {\n\n self.active_video_start = (value >> 16) & 0x0000_03ff;\n\n self.active_video_end = value & 0x0000_03ff;\n\n }\n\n}\n", "file_path": "src/n64/video_interface.rs", "rank": 69, "score": 7.510024562996046 }, { "content": " }\n\n\n\n // TODO: Read general regs\n\n pub fn read_status_reg(&self) -> u32 {\n\n (if self.halt { 1 } else { 0 } << 0) |\n\n (if self.interrupt_enable { 1 } else { 0 } << 1)\n\n }\n\n\n\n pub fn write_status_reg(&mut self, value: u32) {\n\n // TODO: What happens if both a set and clear bit are set?\n\n if (value & (1 << 0)) != 0 {\n\n self.halt = false;\n\n }\n\n if (value & (1 << 1)) != 0 {\n\n self.halt = true;\n\n }\n\n\n\n if (value & (1 << 2)) != 0 {\n\n self.broke = false;\n\n }\n", "file_path": "src/n64/rsp.rs", "rank": 70, "score": 6.467787692911726 }, { "content": " if (value & (1 << 3)) != 0 {\n\n self.interrupt_enable = false;\n\n }\n\n\n\n // TODO: Remaining bits\n\n if (value & 0xfffffff0) != 0 {\n\n panic!(\"Write to unsupported rsp status bits: {:#?}\", value);\n\n }\n\n }\n\n\n\n pub fn read_dma_busy_reg(&self) -> u32 {\n\n // TODO: Proper impl\n\n 0\n\n }\n\n\n\n pub fn write_dma_busy_reg(&self, value: u32) {\n\n panic!(\"Attempted write to SP_DMA_BUSY: {:#?}\", value);\n\n }\n\n}\n", "file_path": "src/n64/rsp.rs", "rank": 71, "score": 6.196965382311849 }, { "content": "const PIF_ROM_START: u32 = 0x1fc0_0000;\n\nconst PIF_ROM_LENGTH: u32 = 0x0000_07c0;\n\nconst PIF_ROM_END: u32 = PIF_ROM_START + PIF_ROM_LENGTH - 1;\n\n\n\nconst PIF_RAM_START: u32 = 0x1fc0_07c0;\n\npub const PIF_RAM_LENGTH: u32 = 0x0000_0040;\n\nconst PIF_RAM_END: u32 = PIF_RAM_START + PIF_RAM_LENGTH - 1;\n\n\n\nconst CART_DOM1_ADDR2_START: u32 = 
0x1000_0000;\n\nconst CART_DOM1_ADDR2_LENGTH: u32 = 0x0fc0_0000;\n\nconst CART_DOM1_ADDR2_END: u32 = CART_DOM1_ADDR2_START + CART_DOM1_ADDR2_LENGTH - 1;\n\n\n\nconst SP_DMEM_START: u32 = 0x0400_0000;\n\npub const SP_DMEM_LENGTH: u32 = 0x0000_1000;\n\nconst SP_DMEM_END: u32 = SP_DMEM_START + SP_DMEM_LENGTH - 1;\n\n\n\nconst SP_IMEM_START: u32 = 0x0400_1000;\n\npub const SP_IMEM_LENGTH: u32 = 0x0000_1000;\n\nconst SP_IMEM_END: u32 = SP_IMEM_START + SP_IMEM_LENGTH - 1;\n\n\n", "file_path": "src/n64/mem_map.rs", "rank": 72, "score": 4.924394084653571 }, { "content": "const PI_BSD_DOM1_PGS_REG: u32 = PI_BASE_REG + 0x1c;\n\nconst PI_BSD_DOM1_RLS_REG: u32 = PI_BASE_REG + 0x20;\n\n\n\nconst SI_BASE_REG: u32 = 0x0480_0000;\n\nconst SI_STATUS_REG: u32 = SI_BASE_REG + 0x18;\n\n\n\n#[derive(Debug)]\n\npub enum Addr {\n\n PifRom(u32),\n\n PifRam(u32),\n\n\n\n CartDom1(u32),\n\n\n\n SpDmem(u32),\n\n SpImem(u32),\n\n\n\n SpStatusReg,\n\n SpDmaBusyReg,\n\n\n\n DpcStatusReg,\n", "file_path": "src/n64/mem_map.rs", "rank": 73, "score": 4.398682564862602 }, { "content": "#![deny(trivial_casts, trivial_numeric_casts)]\n\n\n\nextern crate byteorder;\n\n\n\nextern crate num;\n\n\n\n#[macro_use]\n\nextern crate enum_primitive;\n\n\n\n#[macro_use]\n\nextern crate nom;\n\n\n\nmod n64;\n\nmod debugger;\n\n\n\nuse std::env;\n\nuse std::fs::File;\n\nuse std::io::Read;\n\nuse std::path::Path;\n\nuse debugger::Debugger;\n\nuse n64::N64;\n\n\n", "file_path": "src/main.rs", "rank": 74, "score": 3.4964581000693475 }, { "content": "const SP_BASE_REG: u32 = 0x0404_0000;\n\nconst SP_STATUS_REG: u32 = SP_BASE_REG + 0x10;\n\nconst SP_DMA_BUSY_REG: u32 = SP_BASE_REG + 0x18;\n\n\n\nconst DPC_BASE_REG: u32 = 0x0410_0000;\n\nconst DPC_STATUS_REG: u32 = DPC_BASE_REG + 0xc;\n\n\n\nconst AI_BASE_REG: u32 = 0x0450_0000;\n\nconst AI_DRAM_ADDR_REG: u32 = AI_BASE_REG + 0x0;\n\nconst AI_LEN_REG: u32 = AI_BASE_REG + 0x4;\n\n\n\nconst VI_BASE_REG: u32 = 0x0440_0000;\n\nconst VI_INTR_REG: u32 = VI_BASE_REG + 
0xc;\n\nconst VI_CURRENT_REG: u32 = VI_BASE_REG + 0x10;\n\nconst VI_H_START_REG: u32 = VI_BASE_REG + 0x24;\n\n\n\nconst PI_BASE_REG: u32 = 0x0460_0000;\n\nconst PI_STATUS_REG: u32 = PI_BASE_REG + 0x10;\n\nconst PI_BSD_DOM1_LAT_REG: u32 = PI_BASE_REG + 0x14;\n\nconst PI_BSD_DOM1_PWD_REG: u32 = PI_BASE_REG + 0x18;\n", "file_path": "src/n64/mem_map.rs", "rank": 75, "score": 3.162588715053335 }, { "content": "named!(\n\n exit<Command>,\n\n map!(\n\n alt_complete!(tag!(\"exit\") | tag!(\"quit\") | tag!(\"e\") | tag!(\"q\")),\n\n |_| Command::Exit));\n\n\n\nnamed!(\n\n repeat<Command>,\n\n value!(Command::Repeat));\n\n\n\nnamed!(\n\n usize_parser<usize>,\n\n map_res!(\n\n map_res!(\n\n digit,\n\n str::from_utf8),\n\n FromStr::from_str));\n", "file_path": "src/debugger/command.rs", "rank": 76, "score": 2.5965034351310674 }, { "content": " }\n\n}\n\n\n\nnamed!(\n\n command<Command>,\n\n chain!(\n\n c: alt_complete!(\n\n step |\n\n exit |\n\n repeat) ~\n\n eof,\n\n || c));\n\n\n\nnamed!(\n\n step<Command>,\n\n chain!(\n\n alt_complete!(tag!(\"step\") | tag!(\"s\")) ~\n\n count: opt!(preceded!(space, usize_parser)),\n\n || Command::Step(count.unwrap_or(1))));\n\n\n", "file_path": "src/debugger/command.rs", "rank": 77, "score": 1.9860741553082386 }, { "content": " print!(\"{:018X}: \", current_pc);\n\n\n\n match instr.opcode() {\n\n Special => print!(\"{:?} (Special)\", instr.special_op()),\n\n RegImm => print!(\"{:?} (RegImm)\", instr.reg_imm_op()),\n\n _ => print!(\"{:?}\", instr),\n\n }\n\n\n\n if self.n64.cpu().will_execute_from_delay_slot() {\n\n println!(\" (DELAY)\");\n\n } else {\n\n println!(\"\");\n\n }\n\n\n\n self.n64.step();\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/debugger/mod.rs", "rank": 78, "score": 1.8749468322581326 }, { "content": "## Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any 
additional terms or conditions.\n\n\n\nI do accept (and appreciate!) pull requests, but please, try to keep them to _small, meaningful, isolated changes only_ that I can go over completely on the stream. Significant outside contributions, as cool as they may be, somewhat defeat the most important part of this project - **documenting everything through livecoding**. I'm happy to accept small cosmetic changes/bugfixes, but please, consider what the larger audience as a whole might be missing out on when they don't get to see the thought process and resources that went into making the contribution (which is unfortunately what happens whenever I accept a PR).\n\n\n\nIf you'd like to see a particular library or coding style used somewhere, opening an issue is much preferred over a PR, so we can discuss it beforehand and implement it live. This also keeps people from stepping on each others' toes and implementing the same things (yes, this has happened already).\n\n\n\nIssues, especially pertaining to accuracy/bugfixes, are always more than welcome!\n", "file_path": "CONTRIBUTING.md", "rank": 79, "score": 0.9654563081190917 }, { "content": "# rustendo64 [![Build Status](https://travis-ci.org/yupferris/rustendo64.svg?branch=master)](https://travis-ci.org/yupferris/rustendo64) [![Build Status](https://ci.appveyor.com/api/projects/status/owjloq84v91147nd/branch/master?svg=true)](https://ci.appveyor.com/project/yupferris/rustendo64/branch/master)\n\n\n\n![rustendo64](Rustendo-64.png)\n\n\n\nLivecoding a Nintendo 64 emulator in Rust :D\n\n\n\n## Follow along\n\nThis is a WIP emulator that we're building from scratch live. I haven't prepared hardly anything in advance before starting this endeavor (I'm new to the n64 hardware myself). 
The entire process is currently being streamed on [Twitch](http://www.twitch.tv/ferrisstreamsstuff), and each segment is being recorded and uploaded to [this YouTube playlist](https://www.youtube.com/playlist?list=PL-sXmdrqqYYcznDg4xwAJWQgNL2gRray2). For stream times and announcements, you can check out [my Twitter](https://twitter.com/ferristweetsnow).\n\n\n\nAt the end of each episode, I mark the latest commit with a tag so you can see where we finished. Check the [releases](https://github.com/yupferris/rustendo64/releases) for this repo to see those.\n\n\n\n## Helpful tools\n\n- [Hex Fiend](http://ridiculousfish.com/hexfiend/)\n\n- [Online disassembler](https://www.onlinedisassembler.com/odaweb/)\n\n- [Dash](https://kapeli.com/dash) (OS X / iOS) for documentation. Compatible alternatives for other platforms can be found linked from [Dash Docset Links](https://kapeli.com/docset_links).\n\n\n", "file_path": "README.md", "rank": 80, "score": 0.740954719830087 }, { "content": "## Literature\n\n- [n64dev repo doc's](https://github.com/mikeryan/n64dev/tree/master/docs)\n\n- [VR4300 datasheet](http://datasheets.chipdb.org/NEC/Vr-Series/Vr43xx/U10504EJ7V0UMJ1.pdf)\n\n- [MIPS R4000 technical overview](https://www.hotchips.org/wp-content/uploads/hc_archives/hc03/2_Mon/HC3.S1/HC3.1.2.pdf)\n\n- [MIPS pipeline info](http://www.ece.ucsb.edu/~strukov/ece154aFall2013/viewgraphs/pipelinedMIPS.pdf)\n\n- [R3000 pipeline specification](https://www.researchgate.net/publication/2643911_Pipeline_Specification_of_a_MIPS_R3000_CPU) (not necessarily applicable to the VR4300, but very likely to be the same)\n\n- [Forum post where we found some boot info](http://www.emutalk.net/threads/53938-N64-tech-documentation)\n\n- [Detailed N64 memory map](http://infrid.com/rcp64/docfiles/n64maps.txt)\n\n- [Alternate MIPS register names](http://www.cs.umd.edu/class/sum2003/cmsc311/Notes/Mips/altReg.html)\n\n\n\n## Test ROM's\n\n* [Turtle's enormous public domain ROM 
repository](https://github.com/vgturtle127/N64-PD-ROMS)\n\n* [Pouet list](http://www.pouet.net/prodlist.php?platform[0]=Nintendo+64&page=1)\n\n* [Zophar](http://www.zophar.net/pdroms/n64.html)\n\n* [PDROMs](http://pdroms.de/news/nintendo64/)\n\n* [Micro-64](http://micro-64.com/features/aafeatures.shtml)\n\n* [PeterLemon's ROMs](https://github.com/PeterLemon/N64)\n\n\n\n## Building and Running\n\nCurrently, the only dependency for building is Rust itself, which can be downloaded [here](https://www.rust-lang.org/downloads.html).\n\n\n\nAn N64 BIOS (PIF ROM) is required to boot the emulator. The ROM I've been testing with thus far has a SHA-1 of `9174eadc0f0ea2654c95fd941406ab46b9dc9bdd`.\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n## Contribution\n\n\n\nPlease read [Contribution notes](CONTRIBUTING.md) before submitting a PR!\n", "file_path": "README.md", "rank": 81, "score": 0.5360033818584742 } ]
Rust
src/signed_url.rs
mysilkway/tame-gcs
e05ff7af85bd8592f98e4deb3a8900a20d0c0152
use crate::{error::Error, signing, types::ObjectIdentifier}; use percent_encoding as perc_enc; use std::borrow::Cow; use url::Url; pub struct UrlSigner<D, S> { digester: D, signer: S, } #[cfg(feature = "signing")] impl UrlSigner<signing::RingDigest, signing::RingSigner> { pub fn with_ring() -> UrlSigner<signing::RingDigest, signing::RingSigner> { UrlSigner::new(signing::RingDigest, signing::RingSigner) } } impl<D, S> UrlSigner<D, S> where D: signing::DigestCalulator, S: signing::Signer, { pub fn new(digester: D, signer: S) -> Self { Self { digester, signer } } pub fn generate<'a, K, OID>( &self, key_provider: &K, id: &OID, optional: SignedUrlOptional<'_>, ) -> Result<Url, Error> where K: signing::KeyProvider, OID: ObjectIdentifier<'a>, { const SEVEN_DAYS: u64 = 7 * 24 * 60 * 60; if optional.duration.as_secs() > SEVEN_DAYS { return Err(Error::TooLongExpiration { requested: optional.duration.as_secs(), max: SEVEN_DAYS, }); } let mut signed_url = Url::parse("https://storage.googleapis.com").map_err(Error::UrlParse)?; let resource_path = format!( "/{}/{}", perc_enc::percent_encode(id.bucket().as_ref(), crate::util::PATH_ENCODE_SET), perc_enc::percent_encode(id.object().as_ref(), crate::util::PATH_ENCODE_SET), ); signed_url.set_path(&resource_path); let mut headers = optional.headers; headers.insert( http::header::HOST, http::header::HeaderValue::from_static("storage.googleapis.com"), ); let headers = { let mut hdrs = Vec::with_capacity(headers.keys_len()); for key in headers.keys() { let vals_size = headers .get_all(key) .iter() .fold(0, |acc, v| acc + v.len() + 1) - 1; let mut key_vals = String::with_capacity(vals_size); for (i, val) in headers.get_all(key).iter().enumerate() { if i > 0 { key_vals.push(','); } key_vals.push_str( val.to_str() .map_err(|_err| Error::OpaqueHeaderValue(val.clone()))?, ); } hdrs.push((key.as_str().to_lowercase(), key_vals)); } hdrs.sort(); hdrs }; let signed_headers = { let signed_size = headers.iter().fold(0, |acc, (name, _)| acc + 
name.len()) + headers.len() - 1; let mut names = String::with_capacity(signed_size); for (i, name) in headers.iter().map(|(name, _)| name).enumerate() { if i > 0 { names.push(';'); } names.push_str(name); } assert_eq!(signed_size, names.capacity()); names }; let timestamp = time::OffsetDateTime::now_utc(); let request_timestamp = { let year = timestamp.year(); let month = timestamp.month() as u8; let day = timestamp.day(); let hour = timestamp.hour(); let minute = timestamp.minute(); let second = timestamp.second(); format!("{year:04}{month:02}{day:02}T{hour:02}{minute:02}{second:02}Z") }; let datestamp = &request_timestamp[..8]; let credential_scope = format!("{}/{}/storage/goog4_request", datestamp, optional.region); let credential_param = format!("{}/{}", key_provider.authorizer(), credential_scope); let expiration = optional.duration.as_secs().to_string(); let mut query_params = optional.query_params; query_params.extend( [ ("X-Goog-Algorithm", "GOOG4-RSA-SHA256"), ("X-Goog-Credential", &credential_param), ("X-Goog-Date", &request_timestamp), ("X-Goog-Expires", &expiration), ("X-Goog-SignedHeaders", &signed_headers), ] .iter() .map(|(k, v)| (Cow::Borrowed(*k), Cow::Borrowed(*v))), ); query_params.sort(); let canonical_query = { { let mut query_pairs = signed_url.query_pairs_mut(); query_pairs.clear(); for (key, value) in &query_params { query_pairs.append_pair(key, value); } } signed_url.query().unwrap().to_owned() }; let canonical_headers = { let canonical_size = headers .iter() .fold(0, |acc, kv| acc + kv.0.len() + kv.1.len()) + headers.len() * 2; let mut hdrs = String::with_capacity(canonical_size); for (k, v) in &headers { hdrs.push_str(k); hdrs.push(':'); hdrs.push_str(v); hdrs.push('\n'); } assert_eq!(canonical_size, hdrs.capacity()); hdrs }; let canonical_request = format!( "{verb}\n{resource}\n{query}\n{headers}\n{signed_headers}\nUNSIGNED-PAYLOAD", verb = optional.method, resource = resource_path, query = canonical_query, headers = canonical_headers, 
signed_headers = signed_headers, ); let mut digest = [0u8; 32]; self.digester.digest( signing::DigestAlgorithm::Sha256, canonical_request.as_bytes(), &mut digest, ); let digest_str = crate::util::to_hex(&digest); let string_to_sign = format!( "GOOG4-RSA-SHA256\n{timestamp}\n{scope}\n{hash}", timestamp = request_timestamp, scope = credential_scope, hash = digest_str, ); let signature = self.signer.sign( signing::SigningAlgorithm::RsaSha256, key_provider.key(), string_to_sign.as_bytes(), )?; let signature_str = crate::util::to_hex(&signature); signed_url .query_pairs_mut() .append_pair("X-Goog-Signature", signature_str.as_str()); Ok(signed_url) } } pub struct SignedUrlOptional<'a> { pub method: http::Method, pub duration: std::time::Duration, pub headers: http::HeaderMap, pub region: &'a str, pub query_params: Vec<(Cow<'a, str>, Cow<'a, str>)>, } impl<'a> Default for SignedUrlOptional<'a> { fn default() -> Self { Self { method: http::Method::GET, duration: std::time::Duration::from_secs(60 * 60), headers: http::HeaderMap::default(), region: "auto", query_params: Vec::new(), } } }
use crate::{error::Error, signing, types::ObjectIdentifier}; use percent_encoding as perc_enc; use std::borrow::Cow; use url::Url; pub struct UrlSigner<D, S> { digester: D, signer: S, } #[cfg(feature = "signing")] impl UrlSigner<signing::RingDigest, signing::RingSigner> { pub fn with_ring() -> UrlSigner<signing::RingDigest, signing::RingSigner> { UrlSigner::new(signing::RingDigest, signing::RingSigner) } } impl<D, S> UrlSigner<D, S> where D: signing::DigestCalulator, S: signing::Signer, { pub fn new(digester: D, signer: S) -> Self { Self { digester, signer } } pub fn generate<'a, K, OID>( &self, key_provider: &K, id: &OID, optional: SignedUrlOptional<'_>, ) -> Result<Url, Error> where K: signing::KeyProvider, OID: ObjectIdentifier<'a>, { const SEVEN_DAYS: u64 = 7 * 24 * 60 * 60; if optional.duration.as_secs() > SEVEN_DAYS { return
; } let mut signed_url = Url::parse("https://storage.googleapis.com").map_err(Error::UrlParse)?; let resource_path = format!( "/{}/{}", perc_enc::percent_encode(id.bucket().as_ref(), crate::util::PATH_ENCODE_SET), perc_enc::percent_encode(id.object().as_ref(), crate::util::PATH_ENCODE_SET), ); signed_url.set_path(&resource_path); let mut headers = optional.headers; headers.insert( http::header::HOST, http::header::HeaderValue::from_static("storage.googleapis.com"), ); let headers = { let mut hdrs = Vec::with_capacity(headers.keys_len()); for key in headers.keys() { let vals_size = headers .get_all(key) .iter() .fold(0, |acc, v| acc + v.len() + 1) - 1; let mut key_vals = String::with_capacity(vals_size); for (i, val) in headers.get_all(key).iter().enumerate() { if i > 0 { key_vals.push(','); } key_vals.push_str( val.to_str() .map_err(|_err| Error::OpaqueHeaderValue(val.clone()))?, ); } hdrs.push((key.as_str().to_lowercase(), key_vals)); } hdrs.sort(); hdrs }; let signed_headers = { let signed_size = headers.iter().fold(0, |acc, (name, _)| acc + name.len()) + headers.len() - 1; let mut names = String::with_capacity(signed_size); for (i, name) in headers.iter().map(|(name, _)| name).enumerate() { if i > 0 { names.push(';'); } names.push_str(name); } assert_eq!(signed_size, names.capacity()); names }; let timestamp = time::OffsetDateTime::now_utc(); let request_timestamp = { let year = timestamp.year(); let month = timestamp.month() as u8; let day = timestamp.day(); let hour = timestamp.hour(); let minute = timestamp.minute(); let second = timestamp.second(); format!("{year:04}{month:02}{day:02}T{hour:02}{minute:02}{second:02}Z") }; let datestamp = &request_timestamp[..8]; let credential_scope = format!("{}/{}/storage/goog4_request", datestamp, optional.region); let credential_param = format!("{}/{}", key_provider.authorizer(), credential_scope); let expiration = optional.duration.as_secs().to_string(); let mut query_params = optional.query_params; query_params.extend( 
[ ("X-Goog-Algorithm", "GOOG4-RSA-SHA256"), ("X-Goog-Credential", &credential_param), ("X-Goog-Date", &request_timestamp), ("X-Goog-Expires", &expiration), ("X-Goog-SignedHeaders", &signed_headers), ] .iter() .map(|(k, v)| (Cow::Borrowed(*k), Cow::Borrowed(*v))), ); query_params.sort(); let canonical_query = { { let mut query_pairs = signed_url.query_pairs_mut(); query_pairs.clear(); for (key, value) in &query_params { query_pairs.append_pair(key, value); } } signed_url.query().unwrap().to_owned() }; let canonical_headers = { let canonical_size = headers .iter() .fold(0, |acc, kv| acc + kv.0.len() + kv.1.len()) + headers.len() * 2; let mut hdrs = String::with_capacity(canonical_size); for (k, v) in &headers { hdrs.push_str(k); hdrs.push(':'); hdrs.push_str(v); hdrs.push('\n'); } assert_eq!(canonical_size, hdrs.capacity()); hdrs }; let canonical_request = format!( "{verb}\n{resource}\n{query}\n{headers}\n{signed_headers}\nUNSIGNED-PAYLOAD", verb = optional.method, resource = resource_path, query = canonical_query, headers = canonical_headers, signed_headers = signed_headers, ); let mut digest = [0u8; 32]; self.digester.digest( signing::DigestAlgorithm::Sha256, canonical_request.as_bytes(), &mut digest, ); let digest_str = crate::util::to_hex(&digest); let string_to_sign = format!( "GOOG4-RSA-SHA256\n{timestamp}\n{scope}\n{hash}", timestamp = request_timestamp, scope = credential_scope, hash = digest_str, ); let signature = self.signer.sign( signing::SigningAlgorithm::RsaSha256, key_provider.key(), string_to_sign.as_bytes(), )?; let signature_str = crate::util::to_hex(&signature); signed_url .query_pairs_mut() .append_pair("X-Goog-Signature", signature_str.as_str()); Ok(signed_url) } } pub struct SignedUrlOptional<'a> { pub method: http::Method, pub duration: std::time::Duration, pub headers: http::HeaderMap, pub region: &'a str, pub query_params: Vec<(Cow<'a, str>, Cow<'a, str>)>, } impl<'a> Default for SignedUrlOptional<'a> { fn default() -> Self { Self { method: 
http::Method::GET, duration: std::time::Duration::from_secs(60 * 60), headers: http::HeaderMap::default(), region: "auto", query_params: Vec::new(), } } }
Err(Error::TooLongExpiration { requested: optional.duration.as_secs(), max: SEVEN_DAYS, })
call_expression
[ { "content": "/// Used to sign a block of data\n\npub trait Signer {\n\n /// Sign a block of data with the specified algorith, and a private key\n\n fn sign(\n\n &self,\n\n algorithm: SigningAlgorithm,\n\n key: Key<'_>,\n\n data: &[u8],\n\n ) -> Result<Vec<u8>, Error>;\n\n}\n\n\n\n/// Internal type use to grab the pieces of the service account we need for signing\n", "file_path": "src/signing.rs", "rank": 0, "score": 124321.60201986757 }, { "content": "/// Used to calculate a digest of payloads with a specific algorithm\n\npub trait DigestCalulator {\n\n /// Calculate a digest of a block of data, the algorithm determines the size\n\n /// of the slice used for returning the digest\n\n fn digest(&self, algorithm: DigestAlgorithm, data: &[u8], output_digest: &mut [u8]);\n\n}\n\n\n", "file_path": "src/signing.rs", "rank": 1, "score": 100168.12788958252 }, { "content": "pub fn get_content_length(headers: &http::HeaderMap) -> Option<usize> {\n\n headers.get(http::header::CONTENT_LENGTH).and_then(|h| {\n\n h.to_str()\n\n .map_err(|_err| ())\n\n .and_then(|hv| hv.parse::<u64>().map(|l| l as usize).map_err(|_err| ()))\n\n .ok()\n\n })\n\n}\n\n\n\n#[allow(clippy::trivially_copy_pass_by_ref)]\n\npub(crate) fn if_false(v: &bool) -> bool {\n\n !v\n\n}\n\n\n\npub(crate) const QUERY_ENCODE_SET: &percent_encoding::AsciiSet = &percent_encoding::CONTROLS\n\n .add(b' ')\n\n .add(b'\"')\n\n .add(b'#')\n\n .add(b'<')\n\n .add(b'>');\n", "file_path": "src/util.rs", "rank": 2, "score": 94829.31247714278 }, { "content": "fn timestamp_rfc3339_opt<'de, D>(deserializer: D) -> Result<Option<Timestamp>, D::Error>\n\nwhere\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n let ts_str: &str = Deserialize::deserialize(deserializer)?;\n\n Timestamp::parse(ts_str, &time::format_description::well_known::Rfc3339)\n\n .map_err(serde::de::Error::custom)\n\n .map(Some)\n\n}\n", "file_path": "src/v1/objects.rs", "rank": 3, "score": 83490.43567461183 }, { "content": "fn from_str_opt<'de, T, D>(deserializer: 
D) -> Result<Option<T>, D::Error>\n\nwhere\n\n T: std::str::FromStr,\n\n T::Err: std::fmt::Display,\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n let s: &str = Deserialize::deserialize(deserializer)?;\n\n T::from_str(s).map_err(serde::de::Error::custom).map(Some)\n\n}\n\n\n", "file_path": "src/v1/objects.rs", "rank": 4, "score": 81962.63322769583 }, { "content": "struct Input {\n\n svc_account: signing::ServiceAccount,\n\n bucket: String,\n\n object: String,\n\n}\n\n\n\nimpl Input {\n\n fn new() -> Self {\n\n use std::env;\n\n\n\n let ret = Self {\n\n svc_account: signing::ServiceAccount::load_json_file(\n\n env::var(\"TAME_GCS_TEST_SVC_ACCOUNT\").expect(\"failed to get service account path\"),\n\n )\n\n .expect(\"failed to load service account\"),\n\n bucket: env::var(\"TAME_GCS_TEST_BUCKET\").expect(\"failed to get test bucket\"),\n\n object: env::var(\"TAME_GCS_TEST_OBJECT\").expect(\"failed to get test object\"),\n\n };\n\n\n\n BucketName::try_from(ret.bucket.as_str()).expect(\"invalid bucket name\");\n", "file_path": "tests/signed_url.rs", "rank": 5, "score": 78722.00010890214 }, { "content": "#[derive(Deserialize, Debug, Clone)]\n\nstruct ServiceAccountInfo {\n\n /// The private key we use to sign\n\n private_key: String,\n\n /// The unique id used as the issuer of the JWT claim\n\n client_email: String,\n\n}\n\n\n", "file_path": "src/signing.rs", "rank": 6, "score": 75983.78756812081 }, { "content": "pub trait ApiResponse<B>: Sized + TryFrom<http::Response<B>, Error = Error>\n\nwhere\n\n B: AsRef<[u8]>,\n\n{\n\n fn try_from_parts(resp: http::response::Response<B>) -> Result<Self, Error> {\n\n if resp.status().is_success() {\n\n Self::try_from(resp)\n\n } else {\n\n // If we get an error, but with a JSON payload, attempt to deserialize\n\n // an ApiError from it, otherwise fallback to the simple HttpStatus\n\n if let Some(ct) = resp\n\n .headers()\n\n .get(http::header::CONTENT_TYPE)\n\n .and_then(|ct| ct.to_str().ok())\n\n {\n\n if 
ct.starts_with(\"application/json\") {\n\n if let Ok(api_err) =\n\n serde_json::from_slice::<error::ApiError>(resp.body().as_ref())\n\n {\n\n return Err(Error::Api(api_err));\n", "file_path": "src/response.rs", "rank": 7, "score": 75376.99502173436 }, { "content": "/// Provides the details needed for signing a URL\n\npub trait KeyProvider {\n\n /// The actual key used to sign the URL\n\n fn key(&self) -> Key<'_>;\n\n /// The identifier for the key author, in GCP this is the email\n\n /// address of the service account\n\n fn authorizer(&self) -> &str;\n\n}\n\n\n\n/// A [GCP service account](https://cloud.google.com/iam/docs/creating-managing-service-account-keys),\n\n/// used as a `KeyProvider` when signing URLs.\n\npub struct ServiceAccount {\n\n key: Vec<u8>,\n\n email: String,\n\n}\n\n\n\nimpl ServiceAccount {\n\n /// Attempts to load a service account from a JSON file\n\n pub fn load_json_file<P: AsRef<std::path::Path>>(path: P) -> Result<Self, Error> {\n\n let file_content = std::fs::read(path)?;\n\n Self::load_json(file_content)\n", "file_path": "src/signing.rs", "rank": 8, "score": 72729.09928880667 }, { "content": "#[test]\n\nfn delete_all_optional() {\n\n let delete_req = Object::delete(\n\n &ObjectId::new(\"bucket\", \"object\").unwrap(),\n\n Some(DeleteObjectOptional {\n\n standard_params: StandardQueryParameters {\n\n fields: Some(\"field1\"),\n\n pretty_print: true,\n\n quota_user: Some(\"some-user\"),\n\n user_ip: Some(\"some-user-ip\"),\n\n },\n\n generation: Some(1),\n\n conditionals: Conditionals {\n\n if_generation_match: Some(2),\n\n if_generation_not_match: Some(3),\n\n if_metageneration_match: Some(4),\n\n if_metageneration_not_match: Some(5),\n\n },\n\n user_project: Some(\"some-user-project\"),\n\n }),\n\n )\n", "file_path": "tests/objects.rs", "rank": 9, "score": 71001.86418545607 }, { "content": "#[test]\n\nfn delete_some_optional() {\n\n let delete_req = Object::delete(\n\n &ObjectId::new(\"bucket\", \"object\").unwrap(),\n\n 
Some(DeleteObjectOptional {\n\n generation: Some(20),\n\n conditionals: Conditionals {\n\n if_metageneration_not_match: Some(999),\n\n ..Default::default()\n\n },\n\n ..Default::default()\n\n }),\n\n )\n\n .unwrap();\n\n\n\n let expected = http::Request::builder()\n\n .method(http::Method::DELETE)\n\n .uri(\"https://www.googleapis.com/storage/v1/b/bucket/o/object?prettyPrint=false&generation=20&ifMetagenerationNotMatch=999\")\n\n .body(std::io::empty())\n\n .unwrap();\n\n\n\n util::requests_eq(&delete_req, &expected);\n\n}\n\n\n", "file_path": "tests/objects.rs", "rank": 10, "score": 71001.86418545607 }, { "content": "pub fn cmp_strings(expected: &str, actual: &str) {\n\n if expected != actual {\n\n let cs = difference::Changeset::new(expected, actual, \"\\n\");\n\n panic!(\"{}\", cs);\n\n }\n\n}\n\n\n", "file_path": "tests/util.rs", "rank": 11, "score": 67706.73608618708 }, { "content": "fn from_str<'de, T, D>(deserializer: D) -> Result<T, D::Error>\n\nwhere\n\n T: std::str::FromStr,\n\n T::Err: std::fmt::Display,\n\n D: serde::de::Deserializer<'de>,\n\n{\n\n let s: &str = Deserialize::deserialize(deserializer)?;\n\n T::from_str(s).map_err(serde::de::Error::custom)\n\n}\n\n\n", "file_path": "src/v1/objects.rs", "rank": 12, "score": 56683.65356296237 }, { "content": "pub fn requests_eq<AB: std::fmt::Debug, EB: std::fmt::Debug>(\n\n actual: &Request<AB>,\n\n expected: &Request<EB>,\n\n) {\n\n let expected = format!(\"{:#?}\", expected);\n\n let actual = format!(\"{:#?}\", actual);\n\n\n\n cmp_strings(&expected, &actual);\n\n}\n\n\n", "file_path": "tests/util.rs", "rank": 13, "score": 56363.896971661394 }, { "content": "fn url_of_sadness(u: url::Url) -> reqwest::Url {\n\n reqwest::Url::parse(u.as_str()).unwrap()\n\n}\n\n\n\n#[ignore]\n\n#[tokio::test]\n\nasync fn download_object() {\n\n let url_signer = signed_url::UrlSigner::with_ring();\n\n\n\n let input = Input::new();\n\n\n\n let signed = url_signer\n\n .generate(\n\n &input.svc_account,\n\n &(&input.bucket(), 
&input.object()),\n\n signed_url::SignedUrlOptional {\n\n //duration: std::time::Duration::from_secs(5),\n\n ..Default::default()\n\n },\n\n )\n", "file_path": "tests/signed_url.rs", "rank": 14, "score": 53771.94816639663 }, { "content": "#[allow(dead_code)]\n\npub fn requests_read_eq<AB: Read, EB: Read>(actual: Request<AB>, expected: Request<EB>) {\n\n let (ap, mut ab) = actual.into_parts();\n\n let (ep, mut eb) = expected.into_parts();\n\n\n\n let expected = format!(\"{:#?}\", ep);\n\n let actual = format!(\"{:#?}\", ap);\n\n\n\n cmp_strings(&expected, &actual);\n\n\n\n let mut act_bod = Vec::with_capacity(2 * 1024);\n\n ab.read_to_end(&mut act_bod).unwrap();\n\n\n\n let mut exp_bod = Vec::with_capacity(2 * 1024);\n\n eb.read_to_end(&mut exp_bod).unwrap();\n\n\n\n let act_body = String::from_utf8_lossy(&act_bod);\n\n let exp_body = String::from_utf8_lossy(&exp_bod);\n\n\n\n cmp_strings(&exp_body, &act_body);\n\n}\n", "file_path": "tests/util.rs", "rank": 15, "score": 50924.81677969513 }, { "content": "struct MultipartCursor {\n\n position: usize,\n\n part: MultipartPart,\n\n}\n\n\n\n/// A multipart payload that should be used as the body of a multipart\n\n/// insert request\n\npub struct Multipart<B> {\n\n body: B,\n\n prefix: bytes::Bytes,\n\n body_len: u64,\n\n total_len: u64,\n\n cursor: MultipartCursor,\n\n}\n\n\n\nimpl<B> Multipart<B> {\n\n #[cfg(feature = \"async-multipart\")]\n\n pin_utils::unsafe_pinned!(body: B);\n\n\n\n /// Wraps some body content and its metadata into a Multipart suitable for being\n", "file_path": "src/v1/objects/insert/multipart.rs", "rank": 16, "score": 46953.95372611095 }, { "content": "pub trait ObjectIdentifier<'a> {\n\n fn bucket(&self) -> &BucketName<'a>;\n\n fn object(&self) -> &ObjectName<'a>;\n\n}\n\n\n\nimpl<'a, T> ObjectIdentifier<'a> for T\n\nwhere\n\n T: AsRef<BucketName<'a>> + AsRef<ObjectName<'a>>,\n\n{\n\n fn bucket(&self) -> &BucketName<'a> {\n\n self.as_ref()\n\n }\n\n\n\n fn object(&self) -> &ObjectName<'a> {\n\n 
self.as_ref()\n\n }\n\n}\n\n\n\n/// A concrete object id which contains a valid bucket and object name\n\n/// which fully specifies an object\n", "file_path": "src/types.rs", "rank": 17, "score": 44365.98833673353 }, { "content": "#[test]\n\nfn patches() {\n\n let mut md = std::collections::BTreeMap::new();\n\n md.insert(\"yanked\".to_owned(), \"false\".to_owned());\n\n\n\n let md = objects::Metadata {\n\n metadata: Some(md),\n\n ..Default::default()\n\n };\n\n\n\n let patch_req = Object::patch(&ObjectId::new(\"bucket\", \"object\").unwrap(), &md, None).unwrap();\n\n\n\n let req_body = serde_json::to_vec(&md).unwrap();\n\n let expected_len = req_body.len();\n\n\n\n let expected = http::Request::builder()\n\n .method(http::Method::PATCH)\n\n .uri(\"https://storage.googleapis.com/storage/v1/b/bucket/o/object?prettyPrint=false\")\n\n .header(\"content-type\", \"application/json\")\n\n .header(\"content-length\", expected_len)\n\n .body(std::io::Cursor::new(req_body))\n\n .unwrap();\n\n\n\n util::requests_read_eq(patch_req, expected);\n\n}\n\n\n", "file_path": "tests/objects.rs", "rank": 18, "score": 43775.72082005427 }, { "content": "#[test]\n\nfn rewrites_token() {\n\n let rewrite_req = Object::rewrite(\n\n &ObjectId::new(\"source\", \"object/source.sh\").unwrap(),\n\n &ObjectId::new(\"target\", \"object/target.sh\").unwrap(),\n\n Some(\"tokeymctoken\".to_owned()),\n\n None,\n\n None,\n\n )\n\n .unwrap();\n\n\n\n let expected = http::Request::builder()\n\n .method(http::Method::POST)\n\n .uri(\"https://storage.googleapis.com/storage/v1/b/source/o/object%2Fsource.sh/rewriteTo/b/target/o/object%2Ftarget.sh?rewriteToken=tokeymctoken&prettyPrint=false\")\n\n .body(std::io::Cursor::new(Vec::new()))\n\n .unwrap();\n\n\n\n util::requests_read_eq(rewrite_req, expected);\n\n}\n\n\n", "file_path": "tests/objects.rs", "rank": 19, "score": 42280.28279407839 }, { "content": "#[test]\n\nfn rewrites_simple() {\n\n let rewrite_req = Object::rewrite(\n\n &ObjectId::new(\"source\", 
\"object\").unwrap(),\n\n &ObjectId::new(\"target\", \"object/target.sh\").unwrap(),\n\n None,\n\n None,\n\n None,\n\n )\n\n .unwrap();\n\n\n\n let expected = http::Request::builder()\n\n .method(http::Method::POST)\n\n .uri(\"https://storage.googleapis.com/storage/v1/b/source/o/object/rewriteTo/b/target/o/object%2Ftarget.sh?prettyPrint=false\")\n\n .body(std::io::Cursor::new(Vec::new()))\n\n .unwrap();\n\n\n\n util::requests_read_eq(rewrite_req, expected);\n\n}\n\n\n", "file_path": "tests/objects.rs", "rank": 20, "score": 42280.28279407839 }, { "content": "#[test]\n\nfn insert_vanilla() {\n\n let insert_req = Object::insert_simple(\n\n &(\n\n &BucketName::non_validated(\"bucket\"),\n\n &ObjectName::non_validated(\"object/with/deep/path\"),\n\n ),\n\n \"great content\",\n\n 13,\n\n None,\n\n )\n\n .unwrap();\n\n\n\n let expected = http::Request::builder()\n\n .method(http::Method::POST)\n\n .uri(\"https://www.googleapis.com/upload/storage/v1/b/bucket/o?name=object/with/deep/path&uploadType=media&prettyPrint=false\")\n\n .header(http::header::CONTENT_TYPE, \"application/octet-stream\")\n\n .header(http::header::CONTENT_LENGTH, 13)\n\n .body(\"great content\")\n\n .unwrap();\n\n\n\n util::requests_eq(&insert_req, &expected);\n\n}\n\n\n", "file_path": "tests/objects.rs", "rank": 21, "score": 42280.28279407839 }, { "content": "#[test]\n\nfn delete_vanilla() {\n\n let delete_req = Object::delete(\n\n &(\n\n &BucketName::non_validated(\"bucket\"),\n\n &ObjectName::non_validated(\"object/with/deep/path\"),\n\n ),\n\n None,\n\n )\n\n .unwrap();\n\n\n\n let expected = http::Request::builder()\n\n .method(http::Method::DELETE)\n\n .uri(\"https://www.googleapis.com/storage/v1/b/bucket/o/object%2Fwith%2Fdeep%2Fpath?prettyPrint=false\")\n\n .body(std::io::empty())\n\n .unwrap();\n\n\n\n util::requests_eq(&delete_req, &expected);\n\n}\n\n\n", "file_path": "tests/objects.rs", "rank": 22, "score": 42280.28279407839 }, { "content": "#[test]\n\nfn rewrites_metadata() {\n\n let mut 
md = std::collections::BTreeMap::new();\n\n md.insert(\"ohhi\".to_owned(), \"there\".to_owned());\n\n let md = objects::Metadata {\n\n metadata: Some(md),\n\n ..Default::default()\n\n };\n\n\n\n let rewrite_req = Object::rewrite(\n\n &ObjectId::new(\"source\", \"object/source.sh\").unwrap(),\n\n &ObjectId::new(\"target\", \"object/target.sh\").unwrap(),\n\n None,\n\n Some(&md),\n\n Some(objects::RewriteObjectOptional {\n\n max_bytes_rewritten_per_call: Some(20),\n\n ..Default::default()\n\n }),\n\n )\n\n .unwrap();\n\n\n", "file_path": "tests/objects.rs", "rank": 23, "score": 42280.28279407839 }, { "content": "#[test]\n\nfn vanilla_get() {\n\n let get_req = Object::get(\n\n &ObjectId::new(\"bucket\", \"test/with/path_separators\").unwrap(),\n\n None,\n\n )\n\n .unwrap();\n\n\n\n let expected = http::Request::builder()\n\n .method(http::Method::GET)\n\n .uri(\"https://www.googleapis.com/storage/v1/b/bucket/o/test%2Fwith%2Fpath_separators?alt=json&prettyPrint=false\")\n\n .body(std::io::empty())\n\n .unwrap();\n\n\n\n util::requests_eq(&get_req, &expected);\n\n}\n\n\n", "file_path": "tests/objects.rs", "rank": 24, "score": 42280.28279407839 }, { "content": "#[test]\n\nfn list_prefix_and_delimit() {\n\n let list_req = Object::list(\n\n &BucketName::non_validated(\"cache\"),\n\n Some(objects::ListOptional {\n\n delimiter: Some(\"/\"),\n\n prefix: Some(\"testing/\"),\n\n ..Default::default()\n\n }),\n\n )\n\n .unwrap();\n\n\n\n let expected = http::Request::builder()\n\n .method(http::Method::GET)\n\n .uri(\"https://www.googleapis.com/storage/v1/b/cache/o?prettyPrint=false&delimiter=%2F&prefix=testing%2F\")\n\n .body(std::io::empty())\n\n .unwrap();\n\n\n\n util::requests_eq(&list_req, &expected);\n\n}\n\n\n", "file_path": "tests/objects.rs", "rank": 25, "score": 40929.17658830393 }, { "content": "#[test]\n\nfn parses_list_response() {\n\n let body = 
r#\"{\"kind\":\"storage#objects\",\"prefixes\":[\"testing/subdir/\"],\"items\":[{\"kind\":\"storage#object\",\"id\":\"cache/testing/.gitignore/1563464155846959\",\"selfLink\":\"https://www.googleapis.com/storage/v1/b/cache/o/testing%2F.gitignore\",\"name\":\"testing/.gitignore\",\"bucket\":\"cache\",\"generation\":\"1563464155846959\",\"metageneration\":\"1\",\"contentType\":\"application/octet-stream\",\"timeCreated\":\"2019-07-18T15:35:55.846Z\",\"updated\":\"2019-07-18T15:35:55.846Z\",\"storageClass\":\"REGIONAL\",\"timeStorageClassUpdated\":\"2019-07-18T15:35:55.846Z\",\"size\":\"30\",\"md5Hash\":\"gVBKyp57x/mn4QvE+0fLvg==\",\"mediaLink\":\"https://www.googleapis.com/download/storage/v1/b/cache/o/testing%2F.gitignore?generation=1563464155846959&alt=media\",\"contentLanguage\":\"en\",\"crc32c\":\"f+2iuw==\",\"etag\":\"CK+yg+3lvuMCEAE=\"},{\"kind\":\"storage#object\",\"id\":\"cache/testing/test.zstd/1563439578444057\",\"selfLink\":\"https://www.googleapis.com/storage/v1/b/cache/o/testing%2Ftest.zstd\",\"name\":\"testing/test.zstd\",\"bucket\":\"cache\",\"generation\":\"1563439578444057\",\"metageneration\":\"1\",\"timeCreated\":\"2019-07-18T08:46:18.443Z\",\"updated\":\"2019-07-18T08:46:18.443Z\",\"storageClass\":\"REGIONAL\",\"timeStorageClassUpdated\":\"2019-07-18T08:46:18.443Z\",\"size\":\"688753933\",\"md5Hash\":\"UQVzf70LIALAl6hdKnNnnA==\",\"mediaLink\":\"https://www.googleapis.com/download/storage/v1/b/cache/o/testing%2Ftest.zstd?generation=1563439578444057&alt=media\",\"crc32c\":\"OFE4Lg==\",\"etag\":\"CJnizaWKvuMCEAE=\"}]}\"#;\n\n\n\n let response = http::Response::new(body);\n\n let list_response = objects::ListResponse::try_from(response).expect(\"parsed list response\");\n\n\n\n assert_eq!(2, list_response.objects.len());\n\n assert!(list_response.page_token.is_none());\n\n}\n\n\n", "file_path": "tests/objects.rs", "rank": 26, "score": 40929.17658830393 }, { "content": "#[cfg(feature = \"async-multipart\")]\n\n#[test]\n\nfn insert_multipart_async() 
{\n\n use futures_test::{io::AsyncReadTestExt, task::noop_context};\n\n use futures_util::{\n\n io::{AsyncRead, Cursor},\n\n pin_mut,\n\n task::Poll,\n\n };\n\n\n\n let body = TEST_CONTENT;\n\n\n\n let metadata = Metadata {\n\n name: Some(\"good_name\".to_owned()),\n\n content_type: Some(\"text/plain\".to_owned()),\n\n content_encoding: Some(\"gzip\".to_owned()),\n\n content_disposition: Some(\"attachment; filename=\\\"good name.jpg\\\"\".to_owned()),\n\n metadata: Some(\n\n [\"akey\"]\n\n .iter()\n\n .map(|k| (String::from(*k), format!(\"{}value\", k)))\n\n .collect(),\n", "file_path": "tests/objects.rs", "rank": 27, "score": 40929.17658830393 }, { "content": "#[test]\n\nfn insert_multipart_text() {\n\n let body = TEST_CONTENT;\n\n\n\n let metadata = Metadata {\n\n name: Some(\"good_name\".to_owned()),\n\n content_type: Some(\"text/plain\".to_owned()),\n\n content_encoding: Some(\"gzip\".to_owned()),\n\n content_disposition: Some(\"attachment; filename=\\\"good name.jpg\\\"\".to_owned()),\n\n metadata: Some(\n\n [\"akey\"]\n\n .iter()\n\n .map(|k| (String::from(*k), format!(\"{}value\", k)))\n\n .collect(),\n\n ),\n\n ..Default::default()\n\n };\n\n\n\n let insert_req = Object::insert_multipart(\n\n &BucketName::non_validated(\"bucket\"),\n\n std::io::Cursor::new(body),\n", "file_path": "tests/objects.rs", "rank": 28, "score": 40929.17658830393 }, { "content": "#[test]\n\nfn parses_patch_response() {\n\n let body = r#\"{\n\n \"kind\": \"storage#object\",\n\n \"id\": \"bucket/test-elf/1591708511706797\",\n\n \"selfLink\": \"https://www.googleapis.com/storage/v1/b/bucket/o/test-elf\",\n\n \"name\": \"test-elf\",\n\n \"bucket\": \"bucket\",\n\n \"generation\": \"1591708511706797\",\n\n \"metageneration\": \"2\",\n\n \"contentType\": \"application/x-elf\",\n\n \"timeCreated\": \"2020-06-09T13:15:11.706Z\",\n\n \"updated\": \"2020-06-09T13:20:53.073Z\",\n\n \"storageClass\": \"STANDARD\",\n\n \"timeStorageClassUpdated\": \"2020-06-09T13:15:11.706Z\",\n\n \"size\": 
\"11943404\",\n\n \"md5Hash\": \"oIyGCnAge5QkDf7UjVYwgQ==\",\n\n \"mediaLink\": \"https://content-storage.googleapis.com/download/storage/v1/b/bucket/o/test-elf?generation=1591708511706797&alt=media\",\n\n \"contentEncoding\": \"zstd\",\n\n \"contentDisposition\": \"attachment; filename=\\\"ark-client\\\"\",\n\n \"metadata\": {\n", "file_path": "tests/objects.rs", "rank": 29, "score": 40929.17658830393 }, { "content": "#[test]\n\nfn resumable_append() {\n\n let session = ResumableSession(\"https://killedbygoogle.com/\".parse().unwrap());\n\n let content = r#\"{\"data\":23}\"#;\n\n\n\n let append_req = Object::resumable_append(session.clone(), content, 11).unwrap();\n\n\n\n let expected = http::Request::builder()\n\n .method(http::Method::PUT)\n\n .uri(session)\n\n .header(http::header::CONTENT_LENGTH, 11i32)\n\n .body(content)\n\n .unwrap();\n\n\n\n util::requests_eq(&append_req, &expected);\n\n}\n", "file_path": "tests/resumable_insert.rs", "rank": 30, "score": 40929.17658830393 }, { "content": "#[test]\n\nfn resumable_init() {\n\n let insert_req = Object::resumable_insert_init(\n\n &(\n\n &BucketName::non_validated(\"bucket\"),\n\n &ObjectName::non_validated(\"object/with/deep/path\"),\n\n ),\n\n Some(\"application/json\"),\n\n )\n\n .unwrap();\n\n\n\n let expected = http::Request::builder()\n\n .method(http::Method::POST)\n\n .uri(\"https://www.googleapis.com/upload/storage/v1/b/bucket/o?uploadType=resumable&name=object/with/deep/path\")\n\n .header(http::header::CONTENT_LENGTH, 0)\n\n .header(http::header::HeaderName::from_static(\"x-upload-content-type\"),\n\n http::header::HeaderValue::from_str(\"application/json\").unwrap())\n\n .body(())\n\n .unwrap();\n\n\n\n util::requests_eq(&insert_req, &expected);\n\n}\n\n\n", "file_path": "tests/resumable_insert.rs", "rank": 31, "score": 40929.17658830393 }, { "content": "#[test]\n\nfn insert_json_content() {\n\n let insert_req = Object::insert_simple(\n\n &ObjectId::new(\"bucket\", \"json\").unwrap(),\n\n 
r#\"{\"data\":23}\"#,\n\n 11,\n\n Some(InsertObjectOptional {\n\n content_type: Some(\"application/json\"),\n\n content_encoding: Some(\"identity\"),\n\n ..Default::default()\n\n }),\n\n )\n\n .unwrap();\n\n\n\n let expected = http::Request::builder()\n\n .method(http::Method::POST)\n\n .uri(\"https://www.googleapis.com/upload/storage/v1/b/bucket/o?name=json&uploadType=media&prettyPrint=false&contentEncoding=identity\")\n\n .header(http::header::CONTENT_TYPE, \"application/json\")\n\n .header(http::header::CONTENT_LENGTH, 11)\n\n .body(r#\"{\"data\":23}\"#)\n\n .unwrap();\n\n\n\n util::requests_eq(&insert_req, &expected);\n\n}\n\n\n", "file_path": "tests/objects.rs", "rank": 32, "score": 40929.17658830393 }, { "content": "#[test]\n\nfn resumable_cancel() {\n\n let session = ResumableSession(\"https://killedbygoogle.com/\".parse().unwrap());\n\n\n\n let cancel_req = Object::resumable_cancel(session.clone()).unwrap();\n\n\n\n let expected = http::Request::builder()\n\n .method(http::Method::DELETE)\n\n .uri(session)\n\n .header(http::header::CONTENT_LENGTH, 0i32)\n\n .body(())\n\n .unwrap();\n\n\n\n util::requests_eq(&cancel_req, &expected);\n\n}\n\n\n", "file_path": "tests/resumable_insert.rs", "rank": 33, "score": 40929.17658830393 }, { "content": "#[test]\n\nfn multipart_read_paranoid() {\n\n // Ensure the Read implementation for Multipart works even with\n\n // a (hopefully) unrealistic case of copying 1 byte at a time\n\n let body = TEST_CONTENT;\n\n\n\n let metadata = Metadata {\n\n name: Some(\"a-really-descriptive-name\".to_owned()),\n\n content_type: Some(\"text/plain\".to_owned()),\n\n content_encoding: Some(\"deflate\".to_owned()),\n\n metadata: Some(\n\n [\"key_one\", \"key_two\", \"should_sort_first\"]\n\n .iter()\n\n .map(|k| (String::from(*k), format!(\"{}value\", k)))\n\n .collect(),\n\n ),\n\n ..Default::default()\n\n };\n\n\n\n let mut mp =\n\n objects::Multipart::wrap(std::io::Cursor::new(body), body.len() as u64, &metadata).unwrap();\n", 
"file_path": "tests/objects.rs", "rank": 34, "score": 40929.17658830393 }, { "content": "#[cfg(feature = \"async-multipart\")]\n\n#[test]\n\nfn insert_multipart_stream_bytes() {\n\n use bytes::{BufMut, Bytes, BytesMut};\n\n\n\n let metadata = Metadata {\n\n name: Some(\"good_name\".to_owned()),\n\n content_type: Some(\"text/plain\".to_owned()),\n\n content_encoding: Some(\"gzip\".to_owned()),\n\n content_disposition: Some(\"attachment; filename=\\\"good name.jpg\\\"\".to_owned()),\n\n metadata: Some(\n\n [\"akey\"]\n\n .iter()\n\n .map(|k| (String::from(*k), format!(\"{}value\", k)))\n\n .collect(),\n\n ),\n\n ..Default::default()\n\n };\n\n\n\n let insert_req = Object::insert_multipart(\n\n &BucketName::non_validated(\"bucket\"),\n\n Bytes::from(TEST_CONTENT),\n", "file_path": "tests/objects.rs", "rank": 35, "score": 39702.46890786933 }, { "content": "#[test]\n\nfn parses_empty_list_response() {\n\n let body = r#\"{\"kind\":\"storage#objects\"}\"#;\n\n\n\n let response = http::Response::new(body);\n\n let list_response = objects::ListResponse::try_from(response).expect(\"parsed list response\");\n\n\n\n assert_eq!(0, list_response.objects.len());\n\n assert!(list_response.page_token.is_none());\n\n}\n\n\n\nconst TEST_CONTENT: &str = include_str!(\"../CODE_OF_CONDUCT.md\");\n\n\n", "file_path": "tests/objects.rs", "rank": 36, "score": 39702.46890786933 }, { "content": "#[test]\n\nfn deserializes_partial_rewrite_response() {\n\n let body = r#\"{\n\n \"kind\": \"storage#rewriteResponse\",\n\n \"totalBytesRewritten\": \"435\",\n\n \"objectSize\": \"436\",\n\n \"done\": false,\n\n \"rewriteToken\": \"tokendata\"\n\n }\"#;\n\n\n\n let response = http::Response::new(body);\n\n let rewrite_response =\n\n objects::RewriteObjectResponse::try_from(response).expect(\"parsed rewrite response\");\n\n\n\n assert_eq!(rewrite_response.total_bytes_rewritten, 435);\n\n assert!(!rewrite_response.done);\n\n assert_eq!(rewrite_response.rewrite_token.unwrap(), 
\"tokendata\");\n\n}\n\n\n", "file_path": "tests/objects.rs", "rank": 37, "score": 39702.46890786933 }, { "content": "#[test]\n\nfn deserializes_complete_rewrite_response() {\n\n let body = r#\"{\n\n \"kind\": \"storage#rewriteResponse\",\n\n \"totalBytesRewritten\": \"435\",\n\n \"objectSize\": \"435\",\n\n \"done\": true,\n\n \"resource\": {\n\n \"kind\": \"storage#object\",\n\n \"id\": \"bucket/script.sh/1613655147314255\",\n\n \"selfLink\": \"https://www.googleapis.com/storage/v1/b/bucket/o/script.sh\",\n\n \"mediaLink\": \"https://content-storage.googleapis.com/download/storage/v1/b/bucket/o/script.sh?generation=1613655147314255&alt=media\",\n\n \"name\": \"script.sh\",\n\n \"bucket\": \"bucket\",\n\n \"generation\": \"1613655147314255\",\n\n \"metageneration\": \"1\",\n\n \"storageClass\": \"STANDARD\",\n\n \"size\": \"435\",\n\n \"md5Hash\": \"M8CAuwyX6GWwOnF5XxvqRw==\",\n\n \"crc32c\": \"3kHdqA==\",\n\n \"etag\": \"CM/44e7F8+4CEAE=\",\n", "file_path": "tests/objects.rs", "rank": 38, "score": 39702.46890786933 }, { "content": "#[allow(clippy::trivially_copy_pass_by_ref)]\n\nfn pretty_on(pretty_print: &bool) -> bool {\n\n *pretty_print\n\n}\n\n\n\n/// [Standard Query Parameters](https://cloud.google.com/storage/docs/json_api/v1/parameters#query)\n\n/// can be used in almost any API request to GCS\n\n#[derive(Serialize, Default)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct StandardQueryParameters<'a> {\n\n /// Selector specifying a subset of fields to include in the response,\n\n /// the primary use of this is for better performance and lower response\n\n /// sizes.\n\n /// For more information, see the [partial response](https://cloud.google.com/storage/docs/json_api/v1/how-tos/performance#partial)\n\n /// documentation.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub fields: Option<&'a str>,\n\n /// Returns the response in a human-readable format, with indentations and\n\n /// line breaks, if true. 
Note that while the default value is `true` for\n\n /// GCP, this crate uses a default of `false`\n\n #[serde(skip_serializing_if = \"pretty_on\")]\n", "file_path": "src/v1/common.rs", "rank": 39, "score": 33941.67668944393 }, { "content": " }\n\n}\n\n\n\n#[cfg(feature = \"signing\")]\n\nimpl From<ring::error::KeyRejected> for Error {\n\n fn from(re: ring::error::KeyRejected) -> Self {\n\n Error::KeyRejected(format!(\"{}\", re))\n\n }\n\n}\n\n\n\n#[cfg(feature = \"signing\")]\n\nimpl From<ring::error::Unspecified> for Error {\n\n fn from(_re: ring::error::Unspecified) -> Self {\n\n Error::Signing\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 40, "score": 31720.51299809069 }, { "content": " // I feel really bad about this\n\n true\n\n }\n\n}\n\n\n\nimpl From<http::Error> for Error {\n\n fn from(e: http::Error) -> Self {\n\n Error::Http(HttpError(e))\n\n }\n\n}\n\n\n\n#[derive(Debug, thiserror::Error)]\n\npub struct HttpStatusError(pub http::StatusCode);\n\n\n\nimpl PartialEq for HttpStatusError {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.0 == other.0\n\n }\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 41, "score": 31719.865964080873 }, { "content": "impl fmt::Display for HttpStatusError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl From<http::StatusCode> for Error {\n\n fn from(e: http::StatusCode) -> Self {\n\n Error::HttpStatus(HttpStatusError(e))\n\n }\n\n}\n\n\n\n#[derive(Debug, thiserror::Error)]\n\npub struct IoError(#[source] pub std::io::Error);\n\n\n\nimpl PartialEq for IoError {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.0.kind() == other.0.kind()\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 42, "score": 31719.386697984817 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Deserialize)]\n\npub struct ApiErrorInner {\n\n pub domain: Option<String>,\n\n pub reason: Option<String>,\n\n pub message: Option<String>,\n\n}\n\n\n\n#[derive(Debug, 
thiserror::Error, PartialEq, Deserialize)]\n\npub struct ApiError {\n\n pub code: u16,\n\n pub message: String,\n\n pub errors: Vec<ApiErrorInner>,\n\n}\n\n\n\nimpl fmt::Display for ApiError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{:#?}\", self)\n", "file_path": "src/error.rs", "rank": 43, "score": 31719.28955845669 }, { "content": "\n\nimpl fmt::Display for IoError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl From<std::io::Error> for Error {\n\n fn from(e: std::io::Error) -> Self {\n\n Error::Io(IoError(e))\n\n }\n\n}\n\n\n\n#[derive(Debug, thiserror::Error)]\n\npub struct JsonError(#[source] pub serde_json::Error);\n\n\n\nimpl fmt::Display for JsonError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n", "file_path": "src/error.rs", "rank": 44, "score": 31718.667945248555 }, { "content": "#[derive(Debug, thiserror::Error)]\n\npub struct UriError(#[source] http::uri::InvalidUri);\n\n\n\nimpl fmt::Display for UriError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl PartialEq for UriError {\n\n fn eq(&self, other: &Self) -> bool {\n\n // This is **TERRIBLE** but all of the error details are unnecessarily\n\n // private and it doesn't implement PartialEq ARGH\n\n self.0.to_string() == other.0.to_string()\n\n }\n\n}\n\n\n\nimpl From<http::uri::InvalidUri> for Error {\n\n fn from(e: http::uri::InvalidUri) -> Self {\n\n Error::InvalidUri(UriError(e))\n", "file_path": "src/error.rs", "rank": 45, "score": 31718.037292457746 }, { "content": "}\n\n\n\nimpl PartialEq for JsonError {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.0.classify() == other.0.classify()\n\n }\n\n}\n\n\n\nimpl From<serde_json::Error> for Error {\n\n fn from(e: serde_json::Error) -> Self {\n\n Error::Json(JsonError(e))\n\n }\n\n}\n\n\n\nimpl 
From<serde_urlencoded::ser::Error> for Error {\n\n fn from(e: serde_urlencoded::ser::Error) -> Self {\n\n Error::UrlEncode(e)\n\n }\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 46, "score": 31717.422690402593 }, { "content": " OpaqueHeaderValue(http::header::HeaderValue),\n\n #[error(\"I/O error occurred\")]\n\n Io(#[source] IoError),\n\n #[error(\"Unable to decode base64\")]\n\n Base64Decode(#[source] base64::DecodeError),\n\n #[error(\"Unable to encode url\")]\n\n UrlEncode(#[source] serde_urlencoded::ser::Error),\n\n}\n\n\n\n#[derive(Debug, thiserror::Error)]\n\npub struct HttpError(#[source] pub http::Error);\n\n\n\nimpl fmt::Display for HttpError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\nimpl PartialEq for HttpError {\n\n fn eq(&self, _other: &Self) -> bool {\n", "file_path": "src/error.rs", "rank": 47, "score": 31717.294026265507 }, { "content": " Http(#[source] HttpError),\n\n #[error(\"HTTP status\")]\n\n HttpStatus(#[source] HttpStatusError),\n\n #[error(\"An HTTP response didn't have a valid {0}\")]\n\n UnknownHeader(http::header::HeaderName),\n\n #[error(\"GCS API error\")]\n\n Api(#[source] ApiError),\n\n #[error(\"JSON error\")]\n\n Json(#[source] JsonError),\n\n #[error(\"Response body doesn't contain enough data\")]\n\n InsufficientData,\n\n #[error(\"Key rejected: {0}\")]\n\n KeyRejected(String),\n\n #[error(\"An error occurred during signing\")]\n\n Signing,\n\n #[error(\"An expiration duration was too long: requested = {requested}, max = {max}\")]\n\n TooLongExpiration { requested: u64, max: u64 },\n\n #[error(\"Failed to parse url\")]\n\n UrlParse(#[source] url::ParseError),\n\n #[error(\"Unable to stringize or parse header value '{0:?}'\")]\n", "file_path": "src/error.rs", "rank": 48, "score": 31715.029737125195 }, { "content": "//! 
Error facilities\n\n\n\nuse std::fmt;\n\n\n\n/// Core error type for all errors possible from tame-gcs\n\n#[derive(thiserror::Error, Debug, PartialEq)]\n\npub enum Error {\n\n #[error(\"Expected {min}-{max} characters, found {len}\")]\n\n InvalidCharacterCount { len: usize, min: usize, max: usize },\n\n #[error(\"Expected {min}-{max} bytes, found {len}\")]\n\n InvalidLength { len: usize, min: usize, max: usize },\n\n #[error(\"Character '{1}' @ {0} is not allowed\")]\n\n InvalidCharacter(usize, char),\n\n #[error(\"Prefix {0} is not allowed\")]\n\n InvalidPrefix(&'static str),\n\n #[error(\"Sequence {0} is not allowed\")]\n\n InvalidSequence(&'static str),\n\n #[error(\"Failed to parse URI\")]\n\n InvalidUri(UriError),\n\n #[error(\"HTTP error\")]\n", "file_path": "src/error.rs", "rank": 49, "score": 31713.187440348876 }, { "content": "//! Helper facilities for calculating content digests and signing data\n\n\n\nuse crate::error::Error;\n\nuse std::fmt;\n\n\n\n/// The supported algorithms for creating a digest of content\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\npub enum DigestAlgorithm {\n\n Sha256,\n\n}\n\n\n\n/// The supported algorithms for signing payloads\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq)]\n\npub enum SigningAlgorithm {\n\n RsaSha256,\n\n}\n\n\n\n/// The supported key formats\n\npub enum Key<'a> {\n\n /// Unencrypted PKCS#8 RSA private key. 
See [ring](https://briansmith.org/rustdoc/ring/signature/struct.RsaKeyPair.html#method.from_pkcs8)\n", "file_path": "src/signing.rs", "rank": 50, "score": 31141.044526845628 }, { "content": " }\n\n }\n\n}\n\n\n\n/// Implements `Signer` via [`ring`](https://briansmith.org/rustdoc/ring/signature/index.html)\n\n#[cfg(feature = \"signing\")]\n\npub struct RingSigner;\n\n\n\n#[cfg(feature = \"signing\")]\n\nimpl Signer for RingSigner {\n\n fn sign(\n\n &self,\n\n algorithm: SigningAlgorithm,\n\n key: Key<'_>,\n\n data: &[u8],\n\n ) -> Result<Vec<u8>, Error> {\n\n match algorithm {\n\n SigningAlgorithm::RsaSha256 => {\n\n let key_pair = match key {\n\n Key::Pkcs8(key) => ring::signature::RsaKeyPair::from_pkcs8(key),\n", "file_path": "src/signing.rs", "rank": 51, "score": 31139.81654462089 }, { "content": "\n\n/// Implements `DigestCalculator` via [`ring`](https://briansmith.org/rustdoc/ring/digest/index.html)\n\n#[cfg(feature = \"signing\")]\n\npub struct RingDigest;\n\n\n\n#[cfg(feature = \"signing\")]\n\nimpl DigestCalulator for RingDigest {\n\n fn digest(&self, algorithm: DigestAlgorithm, data: &[u8], output_digest: &mut [u8]) {\n\n use ring::digest;\n\n\n\n match algorithm {\n\n DigestAlgorithm::Sha256 => {\n\n assert_eq!(\n\n output_digest.len(),\n\n 32,\n\n \"output digest has invalid length for Sha256\"\n\n );\n\n let digest = digest::digest(&digest::SHA256, data);\n\n output_digest.copy_from_slice(digest.as_ref());\n\n }\n", "file_path": "src/signing.rs", "rank": 52, "score": 31139.178162903518 }, { "content": " Key::Der(key) => ring::signature::RsaKeyPair::from_der(key),\n\n Key::Hmac(_) => {\n\n return Err(Error::KeyRejected(\n\n \"HMAC cannot be used with RSA signing\".to_owned(),\n\n ))\n\n }\n\n }?;\n\n\n\n let mut signature = vec![0; key_pair.public_modulus_len()];\n\n let rng = ring::rand::SystemRandom::new();\n\n\n\n key_pair.sign(\n\n &ring::signature::RSA_PKCS1_SHA256,\n\n &rng,\n\n data,\n\n &mut signature,\n\n )?;\n\n\n\n Ok(signature)\n\n }\n", 
"file_path": "src/signing.rs", "rank": 53, "score": 31133.27275220748 }, { "content": "\n\n let key_bytes = base64::decode_config(key_string.as_bytes(), base64::STANDARD)\n\n .map_err(Error::Base64Decode)?;\n\n\n\n Ok(Self {\n\n key: key_bytes,\n\n email: info.client_email,\n\n })\n\n }\n\n}\n\n\n\nimpl KeyProvider for ServiceAccount {\n\n fn key(&self) -> Key<'_> {\n\n Key::Pkcs8(&self.key)\n\n }\n\n\n\n fn authorizer(&self) -> &str {\n\n &self.email\n\n }\n\n}\n", "file_path": "src/signing.rs", "rank": 54, "score": 31133.091998221917 }, { "content": " }\n\n\n\n /// Attempts to load a service account from a JSON byte slice\n\n pub fn load_json<B: AsRef<[u8]>>(json_data: B) -> Result<Self, Error> {\n\n let info: ServiceAccountInfo = serde_json::from_slice(json_data.as_ref())?;\n\n\n\n let key_string = info\n\n .private_key\n\n .split(\"-----\")\n\n .nth(2)\n\n .ok_or_else(|| Error::KeyRejected(\"invalid key format\".to_owned()))?;\n\n\n\n // Strip out all of the newlines\n\n let key_string = key_string.split_whitespace().fold(\n\n String::with_capacity(key_string.len()),\n\n |mut s, line| {\n\n s.push_str(line);\n\n s\n\n },\n\n );\n", "file_path": "src/signing.rs", "rank": 55, "score": 31132.39262156562 }, { "content": " /// for more information\n\n Pkcs8(&'a [u8]),\n\n /// Uncencrypted RSA private key that isn't wrapped in PKCS#8. 
See [ring](https://briansmith.org/rustdoc/ring/signature/struct.RsaKeyPair.html#method.from_der)\n\n /// for more information\n\n Der(&'a [u8]),\n\n /// See [ring](https://briansmith.org/rustdoc/ring/hmac/index.html) for more information.\n\n Hmac(&'a [u8]),\n\n}\n\n\n\nimpl<'a> fmt::Debug for Key<'a> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let name = match self {\n\n Key::Pkcs8(_) => \"pkcs8\",\n\n Key::Der(_) => \"der\",\n\n Key::Hmac(_) => \"hmac\",\n\n };\n\n\n\n write!(f, \"{}\", name)\n\n }\n\n}\n\n\n\n/// Used to calculate a digest of payloads with a specific algorithm\n", "file_path": "src/signing.rs", "rank": 56, "score": 31131.518202170726 }, { "content": " }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n #[test]\n\n fn loads_svc_account() {\n\n use super::KeyProvider;\n\n\n\n let acct = super::ServiceAccount::load_json_file(\"./tests/test_account.json\").unwrap();\n\n\n\n match acct.key() {\n\n super::Key::Pkcs8(_) => {}\n\n key => panic!(\"invalid key format {:?}\", key),\n\n }\n\n\n\n assert_eq!(\n\n acct.authorizer(),\n\n \"[email protected]\"\n\n );\n\n }\n\n}\n", "file_path": "src/signing.rs", "rank": 57, "score": 31130.23376377926 }, { "content": " .expect(\"signed url\");\n\n\n\n let response = Client::new()\n\n .get(url_of_sadness(signed))\n\n .send()\n\n .await\n\n .expect(\"sent request\")\n\n .error_for_status()\n\n .expect(\"successful request\");\n\n\n\n response.bytes().await.expect(\"read body\");\n\n}\n\n\n\n#[ignore]\n\n#[tokio::test]\n\nasync fn gets_failure_responses_for_expired_urls() {\n\n let url_signer = signed_url::UrlSigner::with_ring();\n\n\n\n let input = Input::new();\n\n\n", "file_path": "tests/signed_url.rs", "rank": 65, "score": 29472.472901186884 }, { "content": " let signed = url_signer\n\n .generate(\n\n &input.svc_account,\n\n &(&input.bucket(), &input.object()),\n\n signed_url::SignedUrlOptional {\n\n duration: std::time::Duration::from_secs(1),\n\n ..Default::default()\n\n },\n\n )\n\n 
.expect(\"signed url\");\n\n\n\n std::thread::sleep(std::time::Duration::from_millis(1500));\n\n\n\n let response = Client::new()\n\n .get(url_of_sadness(signed))\n\n .send()\n\n .await\n\n .expect(\"sent request\");\n\n\n\n // We should get a failure response when trying to access a resource past its expiration\n\n assert_eq!(response.status(), 400);\n\n}\n", "file_path": "tests/signed_url.rs", "rank": 67, "score": 29471.5751956613 }, { "content": "#![cfg(feature = \"signing\")]\n\n\n\nuse reqwest::Client;\n\nuse tame_gcs::{signed_url, signing, BucketName, ObjectName};\n\n\n", "file_path": "tests/signed_url.rs", "rank": 68, "score": 29471.234419570344 }, { "content": " ObjectName::try_from(ret.object.as_str()).expect(\"invalid object name\");\n\n\n\n ret\n\n }\n\n\n\n fn bucket(&self) -> BucketName<'_> {\n\n BucketName::try_from(self.bucket.as_str()).unwrap()\n\n }\n\n\n\n fn object(&self) -> ObjectName<'_> {\n\n ObjectName::try_from(self.object.as_str()).unwrap()\n\n }\n\n}\n\n\n", "file_path": "tests/signed_url.rs", "rank": 76, "score": 29463.1191021581 }, { "content": " fn try_from(response: http::Response<B>) -> Result<Self, Self::Error> {\n\n let (_parts, body) = response.into_parts();\n\n let metadata: super::Metadata = serde_json::from_slice(body.as_ref())?;\n\n Ok(Self { metadata })\n\n }\n\n}\n\n\n\nimpl super::Object {\n\n /// Gets an object's metadata\n\n ///\n\n /// Required IAM Permissions: `storage.objects.get`, `storage.objects.getIamPolicy`*\n\n ///\n\n /// [Complete API Documentation](https://cloud.google.com/storage/docs/json_api/v1/objects/get)\n\n pub fn get<'a, OID>(\n\n id: &OID,\n\n optional: Option<GetObjectOptional<'_>>,\n\n ) -> Result<http::Request<std::io::Empty>, Error>\n\n where\n\n OID: ObjectIdentifier<'a> + ?Sized,\n\n {\n", "file_path": "src/v1/objects/get.rs", "rank": 77, "score": 18.34774280548185 }, { "content": " }\n\n}\n\n\n\nimpl super::Object {\n\n /// Deletes an object and its metadata. 
Deletions are permanent if versioning\n\n /// is not enabled for the bucket, or if the generation parameter is used.\n\n ///\n\n /// Required IAM Permissions: `storage.objects.delete`\n\n ///\n\n /// [Complete API documentation](https://cloud.google.com/storage/docs/json_api/v1/objects/delete)\n\n pub fn delete<'a, OID>(\n\n id: &OID,\n\n optional: Option<DeleteObjectOptional<'_>>,\n\n ) -> Result<http::Request<std::io::Empty>, Error>\n\n where\n\n OID: ObjectIdentifier<'a> + ?Sized,\n\n {\n\n let mut uri = crate::__make_obj_url!(\"https://www.googleapis.com/storage/v1/b/{}/o/{}\", id);\n\n\n\n let query = optional.unwrap_or_default();\n", "file_path": "src/v1/objects/delete.rs", "rank": 78, "score": 17.492079147295247 }, { "content": "pub struct ObjectId<'a> {\n\n pub bucket: BucketName<'a>,\n\n pub object: ObjectName<'a>,\n\n}\n\n\n\nimpl<'a> ObjectId<'a> {\n\n pub fn new<B, O>(bucket: B, object: O) -> Result<Self, Error>\n\n where\n\n B: std::convert::TryInto<BucketName<'a>, Error = Error> + ?Sized,\n\n O: std::convert::TryInto<ObjectName<'a>, Error = Error> + ?Sized,\n\n {\n\n Ok(Self {\n\n bucket: bucket.try_into()?,\n\n object: object.try_into()?,\n\n })\n\n }\n\n}\n\n\n\nimpl<'a> AsRef<BucketName<'a>> for ObjectId<'a> {\n\n fn as_ref(&self) -> &BucketName<'a> {\n", "file_path": "src/types.rs", "rank": 79, "score": 17.225242040016067 }, { "content": " pub user_project: Option<&'a str>,\n\n}\n\n\n\npub struct DeleteObjectResponse;\n\n\n\nimpl ApiResponse<&[u8]> for DeleteObjectResponse {}\n\nimpl ApiResponse<bytes::Bytes> for DeleteObjectResponse {}\n\n\n\nimpl<B> TryFrom<http::Response<B>> for DeleteObjectResponse\n\nwhere\n\n B: AsRef<[u8]>,\n\n{\n\n type Error = Error;\n\n\n\n fn try_from(response: http::Response<B>) -> Result<Self, Self::Error> {\n\n if response.status() == http::StatusCode::NO_CONTENT {\n\n Ok(Self)\n\n } else {\n\n Err(Self::Error::from(response.status()))\n\n }\n", "file_path": "src/v1/objects/delete.rs", "rank": 80, "score": 
15.68406194442061 }, { "content": " type Error = Error;\n\n\n\n fn try_from(response: http::Response<B>) -> Result<Self, Self::Error> {\n\n let (_parts, body) = response.into_parts();\n\n Ok(serde_json::from_slice(body.as_ref())?)\n\n }\n\n}\n\n\n\nimpl super::Object {\n\n /// Rewrites a source object to a destination object. Optionally overrides metadata.\n\n ///\n\n /// Required IAM Permissions:\n\n /// * `storage.objects.create` (for the destination bucket)\n\n /// * `storage.objects.delete` (for the destination bucket)\n\n /// * `storage.objects.get` (for the source bucket)\n\n ///\n\n /// [Complete API Documentation](https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite)\n\n pub fn rewrite<'a, OID>(\n\n source: &OID,\n\n destination: &OID,\n", "file_path": "src/v1/objects/rewrite.rs", "rank": 81, "score": 15.582724235434686 }, { "content": " use bytes::Buf;\n\n\n\n let buf_len = std::cmp::min(self.buffer.len(), buf.len());\n\n let mut slice = self.buffer.split_to(buf_len);\n\n slice.copy_to_slice(&mut buf[..buf_len]);\n\n\n\n Ok(buf_len)\n\n }\n\n}\n\n\n\nimpl super::Object {\n\n /// Downloads an object\n\n ///\n\n /// Required IAM Permissions: `storage.objects.get`, `storage.objects.getIamPolicy`*\n\n ///\n\n /// [Complete API Documentation](https://cloud.google.com/storage/docs/json_api/v1/objects/get)\n\n pub fn download<'a, OID>(\n\n id: &OID,\n\n optional: Option<DownloadObjectOptional<'_>>,\n\n ) -> Result<http::Request<std::io::Empty>, Error>\n", "file_path": "src/v1/objects/download.rs", "rank": 82, "score": 15.580622943413106 }, { "content": " ///\n\n /// [Complete API Documentation](https://cloud.google.com/storage/docs/json_api/v1/objects/insert)\n\n pub fn insert_simple<'a, OID, B>(\n\n id: &OID,\n\n content: B,\n\n length: u64,\n\n optional: Option<InsertObjectOptional<'_>>,\n\n ) -> Result<http::Request<B>, Error>\n\n where\n\n OID: ObjectIdentifier<'a> + ?Sized,\n\n {\n\n let mut uri = format!(\n\n 
\"https://www.googleapis.com/upload/storage/v1/b/{}/o?name={}&uploadType=media\",\n\n percent_encoding::percent_encode(id.bucket().as_ref(), crate::util::PATH_ENCODE_SET,),\n\n percent_encoding::percent_encode(id.object().as_ref(), crate::util::QUERY_ENCODE_SET,),\n\n );\n\n\n\n let query = optional.unwrap_or_default();\n\n\n\n let req_builder = http::Request::builder()\n", "file_path": "src/v1/objects/insert.rs", "rank": 83, "score": 15.056470801798467 }, { "content": " pub objects: Vec<super::Metadata>,\n\n /// The list of prefixes of objects matching-but-not-listed up to\n\n /// and including the requested delimiter.\n\n pub prefixes: Vec<String>,\n\n /// The continuation token, included only if there are more items to return.\n\n /// Provide this value as the page_token of a subsequent request in order\n\n /// to return the next page of results.\n\n pub page_token: Option<String>,\n\n}\n\n\n\nimpl ApiResponse<&[u8]> for ListResponse {}\n\nimpl ApiResponse<bytes::Bytes> for ListResponse {}\n\n\n\nimpl<B> TryFrom<http::Response<B>> for ListResponse\n\nwhere\n\n B: AsRef<[u8]>,\n\n{\n\n type Error = Error;\n\n\n\n fn try_from(response: http::Response<B>) -> Result<Self, Self::Error> {\n", "file_path": "src/v1/objects/list.rs", "rank": 84, "score": 14.426851825712394 }, { "content": "}\n\n\n\nimpl<'a> TryFrom<String> for BucketName<'a> {\n\n type Error = Error;\n\n\n\n fn try_from(n: String) -> Result<Self, Self::Error> {\n\n Self::validate(&n)?;\n\n\n\n Ok(Self {\n\n name: Cow::Owned(n),\n\n })\n\n }\n\n}\n\n\n\n/// A wrapper for strings meant to be used as object names, to validate\n\n/// that they follow [Object Name Requirements](https://cloud.google.com/storage/docs/naming#objectnames)\n\n#[derive(Debug)]\n\npub struct ObjectName<'a> {\n\n name: Cow<'a, str>,\n\n}\n", "file_path": "src/types.rs", "rank": 85, "score": 14.357994540130212 }, { "content": "#[macro_use(Deserialize, Serialize)]\n\nextern crate serde;\n\n\n\n#[cfg(feature = \"v1\")]\n\nmod 
v1;\n\n#[cfg(feature = \"v1\")]\n\npub use crate::v1::*;\n\n\n\npub mod error;\n\nmod response;\n\npub mod signed_url;\n\npub mod signing;\n\npub mod types;\n\npub mod util;\n\n\n\n// Reexport the http crate since everything this crate does\n\n// is put in terms of http request/response\n\npub use http;\n\n\n\npub use error::Error;\n", "file_path": "src/lib.rs", "rank": 86, "score": 14.32267001450953 }, { "content": " #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub projection: Option<Projection>,\n\n /// The project to be billed for this request. Required for Requester Pays buckets.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub user_project: Option<&'a str>,\n\n}\n\n\n\npub struct GetObjectResponse {\n\n pub metadata: super::Metadata,\n\n}\n\n\n\nimpl ApiResponse<&[u8]> for GetObjectResponse {}\n\nimpl ApiResponse<bytes::Bytes> for GetObjectResponse {}\n\n\n\nimpl<B> TryFrom<http::Response<B>> for GetObjectResponse\n\nwhere\n\n B: AsRef<[u8]>,\n\n{\n\n type Error = Error;\n\n\n", "file_path": "src/v1/objects/get.rs", "rank": 87, "score": 13.32602458644454 }, { "content": " /// Updates a data blob's associated metadata.\n\n ///\n\n /// Required IAM Permissions: `storage.objects.get`, `storage.objects.update`\n\n ///\n\n /// [Complete API documentation](https://cloud.google.com/storage/docs/json_api/v1/objects/patch)\n\n pub fn patch<'a, OID>(\n\n id: &OID,\n\n metadata: &super::Metadata,\n\n optional: Option<PatchObjectOptional<'_>>,\n\n ) -> Result<http::Request<std::io::Cursor<Vec<u8>>>, Error>\n\n where\n\n OID: ObjectIdentifier<'a> + ?Sized,\n\n {\n\n let mut uri =\n\n crate::__make_obj_url!(\"https://storage.googleapis.com/storage/v1/b/{}/o/{}\", id);\n\n\n\n let query = optional.unwrap_or_default();\n\n let query_params = serde_urlencoded::to_string(query)?;\n\n if !query_params.is_empty() {\n\n uri.push('?');\n", "file_path": "src/v1/objects/patch.rs", "rank": 88, "score": 13.016167428591451 }, { "content": "//! 
Helper types for working with GCS\n\n\n\nuse crate::error::Error;\n\nuse std::borrow::Cow;\n\n\n\n/// A wrapper around strings meant to be used as bucket names,\n\n/// to validate they conform to [Bucket Name Requirements](https://cloud.google.com/storage/docs/naming#requirements)\n\n#[derive(Debug)]\n\npub struct BucketName<'a> {\n\n name: Cow<'a, str>,\n\n}\n\n\n\nimpl<'a> BucketName<'a> {\n\n /// Creates a [`BucketName`] without validating it, meaning\n\n /// that invalid names will result in API failures when\n\n /// requests are actually made to GCS instead.\n\n pub fn non_validated<S: AsRef<str> + ?Sized>(name: &'a S) -> Self {\n\n Self {\n\n name: Cow::Borrowed(name.as_ref()),\n\n }\n", "file_path": "src/types.rs", "rank": 89, "score": 12.965939255359222 }, { "content": "use crate::{\n\n common::{Conditionals, PredefinedAcl, Projection, StandardQueryParameters},\n\n error::{self, Error},\n\n response::ApiResponse,\n\n types::ObjectIdentifier,\n\n};\n\n\n\nmod multipart;\n\nmod resumable;\n\n\n\npub use multipart::*;\n\npub use resumable::*;\n\n\n\n/// Optional parameters when inserting an object.\n\n/// See [here](https://cloud.google.com/storage/docs/json_api/v1/objects/insert#parameters)\n\n#[derive(Default, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct InsertObjectOptional<'a> {\n\n #[serde(flatten)]\n\n pub standard_params: StandardQueryParameters<'a>,\n", "file_path": "src/v1/objects/insert.rs", "rank": 90, "score": 12.614148433986209 }, { "content": "use crate::{\n\n common::{Projection, StandardQueryParameters},\n\n error::Error,\n\n response::ApiResponse,\n\n types::BucketName,\n\n};\n\n\n\n#[derive(Default, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct ListOptional<'a> {\n\n #[serde(flatten)]\n\n pub standard_params: StandardQueryParameters<'a>,\n\n /// Returns results in a directory-like mode. items will contain\n\n /// only objects whose names, aside from the prefix, do not contain\n\n /// delimiter. 
Objects whose names, aside from the prefix, contain\n\n /// delimiter will have their name, truncated after the delimiter,\n\n /// returned in prefixes. Duplicate prefixes are omitted.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub delimiter: Option<&'a str>,\n\n /// If true, objects that end in exactly one instance of delimiter\n", "file_path": "src/v1/objects/list.rs", "rank": 91, "score": 12.609308926234505 }, { "content": " pub metadata: super::Metadata,\n\n}\n\n\n\nimpl ApiResponse<&[u8]> for PatchObjectResponse {}\n\nimpl ApiResponse<bytes::Bytes> for PatchObjectResponse {}\n\n\n\nimpl<B> TryFrom<http::Response<B>> for PatchObjectResponse\n\nwhere\n\n B: AsRef<[u8]>,\n\n{\n\n type Error = Error;\n\n\n\n fn try_from(response: http::Response<B>) -> Result<Self, Self::Error> {\n\n let (_parts, body) = response.into_parts();\n\n let metadata: super::Metadata = serde_json::from_slice(body.as_ref())?;\n\n Ok(Self { metadata })\n\n }\n\n}\n\n\n\nimpl super::Object {\n", "file_path": "src/v1/objects/patch.rs", "rank": 92, "score": 12.4756533401731 }, { "content": "use crate::{\n\n common::{Conditionals, Projection, StandardQueryParameters},\n\n error::Error,\n\n response::ApiResponse,\n\n types::ObjectIdentifier,\n\n};\n\nuse std::io;\n\n\n\n#[derive(Default, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct DownloadObjectOptional<'a> {\n\n #[serde(flatten)]\n\n pub standard_params: StandardQueryParameters<'a>,\n\n /// If present, selects a specific revision of this object\n\n /// (as opposed to the latest version, the default).\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub generation: Option<i64>,\n\n #[serde(flatten)]\n\n pub conditionals: Conditionals,\n\n /// Set of properties to return. 
Defaults to `noAcl`, unless the object\n", "file_path": "src/v1/objects/download.rs", "rank": 93, "score": 12.450845644169595 }, { "content": "/// associated with an Object.\n\n#[derive(Default, Serialize, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Metadata {\n\n /// The ID of the object, including the bucket name, object name, and generation number.\n\n #[serde(skip_serializing)]\n\n pub id: Option<String>,\n\n /// The link to this object.\n\n #[serde(skip_serializing)]\n\n pub self_link: Option<String>,\n\n /// The name of the object. Required if not specified by URL parameter. **writable**\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub name: Option<String>,\n\n /// The name of the bucket containing this object.\n\n #[serde(skip_serializing)]\n\n pub bucket: Option<String>,\n\n /// The content generation of this object. Used for object versioning.\n\n #[serde(default, skip_serializing, deserialize_with = \"from_str_opt\")]\n\n pub generation: Option<i64>,\n\n /// The version of the metadata for this object at this generation.\n", "file_path": "src/v1/objects.rs", "rank": 94, "score": 12.254017746327909 }, { "content": " #[serde(deserialize_with = \"crate::objects::from_str\")]\n\n pub total_bytes_rewritten: u64,\n\n /// The total size of the original source object\n\n #[serde(deserialize_with = \"crate::objects::from_str\")]\n\n pub object_size: u64,\n\n /// Indicates if the rewrite is finished or not\n\n pub done: bool,\n\n /// If done is false, this will be Some() and it must be specified in each\n\n /// additional rewrite call until done is true\n\n pub rewrite_token: Option<String>,\n\n #[serde(rename = \"resource\")]\n\n pub metadata: Option<super::Metadata>,\n\n}\n\n\n\nimpl ApiResponse<bytes::Bytes> for RewriteObjectResponse {}\n\n\n\nimpl<B> TryFrom<http::Response<B>> for RewriteObjectResponse\n\nwhere\n\n B: AsRef<[u8]>,\n\n{\n", "file_path": "src/v1/objects/rewrite.rs", "rank": 95, "score": 12.183635747275066 }, 
{ "content": " /// Initiates a resumable upload session.\n\n ///\n\n /// * Accepted Media MIME types: `*/*`\n\n ///\n\n /// Note: A resumable upload must be completed within a week of being initiated.\n\n ///\n\n /// **CAUTION**: Be careful when sharing the resumable session URI, because\n\n /// it can be used by anyone to upload data to the target bucket without any\n\n /// further authentication.\n\n /// \n\n /// Required IAM Permissions: `storage.objects.create`, `storage.objects.delete`\n\n ///\n\n /// Note: `storage.objects.delete` is only needed if an object with the same\n\n /// name already exists.\n\n ///\n\n /// [Complete API Documentation](https://cloud.google.com/storage/docs/performing-resumable-uploads#initiate-session)\n\n pub fn resumable_insert_init<'a, OID>(\n\n id: &OID,\n\n content_type: Option<&str>,\n\n ) -> Result<http::Request<()>, Error>\n", "file_path": "src/v1/objects/insert/resumable.rs", "rank": 96, "score": 12.175282576552139 }, { "content": "use crate::{\n\n common::{Conditionals, Projection, StandardQueryParameters},\n\n error::Error,\n\n response::ApiResponse,\n\n types::ObjectIdentifier,\n\n};\n\n\n\n#[derive(Default, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct RewriteObjectOptional<'a> {\n\n #[serde(flatten)]\n\n pub standard_params: StandardQueryParameters<'a>,\n\n /// Resource name of the Cloud KMS key that will be used to encrypt the\n\n /// object. 
The Cloud KMS key must be located in same location as the object.\n\n ///\n\n /// If the parameter is not specified, the method uses the destination\n\n /// bucket's default encryption key, if any, or the Google-managed encryption\n\n /// key.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub destination_kms_key_name: Option<String>,\n", "file_path": "src/v1/objects/rewrite.rs", "rank": 97, "score": 12.140395597457967 }, { "content": "use crate::{\n\n common::{Conditionals, StandardQueryParameters},\n\n error::Error,\n\n response::ApiResponse,\n\n types::ObjectIdentifier,\n\n};\n\n\n\n#[derive(Default, Serialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct PatchObjectOptional<'a> {\n\n #[serde(flatten)]\n\n pub standard_params: StandardQueryParameters<'a>,\n\n #[serde(flatten)]\n\n pub conditionals: Conditionals,\n\n /// The project to be billed for this request. Required for Requester Pays buckets.\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub user_project: Option<&'a str>,\n\n}\n\n\n\npub struct PatchObjectResponse {\n", "file_path": "src/v1/objects/patch.rs", "rank": 98, "score": 12.088462731858446 }, { "content": "use super::*;\n\nuse crate::objects::{Metadata, Object};\n\n\n\n#[derive(Clone)]\n\npub struct ResumableSession(pub http::Uri);\n\n\n\nimpl From<ResumableSession> for http::Uri {\n\n fn from(rs: ResumableSession) -> Self {\n\n rs.0\n\n }\n\n}\n\n\n\n/// The response from an [`Object::init_resumable_insert`] request is the\n\n/// `session_uri`.\n\npub struct InitResumableInsertResponse {\n\n pub resumable_session: ResumableSession,\n\n}\n\n\n\nimpl ApiResponse<&[u8]> for InitResumableInsertResponse {}\n\nimpl ApiResponse<bytes::Bytes> for InitResumableInsertResponse {}\n", "file_path": "src/v1/objects/insert/resumable.rs", "rank": 99, "score": 12.03176987320698 } ]
Rust
remote-trait-object-macro/src/service/dispatcher.rs
junha1/baselink-rs
51480ea1635a7c88ad4a13a76d8133aa9110233f
use super::MacroArgs; use crate::create_env_path; use proc_macro2::{Span, TokenStream as TokenStream2}; pub(super) fn generate_dispatcher( source_trait: &syn::ItemTrait, args: &MacroArgs, ) -> Result<TokenStream2, TokenStream2> { if args.no_skeleton { return Ok(TokenStream2::new()); } let env_path = create_env_path(); let trait_ident = source_trait.ident.clone(); let box_dispatcher_ident = quote::format_ident!("{}BoxDispatcher", trait_ident); let arc_dispatcher_ident = quote::format_ident!("{}ArcDispatcher", trait_ident); let rwlock_dispatcher_ident = quote::format_ident!("{}RwLockDispatcher", trait_ident); let serde_format = &args.serde_format; let mut if_else_clauses = TokenStream2::new(); let mut if_else_clauses_rwlock = TokenStream2::new(); let mut is_this_trait_mutable = false; for item in source_trait.items.iter() { let method = match item { syn::TraitItem::Method(x) => x, non_method => { return Err(syn::Error::new_spanned( non_method, "Service trait must have only methods", ) .to_compile_error()) } }; let id_ident = super::id::id_method_ident(source_trait, method); let mut the_let_pattern = syn::PatTuple { attrs: Vec::new(), paren_token: syn::token::Paren(Span::call_site()), elems: syn::punctuated::Punctuated::new(), }; let mut type_annotation = syn::TypeTuple { paren_token: syn::token::Paren(Span::call_site()), elems: syn::punctuated::Punctuated::new(), }; let mut the_args: syn::punctuated::Punctuated<syn::Expr, syn::token::Comma> = syn::punctuated::Punctuated::new(); let no_self = "All your method must take &self or &mut self (Object safety)"; let mut_self = match method .sig .inputs .first() .ok_or_else(|| syn::Error::new_spanned(method, no_self).to_compile_error())? { syn::FnArg::Typed(_) => { return Err(syn::Error::new_spanned(method, no_self).to_compile_error()) } syn::FnArg::Receiver(syn::Receiver { mutability: Some(_), .. 
}) => true, _ => false, }; is_this_trait_mutable |= mut_self; for (j, arg_source) in method.sig.inputs.iter().skip(1).enumerate() { let the_iden = quote::format_ident!("a{}", j + 1); the_let_pattern.elems.push(syn::Pat::Ident(syn::PatIdent { attrs: Vec::new(), by_ref: None, mutability: None, ident: the_iden, subpat: None, })); the_let_pattern .elems .push_punct(syn::token::Comma(Span::call_site())); let arg_type = match arg_source { syn::FnArg::Typed(syn::PatType { attrs: _, pat: _, colon_token: _, ty: t, }) => &**t, _ => panic!(), }; if let Some(unrefed_type) = crate::helper::is_ref(arg_type) .map_err(|e| syn::Error::new_spanned(arg_source, &e).to_compile_error())? { type_annotation.elems.push(unrefed_type); } else { type_annotation.elems.push(arg_type.clone()); } type_annotation .elems .push_punct(syn::token::Comma(Span::call_site())); let arg_ident = quote::format_ident!("a{}", j + 1); let the_arg = if crate::helper::is_ref(arg_type) .map_err(|e| syn::Error::new_spanned(arg_source, &e).to_compile_error())? .is_some() { quote! { &#arg_ident } } else { quote! { #arg_ident } }; the_args.push(syn::parse2(the_arg).unwrap()); } let stmt_deserialize = quote! { let #the_let_pattern: #type_annotation = <#serde_format as #env_path::SerdeFormat>::from_slice(args).unwrap(); }; let method_name = method.sig.ident.clone(); let stmt_call = quote! { let result = self.object.#method_name(#the_args); }; let stmt_call_rwlock = if mut_self { quote! { let result = self.object.write().#method_name(#the_args); } } else { quote! { let result = self.object.read().#method_name(#the_args); } }; let the_return = quote! { return <#serde_format as #env_path::SerdeFormat>::to_vec(&result).unwrap(); }; if_else_clauses.extend(quote! { if method == #id_ident.load(#env_path::ID_ORDERING) { #stmt_deserialize #stmt_call #the_return } }); if_else_clauses_rwlock.extend(quote! 
{ if method == #id_ident.load(#env_path::ID_ORDERING) { #stmt_deserialize #stmt_call_rwlock #the_return } }); } if_else_clauses.extend(quote! { panic!("Invalid remote-trait-object call. Fatal Error.") }); if_else_clauses_rwlock.extend(quote! { panic!("Invalid remote-trait-object call. Fatal Error.") }); let box_dispatcher = if is_this_trait_mutable { quote! { #[doc(hidden)] pub struct #box_dispatcher_ident { object: parking_lot::RwLock<Box<dyn #trait_ident>> } impl #box_dispatcher_ident { fn new(object: Box<dyn #trait_ident>) -> Self { Self { object: parking_lot::RwLock::new(object) } } } impl #env_path::Dispatch for #box_dispatcher_ident { fn dispatch_and_call(&self, method: #env_path::MethodId, args: &[u8]) -> Vec<u8> { #if_else_clauses_rwlock } } impl #env_path::IntoSkeleton<dyn #trait_ident> for Box<dyn #trait_ident> { fn into_skeleton(self) -> #env_path::Skeleton { #env_path::create_skeleton(std::sync::Arc::new(#box_dispatcher_ident::new(self))) } } } } else { quote! { #[doc(hidden)] pub struct #box_dispatcher_ident { object: Box<dyn #trait_ident> } impl #box_dispatcher_ident { fn new(object: Box<dyn #trait_ident>) -> Self { Self { object } } } impl #env_path::Dispatch for #box_dispatcher_ident { fn dispatch_and_call(&self, method: #env_path::MethodId, args: &[u8]) -> Vec<u8> { #if_else_clauses } } impl #env_path::IntoSkeleton<dyn #trait_ident> for Box<dyn #trait_ident> { fn into_skeleton(self) -> #env_path::Skeleton { #env_path::create_skeleton(std::sync::Arc::new(#box_dispatcher_ident::new(self))) } } } }; let arc_dispatcher = if is_this_trait_mutable { quote! {} } else { quote! 
{ #[doc(hidden)] pub struct #arc_dispatcher_ident { object: std::sync::Arc<dyn #trait_ident> } impl #arc_dispatcher_ident { fn new(object: std::sync::Arc<dyn #trait_ident>) -> Self { Self { object } } } impl #env_path::Dispatch for #arc_dispatcher_ident { fn dispatch_and_call(&self, method: #env_path::MethodId, args: &[u8]) -> Vec<u8> { #if_else_clauses } } impl #env_path::IntoSkeleton<dyn #trait_ident> for std::sync::Arc<dyn #trait_ident> { fn into_skeleton(self) -> #env_path::Skeleton { #env_path::create_skeleton(std::sync::Arc::new(#arc_dispatcher_ident::new(self))) } } } }; let rwlock_dispatcher = quote! { #[doc(hidden)] pub struct #rwlock_dispatcher_ident { object: std::sync::Arc<parking_lot::RwLock<dyn #trait_ident>> } impl #rwlock_dispatcher_ident { fn new(object: std::sync::Arc<parking_lot::RwLock<dyn #trait_ident>>) -> Self { Self { object } } } impl #env_path::Dispatch for #rwlock_dispatcher_ident { fn dispatch_and_call(&self, method: #env_path::MethodId, args: &[u8]) -> Vec<u8> { #if_else_clauses_rwlock } } impl #env_path::IntoSkeleton<dyn #trait_ident> for std::sync::Arc<parking_lot::RwLock<dyn #trait_ident>> { fn into_skeleton(self) -> #env_path::Skeleton { #env_path::create_skeleton(std::sync::Arc::new(#rwlock_dispatcher_ident::new(self))) } } }; Ok(quote! { #box_dispatcher #arc_dispatcher #rwlock_dispatcher }) }
use super::MacroArgs; use crate::create_env_path; use proc_macro2::{Span, TokenStream as TokenStream2}; pub(super) fn generate_dispatcher( source_trait: &syn::ItemTrait, args: &MacroArgs, ) -> Result<TokenStream2, TokenStream2> { if args.no_skeleton { return Ok(TokenStream2::new()); } let env_path = create_env_path(); let trait_ident = source_trait.ident.clone(); let box_dispatcher_ident = quote::format_ident!("{}BoxDispatcher", trait_ident); let arc_dispatcher_ident = quote::format_ident!("{}ArcDispatcher", trait_ident); let rwlock_dispatcher_ident = quote::format_ident!("{}RwLockDispatcher", trait_ident); let serde_format = &args.serde_format; let mut if_else_clauses = TokenStream2::new(); let mut if_else_clauses_rwlock = TokenStream2::new(); let mut is_this_trait_mutable = false; for item in source_trait.items.iter() { let method = match item { syn::TraitItem::Method(x) => x, non_method => { return Err(syn::Error::new_spanned( non_method, "Service trait must have only methods", ) .to_compile_error()) } }; let id_ident = super::id::id_method_ident(source_trait, method); let mut the_let_pattern = syn::PatTuple { attrs: Vec::new(), paren_token: syn::token::Paren(Span::call_site()), elems: syn::punctuated::Punctuated::new(), }; let mut type_annotation = syn::TypeTuple { paren_token: syn::token::Paren(Span::call_site()), elems: syn::punctuated::Punctuated::new(), }; let mut the_args: syn::punctuated::P
e_annotation.elems.push(arg_type.clone()); } type_annotation .elems .push_punct(syn::token::Comma(Span::call_site())); let arg_ident = quote::format_ident!("a{}", j + 1); let the_arg = if crate::helper::is_ref(arg_type) .map_err(|e| syn::Error::new_spanned(arg_source, &e).to_compile_error())? .is_some() { quote! { &#arg_ident } } else { quote! { #arg_ident } }; the_args.push(syn::parse2(the_arg).unwrap()); } let stmt_deserialize = quote! { let #the_let_pattern: #type_annotation = <#serde_format as #env_path::SerdeFormat>::from_slice(args).unwrap(); }; let method_name = method.sig.ident.clone(); let stmt_call = quote! { let result = self.object.#method_name(#the_args); }; let stmt_call_rwlock = if mut_self { quote! { let result = self.object.write().#method_name(#the_args); } } else { quote! { let result = self.object.read().#method_name(#the_args); } }; let the_return = quote! { return <#serde_format as #env_path::SerdeFormat>::to_vec(&result).unwrap(); }; if_else_clauses.extend(quote! { if method == #id_ident.load(#env_path::ID_ORDERING) { #stmt_deserialize #stmt_call #the_return } }); if_else_clauses_rwlock.extend(quote! { if method == #id_ident.load(#env_path::ID_ORDERING) { #stmt_deserialize #stmt_call_rwlock #the_return } }); } if_else_clauses.extend(quote! { panic!("Invalid remote-trait-object call. Fatal Error.") }); if_else_clauses_rwlock.extend(quote! { panic!("Invalid remote-trait-object call. Fatal Error.") }); let box_dispatcher = if is_this_trait_mutable { quote! 
{ #[doc(hidden)] pub struct #box_dispatcher_ident { object: parking_lot::RwLock<Box<dyn #trait_ident>> } impl #box_dispatcher_ident { fn new(object: Box<dyn #trait_ident>) -> Self { Self { object: parking_lot::RwLock::new(object) } } } impl #env_path::Dispatch for #box_dispatcher_ident { fn dispatch_and_call(&self, method: #env_path::MethodId, args: &[u8]) -> Vec<u8> { #if_else_clauses_rwlock } } impl #env_path::IntoSkeleton<dyn #trait_ident> for Box<dyn #trait_ident> { fn into_skeleton(self) -> #env_path::Skeleton { #env_path::create_skeleton(std::sync::Arc::new(#box_dispatcher_ident::new(self))) } } } } else { quote! { #[doc(hidden)] pub struct #box_dispatcher_ident { object: Box<dyn #trait_ident> } impl #box_dispatcher_ident { fn new(object: Box<dyn #trait_ident>) -> Self { Self { object } } } impl #env_path::Dispatch for #box_dispatcher_ident { fn dispatch_and_call(&self, method: #env_path::MethodId, args: &[u8]) -> Vec<u8> { #if_else_clauses } } impl #env_path::IntoSkeleton<dyn #trait_ident> for Box<dyn #trait_ident> { fn into_skeleton(self) -> #env_path::Skeleton { #env_path::create_skeleton(std::sync::Arc::new(#box_dispatcher_ident::new(self))) } } } }; let arc_dispatcher = if is_this_trait_mutable { quote! {} } else { quote! { #[doc(hidden)] pub struct #arc_dispatcher_ident { object: std::sync::Arc<dyn #trait_ident> } impl #arc_dispatcher_ident { fn new(object: std::sync::Arc<dyn #trait_ident>) -> Self { Self { object } } } impl #env_path::Dispatch for #arc_dispatcher_ident { fn dispatch_and_call(&self, method: #env_path::MethodId, args: &[u8]) -> Vec<u8> { #if_else_clauses } } impl #env_path::IntoSkeleton<dyn #trait_ident> for std::sync::Arc<dyn #trait_ident> { fn into_skeleton(self) -> #env_path::Skeleton { #env_path::create_skeleton(std::sync::Arc::new(#arc_dispatcher_ident::new(self))) } } } }; let rwlock_dispatcher = quote! 
{ #[doc(hidden)] pub struct #rwlock_dispatcher_ident { object: std::sync::Arc<parking_lot::RwLock<dyn #trait_ident>> } impl #rwlock_dispatcher_ident { fn new(object: std::sync::Arc<parking_lot::RwLock<dyn #trait_ident>>) -> Self { Self { object } } } impl #env_path::Dispatch for #rwlock_dispatcher_ident { fn dispatch_and_call(&self, method: #env_path::MethodId, args: &[u8]) -> Vec<u8> { #if_else_clauses_rwlock } } impl #env_path::IntoSkeleton<dyn #trait_ident> for std::sync::Arc<parking_lot::RwLock<dyn #trait_ident>> { fn into_skeleton(self) -> #env_path::Skeleton { #env_path::create_skeleton(std::sync::Arc::new(#rwlock_dispatcher_ident::new(self))) } } }; Ok(quote! { #box_dispatcher #arc_dispatcher #rwlock_dispatcher }) }
unctuated<syn::Expr, syn::token::Comma> = syn::punctuated::Punctuated::new(); let no_self = "All your method must take &self or &mut self (Object safety)"; let mut_self = match method .sig .inputs .first() .ok_or_else(|| syn::Error::new_spanned(method, no_self).to_compile_error())? { syn::FnArg::Typed(_) => { return Err(syn::Error::new_spanned(method, no_self).to_compile_error()) } syn::FnArg::Receiver(syn::Receiver { mutability: Some(_), .. }) => true, _ => false, }; is_this_trait_mutable |= mut_self; for (j, arg_source) in method.sig.inputs.iter().skip(1).enumerate() { let the_iden = quote::format_ident!("a{}", j + 1); the_let_pattern.elems.push(syn::Pat::Ident(syn::PatIdent { attrs: Vec::new(), by_ref: None, mutability: None, ident: the_iden, subpat: None, })); the_let_pattern .elems .push_punct(syn::token::Comma(Span::call_site())); let arg_type = match arg_source { syn::FnArg::Typed(syn::PatType { attrs: _, pat: _, colon_token: _, ty: t, }) => &**t, _ => panic!(), }; if let Some(unrefed_type) = crate::helper::is_ref(arg_type) .map_err(|e| syn::Error::new_spanned(arg_source, &e).to_compile_error())? { type_annotation.elems.push(unrefed_type); } else { typ
random
[ { "content": "fn id_method_entry_ident(the_trait: &syn::ItemTrait, method: &syn::TraitItemMethod) -> Ident {\n\n quote::format_ident!(\"ID_METHOD_ENTRY_{}_{}\", the_trait.ident, method.sig.ident)\n\n}\n\n\n", "file_path": "remote-trait-object-macro/src/service/id.rs", "rank": 0, "score": 147821.7501007116 }, { "content": "fn id_method_setter_ident(the_trait: &syn::ItemTrait, method: &syn::TraitItemMethod) -> Ident {\n\n quote::format_ident!(\"id_method_setter_{}_{}\", the_trait.ident, method.sig.ident)\n\n}\n\n\n\npub(super) fn generate_id(\n\n source_trait: &syn::ItemTrait,\n\n _args: &MacroArgs,\n\n) -> Result<TokenStream2, TokenStream2> {\n\n let env_path = create_env_path();\n\n let lit_trait_name = syn::LitStr::new(&format!(\"{}\", source_trait.ident), Span::call_site());\n\n let mut method_id_table = TokenStream2::new();\n\n\n\n for (i, item) in source_trait.items.iter().enumerate() {\n\n let method = match item {\n\n syn::TraitItem::Method(x) => x,\n\n non_method => {\n\n return Err(syn::Error::new_spanned(\n\n non_method,\n\n \"Service trait must have only methods\",\n\n )\n", "file_path": "remote-trait-object-macro/src/service/id.rs", "rank": 1, "score": 147821.7501007116 }, { "content": "pub fn id_method_ident(the_trait: &syn::ItemTrait, method: &syn::TraitItemMethod) -> Ident {\n\n quote::format_ident!(\"ID_METHOD_{}_{}\", the_trait.ident, method.sig.ident)\n\n}\n\n\n", "file_path": "remote-trait-object-macro/src/service/id.rs", "rank": 2, "score": 146036.06228406032 }, { "content": "pub fn service(args: TokenStream2, input: TokenStream2) -> Result<TokenStream2, TokenStream2> {\n\n let args: MacroArgsRaw = syn::parse2(args).map_err(|e| e.to_compile_error())?;\n\n let args = args.fill_default_values();\n\n\n\n let source_trait = match syn::parse2::<syn::ItemTrait>(input.clone()) {\n\n Ok(x) => x,\n\n Err(_) => {\n\n return Err(\n\n syn::Error::new_spanned(input, \"You can use #[service] only on a trait\")\n\n .to_compile_error(),\n\n )\n\n }\n\n 
};\n\n\n\n let id = id::generate_id(&source_trait, &args)?;\n\n let dispatcher = dispatcher::generate_dispatcher(&source_trait, &args)?;\n\n let proxy = proxy::generate_proxy(&source_trait, &args)?;\n\n let from_skeleton = from_skeleton::generate_from_skeleton(&source_trait, &args)?;\n\n\n\n Ok(quote! {\n\n #source_trait\n\n #id\n\n #dispatcher\n\n #proxy\n\n #from_skeleton\n\n })\n\n}\n", "file_path": "remote-trait-object-macro/src/service.rs", "rank": 3, "score": 123781.59847954765 }, { "content": "#[proc_macro_attribute]\n\npub fn service(args: TokenStream, input: TokenStream) -> TokenStream {\n\n match service::service(TokenStream2::from(args), TokenStream2::from(input)) {\n\n Ok(x) => TokenStream::from(x),\n\n Err(x) => TokenStream::from(x),\n\n }\n\n}\n\n\n\n/// This macro consumes the target trait, and will print the expanded code. Use this when you want to see the result of macro.\n", "file_path": "remote-trait-object-macro/src/lib.rs", "rank": 4, "score": 120812.39992941424 }, { "content": "#[proc_macro_attribute]\n\npub fn service_debug(args: TokenStream, input: TokenStream) -> TokenStream {\n\n match service::service(TokenStream2::from(args), TokenStream2::from(input)) {\n\n Ok(x) => println!(\"{}\", x),\n\n Err(x) => println!(\"{}\", x),\n\n }\n\n TokenStream::new()\n\n}\n", "file_path": "remote-trait-object-macro/src/lib.rs", "rank": 5, "score": 119031.71648705754 }, { "content": "#[test]\n\nfn service_object_as_return() {\n\n init_logger();\n\n\n\n let port = Arc::new(TestPort::new());\n\n let proxy_a = create_proxy_a(port.clone());\n\n\n\n let proxy_b: Box<dyn B> = proxy_a\n\n .service_object_as_return()\n\n .unwrap_import()\n\n .into_proxy();\n\n assert_eq!(proxy_b.get(), 0);\n\n proxy_b.inc();\n\n assert_eq!(proxy_b.get(), 1);\n\n proxy_b.inc();\n\n assert_eq!(proxy_b.get(), 2);\n\n\n\n drop(proxy_a);\n\n drop(proxy_b);\n\n drop(port)\n\n}\n\n\n", "file_path": "remote-trait-object/src/tests/complex_trait.rs", "rank": 6, "score": 114054.48817647403 
}, { "content": "// Id of methods in services.\n\n// Note that here the two strings mean (trait name, method name)\n\n// Also you can skip calling this, then the method id will be set up for default value\n\n// decided by the order of declaration.\n\ntype MethodIdentifierSetter = fn(id: MethodId);\n\n#[distributed_slice]\n\npub static MID_REG: [(&'static str, &'static str, MethodIdentifierSetter)] = [..];\n\n\n\n/// This will be provided by the user who cares the compatability between already-compiled service traits.\n\n#[derive(PartialEq, Serialize, Deserialize, Debug, Clone)]\n\npub struct IdMap {\n\n // This is system-wide; All module will get same ones\n\n pub method_map: Option<HashMap<(String, String), MethodId>>,\n\n}\n\n\n", "file_path": "remote-trait-object/src/service/id.rs", "rank": 7, "score": 109012.1326677104 }, { "content": "pub fn with_export(c: &mut Criterion) {\n\n c.bench_function(\"with_export_100\", |b| {\n\n b.iter(|| massive_with_export(black_box(100)))\n\n });\n\n}\n\n\n\ncriterion_group!(benches, no_export, with_export);\n\ncriterion_main!(benches);\n", "file_path": "remote-trait-object-tests/benches/bench1.rs", "rank": 8, "score": 100680.77077889987 }, { "content": "pub fn no_export(c: &mut Criterion) {\n\n c.bench_function(\"no_export_100\", |b| {\n\n b.iter(|| massive_no_export(black_box(100)))\n\n });\n\n}\n\n\n", "file_path": "remote-trait-object-tests/benches/bench1.rs", "rank": 9, "score": 100680.77077889987 }, { "content": "fn do_some_imports(x: Box<dyn Hello>) {\n\n let mut v = x.hello();\n\n let a: Box<dyn Hello> = v.pop().unwrap().into_proxy();\n\n let b: Arc<dyn Hello> = v.pop().unwrap().into_proxy();\n\n let c: Arc<RwLock<dyn Hello>> = v.pop().unwrap().into_proxy();\n\n}\n\n```\n\n**/\n\n/// [`service`]: attr.service.html\n\n/// [Service compatibility]: ./index.html#service_compatibility\n\n/// [raw export and import]: raw_exchange/index.html\n\npub struct ServiceToImport<T: ?Sized + Service> {\n\n handle: HandleToExchange,\n\n 
port: Weak<dyn Port>,\n\n _marker: PhantomData<T>,\n\n}\n\n\n\nimpl<T: ?Sized + Service> ServiceToImport<T> {\n\n /// Converts itself into a smart pointer of the trait, which is a _proxy object_.\n\n pub fn into_proxy<P: ImportProxy<T>>(self) -> P {\n", "file_path": "remote-trait-object/src/service/serde_support.rs", "rank": 10, "score": 96222.84874239932 }, { "content": "/// Exports a skeleton and returns a handle to it.\n\n///\n\n/// Once you create an instance of skeleton, you will eventually export it calling this.\n\n/// Take the handle to the other side's context and call [`import_service_from_handle`] to import it into a proxy object.\n\n/// If not, the service object will remain in the Context forever doing nothing.\n\npub fn export_service_into_handle(\n\n context: &crate::context::Context,\n\n service: Skeleton,\n\n) -> HandleToExchange {\n\n context\n\n .get_port()\n\n .upgrade()\n\n .unwrap()\n\n .register_service(service.raw)\n\n}\n\n\n", "file_path": "remote-trait-object/src/service/export_import.rs", "rank": 11, "score": 88600.36752245112 }, { "content": "#[test]\n\nfn service_object_as_argument() {\n\n init_logger();\n\n\n\n let port = Arc::new(TestPort::new());\n\n let proxy_a = create_proxy_a(port.clone());\n\n\n\n let service_object_b = Box::new(SimpleB::new()) as Box<dyn B>;\n\n proxy_a.service_object_as_argument(ServiceRef::Export(ServiceToExport::new(service_object_b)));\n\n\n\n drop(proxy_a);\n\n drop(port)\n\n}\n\n\n", "file_path": "remote-trait-object/src/tests/complex_trait.rs", "rank": 12, "score": 85818.91841696986 }, { "content": "#[test]\n\nfn recursive_service_object() {\n\n init_logger();\n\n\n\n let port = Arc::new(TestPort::new());\n\n let mut proxy_a = create_proxy_a(port.clone());\n\n let mut proxy_as = Vec::new();\n\n proxy_as.push(Arc::clone(&proxy_a));\n\n\n\n for i in 0..10 {\n\n assert_eq!(proxy_a.get_recursion_count(), i);\n\n proxy_a = proxy_a\n\n .recursive_service_object()\n\n .unwrap_import()\n\n .into_proxy();\n\n 
proxy_as.push(Arc::clone(&proxy_a));\n\n }\n\n assert_eq!(proxy_a.get_recursion_count(), 10);\n\n\n\n let proxy_b: Box<dyn B> = proxy_a\n\n .service_object_as_return()\n", "file_path": "remote-trait-object/src/tests/complex_trait.rs", "rank": 13, "score": 85818.91841696986 }, { "content": "pub fn create_null_service() -> Box<dyn NullService> {\n\n Box::new(NullServiceImpl)\n\n}\n\n\n", "file_path": "remote-trait-object/src/service/null.rs", "rank": 14, "score": 83948.66306054048 }, { "content": "/// Those necessary components for the macro is specially exported in the remote-trait-object.\n\n/// The macro will always specify full path using this.\n\nfn create_env_path() -> syn::Path {\n\n syn::parse2(quote! {remote_trait_object::macro_env}).unwrap()\n\n}\n\n\n\n/// It generates all necessary helper `struct`s that makes the trait be able to be used as a service.\n\n///\n\n/// It takes three arguments optionally\n\n/// - `serde_format = _` - Specify a type that implements `trait SerdeFormat`. 
The default is [serde_cbor](https://github.com/pyfisch/cbor)\n\n/// - `no_proxy` - If provided, the trait will be used only as a service object.\n\n/// - `no_skeleton` - If provided, the trait will be used only as a proxy object.\n\n///\n\n/// There will be many new public `struct`s, but you don't have to know about them.\n", "file_path": "remote-trait-object-macro/src/lib.rs", "rank": 15, "score": 78018.3939980562 }, { "content": "/// Imports a handle into a proxy object.\n\n///\n\n/// Once you receive an instance of [`HandleToExchange`], you will eventually import it calling this.\n\n/// Such handles must be from the other side's context.\n\npub fn import_service_from_handle<T: ?Sized + Service, P: ImportProxy<T>>(\n\n context: &crate::context::Context,\n\n handle: HandleToExchange,\n\n) -> P {\n\n P::import_proxy(context.get_port(), handle)\n\n}\n\n\n", "file_path": "remote-trait-object/src/service/export_import.rs", "rank": 16, "score": 76070.31425548652 }, { "content": "/// A special function that sets static & global identifiers for the methods.\n\n///\n\n/// It will be explained in more detail in the next version :)\n\n///\n\n/// This is supposed to be called only once during the entire lifetime of the process.\n\n/// However it is ok to call multiple times if the IdMap is identical, especially in the\n\n/// tests where each test share that static id list\n\n/// # Examples\n\n/// ```\n\n/// use remote_trait_object::macro_env::*;\n\n/// #[allow(non_upper_case_globals)]\n\n/// static ID_METHOD_MyTrait_mymethod: MethodIdAtomic = MethodIdAtomic::new(1);\n\n/// #[linkme::distributed_slice(MID_REG)]\n\n/// #[allow(non_upper_case_globals)]\n\n/// static ID_METHOD_ENTRY_MyTrait_mymethod: (&'static str, &'static str, fn(id: MethodId)) =\n\n/// (\"MyTrait\", \"mymethod\", id_method_setter_MyTrait_mymethod);\n\n/// #[allow(non_snake_case)]\n\n/// fn id_method_setter_MyTrait_mymethod(id: MethodId) {\n\n/// ID_METHOD_MyTrait_mymethod.store(id, ID_ORDERING);\n\n/// 
}\n\n/// #[test]\n\n/// fn setup() {\n\n/// let id_map: HashMap<(String, String), MethodId> =\n\n/// [((\"MyTrait\".to_owned(), \"mymethod\".to_owned()), 123)].iter().cloned().collect();\n\n/// let id_map = IdMap {\n\n/// method_map: Some(id_map),\n\n/// };\n\n/// setup_identifiers(&id_map);\n\n/// assert_eq!(ID_METHOD_MyTrait_mymethod.load(ID_ORDERING), 123);\n\n/// }\n\n/// ```\n\npub fn setup_identifiers(descriptor: &IdMap) {\n\n // distributed_slices integrity test\n\n {\n\n let mut bucket: HashSet<(String, String)> = HashSet::new();\n\n for (ident1, ident2, _) in MID_REG {\n\n bucket.insert(((*ident1).to_owned(), (*ident2).to_owned()));\n\n }\n\n assert_eq!(\n\n bucket.len(),\n\n MID_REG.len(),\n\n \"The service traits that this binary involved are not named;\n\n You have provided multiple traits with an identical name\"\n\n );\n\n }\n\n\n\n // method ids have default values decided by the order, so it is ok to leave them in an ordinary case.\n\n if let Some(map) = descriptor.method_map.as_ref() {\n\n for (trait_name, method_name, setter) in MID_REG {\n\n setter(\n\n *map.get(&((*trait_name).to_owned(), (*method_name).to_owned()))\n\n .expect(\"Invalid handle descriptor\"),\n\n );\n\n }\n\n }\n\n}\n", "file_path": "remote-trait-object/src/service/id.rs", "rank": 17, "score": 74986.62767456958 }, { "content": "fn lit_index(index: usize) -> syn::Lit {\n\n // We put a distinctive offset for the easy debug.\n\n syn::Lit::Int(syn::LitInt::new(\n\n &format!(\"{}\", index + 70),\n\n Span::call_site(),\n\n ))\n\n}\n\n\n", "file_path": "remote-trait-object-macro/src/service/id.rs", "rank": 18, "score": 72103.58189879237 }, { "content": "// This belongs to macro_env\n\npub fn get_dispatch(skeleton: &Skeleton) -> &dyn Dispatch {\n\n skeleton.raw.as_ref()\n\n}\n\n\n\n// These traits are associated with some specific service trait.\n\n// These tratis will be implement by `dyn ServiceTrait` where `T = dyn ServiceTrait` as well.\n\n// Macro will implement this trait with the 
target(expanding) service trait.\n\n\n", "file_path": "remote-trait-object/src/service/export_import.rs", "rank": 19, "score": 69492.13574651789 }, { "content": "/// Create a proxy object that always panic for all methods.\n\n///\n\n/// This is same as using [`create_null()`] and [`import_service_from_handle()`] but you don't even have to specify the [`Context`] here.\n\n///\n\n/// [`create_null()`]: ./struct.HandleToExchange.html#method.create_null\n\n/// [`import_service_from_handle()`]: ./fn.import_service_from_handle.html\n\n/// [`Context`]: ../struct.Context.html\n\npub fn import_null_proxy<T: ?Sized + Service, P: ImportProxy<T>>() -> P {\n\n P::import_proxy(\n\n crate::port::null_weak_port(),\n\n HandleToExchange::create_null(),\n\n )\n\n}\n", "file_path": "remote-trait-object/src/service/export_import.rs", "rank": 20, "score": 69330.90124643903 }, { "content": "/// NullServive is the only actual service trait that remote-trait-object provides by default.\n\n/// It will be useful when you want to establish a remote-trait-object connection with with_initial_service(),\n\n/// but such initial service is needed by only one side.\n\npub trait NullService: Service {}\n\n\n", "file_path": "remote-trait-object/src/service/null.rs", "rank": 21, "score": 67625.08530518039 }, { "content": "// This belongs to macro_env\n\npub fn create_skeleton(raw: Arc<dyn Dispatch>) -> Skeleton {\n\n Skeleton { raw }\n\n}\n\n\n", "file_path": "remote-trait-object/src/service/export_import.rs", "rank": 22, "score": 67100.39742789557 }, { "content": " trait Foo: Service {}\n\n\n\n struct FooImpl;\n\n impl Foo for FooImpl {}\n\n impl Service for FooImpl {}\n\n impl Dispatch for FooImpl {\n\n fn dispatch_and_call(&self, _method: MethodId, _args: &[u8]) -> Vec<u8> {\n\n unimplemented!()\n\n }\n\n }\n\n\n\n impl IntoSkeleton<dyn Foo> for Arc<dyn Foo> {\n\n fn into_skeleton(self) -> crate::macro_env::Skeleton {\n\n crate::macro_env::create_skeleton(Arc::new(FooImpl))\n\n }\n\n }\n\n\n\n /// 
This test checks SArc<dyn Test> is serialized as HandleToExchange or not\n\n #[test]\n\n fn test_serialize() {\n", "file_path": "remote-trait-object/src/service/serde_support.rs", "rank": 23, "score": 66161.28739598513 }, { "content": " trait Foo: Service {\n\n fn get_handle_to_exchange(&self) -> HandleToExchange;\n\n }\n\n struct FooImpl {\n\n handle_to_exchange: HandleToExchange,\n\n }\n\n impl Foo for FooImpl {\n\n fn get_handle_to_exchange(&self) -> HandleToExchange {\n\n self.handle_to_exchange\n\n }\n\n }\n\n impl Service for FooImpl {}\n\n impl ImportProxy<dyn Foo> for Box<dyn Foo> {\n\n fn import_proxy(_port: Weak<dyn Port>, handle: HandleToExchange) -> Box<dyn Foo> {\n\n Box::new(FooImpl {\n\n handle_to_exchange: handle,\n\n })\n\n }\n\n }\n\n\n", "file_path": "remote-trait-object/src/service/serde_support.rs", "rank": 24, "score": 66161.28739598513 }, { "content": "#[service]\n\npub trait Piano: Service {\n\n fn play(&mut self);\n\n}\n\n\n", "file_path": "remote-trait-object/src/service.rs", "rank": 25, "score": 65054.26954458406 }, { "content": " #[remote_trait_object_macro::service]\n\n pub trait MetaService: Service {}\n\n\n\n pub struct MetaServiceImpl {}\n\n\n\n impl MetaServiceImpl {\n\n pub fn new() -> Self {\n\n Self {}\n\n }\n\n }\n\n\n\n impl Service for MetaServiceImpl {}\n\n\n\n impl MetaService for MetaServiceImpl {}\n\n}\n\nuse meta_service::{MetaService, MetaServiceImpl};\n\n\n\n/// A configuration of a `remote-trait-object` context.\n\n#[derive(Clone, Debug)]\n\npub struct Config {\n\n /// A name that will be appended to the names of various threads spawned by `remote-trait-object`, for an easy debug.\n", "file_path": "remote-trait-object/src/context.rs", "rank": 26, "score": 64207.54575137357 }, { "content": "#[service]\n\npub trait Pizza: Service {}\n\n\n", "file_path": "remote-trait-object/src/service/serde_support.rs", "rank": 27, "score": 63387.28650718915 }, { "content": "#[service(no_skeleton)]\n\npub trait Hello: Service {\n\n fn 
hello(&self) -> Vec<ServiceToImport<dyn Hello>>;\n\n}\n\n\n", "file_path": "remote-trait-object/src/service/serde_support.rs", "rank": 28, "score": 63387.22073722677 }, { "content": "#[test]\n\nfn macro1() {\n\n let port = Arc::new(TestPort::new());\n\n let port_weak = Arc::downgrade(&port);\n\n\n\n let object = Box::new(MyObject { mul: 4 }) as Box<dyn Service1>;\n\n let handle = port.register_service(object.into_skeleton().raw);\n\n let proxy = <Box<dyn Service1> as ImportProxy<dyn Service1>>::import_proxy(port_weak, handle);\n\n\n\n assert_eq!(\n\n proxy.f1(1, &2, &[3, 4], (5, 6), &(7, \"8\".to_owned())),\n\n (1 + 2 + 3 + 4 + 5 + 6 + 7 + 8) * 4\n\n );\n\n assert_eq!(\n\n proxy.f2(\"Hello\", &Some(123)),\n\n (\"Hello_123_4\".to_owned(), \"Bye\".to_owned())\n\n );\n\n assert_eq!(\n\n proxy.f2(\"Hello\", &None),\n\n (\"Hello_None_4\".to_owned(), \"ByeBye\".to_owned())\n\n );\n\n drop(proxy);\n\n assert_eq!(port.register_len(), 0);\n\n}\n\n\n", "file_path": "remote-trait-object/src/tests.rs", "rank": 29, "score": 62963.68199466398 }, { "content": "struct MacroArgs {\n\n pub serde_format: syn::Path,\n\n pub no_proxy: bool,\n\n pub no_skeleton: bool,\n\n}\n\n\n\nimpl MacroArgsRaw {\n\n fn update(&mut self, ts: TokenStream2) -> syn::parse::Result<()> {\n\n if let Ok(arg) = syn::parse2::<syn::Ident>(ts.clone()) {\n\n return if arg == quote::format_ident!(\"no_proxy\") {\n\n if self.no_proxy.replace(()).is_some() {\n\n Err(syn::parse::Error::new_spanned(ts, \"Duplicated arguments\"))\n\n } else {\n\n Ok(())\n\n }\n\n } else if arg == quote::format_ident!(\"no_skeleton\") {\n\n if self.no_skeleton.replace(()).is_some() {\n\n Err(syn::parse::Error::new_spanned(ts, \"Duplicated arguments\"))\n\n } else {\n\n Ok(())\n", "file_path": "remote-trait-object-macro/src/service.rs", "rank": 30, "score": 62656.34877487403 }, { "content": "#[service]\n\npub trait PizzaStore: Service {\n\n fn order_pizza(&self) -> ServiceRef<dyn Pizza>;\n\n}\n\n```\n\n**/\n\n/// And this `PizzaStore` 
can be both\n\n/// - implemented and exported - you will be using `Import` variant for an argument, and `Export` variant for the return value.\n\n/// - imported and locally invoked - you will be using `Export` variant for an argument, and `Import` variant for the return value.\n\n///\n\n/// ## Example\n\n/**\n\n```ignore\n\n// EXPORTER SIDE\n\nimpl PizzaStore for SomeType {\n\n fn order_pizza(&self) -> ServiceRef<dyn Pizza> {\n\n ServiceRef::create_export(Box::new(SomePizza) as Box<dyn Pizza>)\n\n }\n\n}\n\n\n\n// IMPORTER SIDE\n", "file_path": "remote-trait-object/src/service/serde_support.rs", "rank": 31, "score": 62601.54298790428 }, { "content": "pub trait Service: Send + Sync {}\n\n\n", "file_path": "remote-trait-object/src/service.rs", "rank": 32, "score": 62534.004932928845 }, { "content": "#[test]\n\nfn ping1() {\n\n let barrier = Arc::new(Barrier::new(1));\n\n let ((_ctx1, hello1), (_ctx2, hello2)) = run(Arc::clone(&barrier));\n\n let hello1: Box<dyn Hello> = hello1.into_proxy();\n\n let hello2: Box<dyn Hello> = hello2.into_proxy();\n\n\n\n let ping1: Box<dyn Ping> = hello1.hey().unwrap_import().into_proxy();\n\n let ping2: Box<dyn Ping> = hello2.hey().unwrap_import().into_proxy();\n\n\n\n ping1.ping();\n\n ping2.ping();\n\n\n\n drop(hello1);\n\n drop(hello2);\n\n}\n\n\n", "file_path": "remote-trait-object-tests/src/ping.rs", "rank": 33, "score": 61774.67385911887 }, { "content": "#[allow(clippy::type_complexity)]\n\nfn run(\n\n barrier: Arc<Barrier>,\n\n) -> (\n\n (Context, ServiceToImport<dyn Hello>),\n\n (Context, ServiceToImport<dyn Hello>),\n\n) {\n\n let crate::transport::TransportEnds {\n\n recv1,\n\n send1,\n\n recv2,\n\n send2,\n\n } = crate::transport::create();\n\n (\n\n Context::with_initial_service(\n\n Config::default_setup(),\n\n send1,\n\n recv1,\n\n ServiceToExport::new(Box::new(SimpleHello {\n\n barrier: Arc::clone(&barrier),\n\n }) as Box<dyn Hello>),\n\n ),\n\n Context::with_initial_service(\n\n Config::default_setup(),\n\n send2,\n\n 
recv2,\n\n ServiceToExport::new(Box::new(SimpleHello { barrier }) as Box<dyn Hello>),\n\n ),\n\n )\n\n}\n\n\n", "file_path": "remote-trait-object-tests/src/ping.rs", "rank": 34, "score": 61774.67385911887 }, { "content": "#[test]\n\nfn test() {\n\n let crate::transport::TransportEnds {\n\n recv1,\n\n send1,\n\n recv2,\n\n send2,\n\n } = crate::transport::create();\n\n\n\n let _context_pizza_town = Context::with_initial_service_export(\n\n Config::default_setup(),\n\n send1,\n\n recv1,\n\n ServiceToExport::new(Box::new(SomePizzaStore) as Box<dyn PizzaStore>),\n\n );\n\n\n\n let (_context_customer, pizza_store): (_, ServiceToImport<dyn PizzaStore>) =\n\n Context::with_initial_service_import(Config::default_setup(), send2, recv2);\n\n let pizza_store_proxy: Box<dyn PizzaStore> = pizza_store.into_proxy();\n\n\n\n let my_credit_card = Box::new(SomeCreditCard { money: 11 }) as Box<dyn CreditCard>;\n", "file_path": "remote-trait-object-tests/src/simple.rs", "rank": 35, "score": 61774.67385911887 }, { "content": "#[test]\n\nfn macro_no_skeleton() {\n\n let port = Arc::new(TestPort::new());\n\n let port_weak = Arc::downgrade(&port);\n\n\n\n let object = Box::new(SimpleHello) as Box<dyn Hello>;\n\n\n\n let handle = port.register_service(object.into_skeleton().raw);\n\n let proxy =\n\n <Box<dyn HelloWithRef> as ImportProxy<dyn HelloWithRef>>::import_proxy(port_weak, handle);\n\n\n\n let source = vec![1, 2, 3, 4];\n\n let source2 = vec![(&source[0], &source[1]), (&source[2], &source[3])];\n\n let source3 = vec![&source2[0], &source2[1]];\n\n\n\n assert_eq!(proxy.f(&source3), 10);\n\n drop(proxy);\n\n assert_eq!(port.register_len(), 0);\n\n}\n", "file_path": "remote-trait-object/src/tests.rs", "rank": 36, "score": 61774.67385911887 }, { "content": "fn test_runner(f: impl Fn(Box<dyn Store>)) {\n\n let crate::transport::TransportEnds {\n\n recv1,\n\n send1,\n\n recv2,\n\n send2,\n\n } = crate::transport::create();\n\n let store_runner = std::thread::Builder::new()\n\n 
.name(\"Store Runner\".to_owned())\n\n .spawn(move || run_store((send2, recv2)))\n\n .unwrap();\n\n\n\n let (rto_context, store): (Context, ServiceToImport<dyn Store>) =\n\n Context::with_initial_service_import(Config::default_setup(), send1, recv1);\n\n let store: Box<dyn Store> = store.into_proxy();\n\n\n\n f(store);\n\n\n\n rto_context.disable_garbage_collection();\n\n drop(rto_context);\n", "file_path": "remote-trait-object-tests/src/test_store/man.rs", "rank": 37, "score": 61454.264210018315 }, { "content": "fn init_logger() {\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n}\n\n\n", "file_path": "remote-trait-object/src/tests/complex_trait.rs", "rank": 38, "score": 61423.776114160464 }, { "content": "#[derive(Default)]\n\nstruct MacroArgsRaw {\n\n pub serde_format: Option<syn::Path>,\n\n pub no_proxy: Option<()>,\n\n pub no_skeleton: Option<()>,\n\n}\n\n\n", "file_path": "remote-trait-object-macro/src/service.rs", "rank": 39, "score": 60772.035995067 }, { "content": "#[test]\n\n#[should_panic(expected = \"You invoked a method of a null proxy object.\")]\n\nfn null_proxy() {\n\n let barrier = Arc::new(Barrier::new(1));\n\n let ((ctx1, _), (_ctx2, _)) = run(Arc::clone(&barrier));\n\n let null_handle = remote_trait_object::raw_exchange::HandleToExchange::create_null();\n\n let null_proxy: Box<dyn Ping> =\n\n remote_trait_object::raw_exchange::import_service_from_handle(&ctx1, null_handle);\n\n null_proxy.ping();\n\n}\n\n\n", "file_path": "remote-trait-object-tests/src/ping.rs", "rank": 40, "score": 60662.85957967671 }, { "content": "#[test]\n\n#[should_panic(expected = \"You invoked a method of a null proxy object.\")]\n\nfn null_proxy2() {\n\n let null_proxy: Box<dyn Ping> = remote_trait_object::raw_exchange::import_null_proxy();\n\n null_proxy.ping();\n\n}\n", "file_path": "remote-trait-object-tests/src/ping.rs", "rank": 41, "score": 60662.85957967671 }, { "content": "#[test]\n\nfn recognize_ref() {\n\n let t = 
syn::parse_str::<syn::Type>(\"Vec<u32>\").unwrap();\n\n assert!(is_ref(&t).unwrap().is_none());\n\n let t = syn::parse_str::<syn::Type>(\"&Vec<u32>\").unwrap();\n\n let tu = syn::parse_str::<syn::Type>(\"Vec<u32>\").unwrap();\n\n assert_eq!(is_ref(&t).unwrap().unwrap(), tu);\n\n let t = syn::parse_str::<syn::Type>(\"&i32\").unwrap();\n\n let tu = syn::parse_str::<syn::Type>(\"i32\").unwrap();\n\n assert_eq!(is_ref(&t).unwrap().unwrap(), tu);\n\n let t = syn::parse_str::<syn::Type>(\"&str\").unwrap();\n\n let tu = syn::parse_str::<syn::Type>(\"String\").unwrap();\n\n assert_eq!(is_ref(&t).unwrap().unwrap(), tu);\n\n let t = syn::parse_str::<syn::Type>(\"&[u8]\").unwrap();\n\n let tu = syn::parse_str::<syn::Type>(\"Vec<_>\").unwrap();\n\n assert_eq!(is_ref(&t).unwrap().unwrap(), tu);\n\n let t = syn::parse_str::<syn::Type>(\"&mut i32\").unwrap();\n\n assert!(is_ref(&t).is_err())\n\n}\n", "file_path": "remote-trait-object-macro/src/helper.rs", "rank": 42, "score": 60659.39908426893 }, { "content": "#[test]\n\nfn packet_header_size() {\n\n let x = PacketHeader {\n\n slot: SlotId(0),\n\n service_object_id: 0,\n\n method: 0,\n\n };\n\n assert_eq!(bincode::serialize(&x).unwrap().len(), PACKET_HEADER_SIZE);\n\n}\n\n\n", "file_path": "remote-trait-object/src/packet.rs", "rank": 43, "score": 60659.39908426893 }, { "content": "#[test]\n\nfn ping_concurrent1() {\n\n let n = 6;\n\n for _ in 0..100 {\n\n let barrier = Arc::new(Barrier::new(n + 1));\n\n let ((_ctx1, hello1), (_ctx2, hello2)) = run(Arc::clone(&barrier));\n\n let hello1: Box<dyn Hello> = hello1.into_proxy();\n\n let hello2: Box<dyn Hello> = hello2.into_proxy();\n\n\n\n let pings: Vec<Box<dyn Ping>> = (0..n)\n\n .map(|_| hello2.hey().unwrap_import().into_proxy())\n\n .collect();\n\n let joins: Vec<thread::JoinHandle<()>> = pings\n\n .into_iter()\n\n .map(|ping| {\n\n thread::spawn(move || {\n\n ping.ping_barrier();\n\n })\n\n })\n\n .collect();\n\n barrier.wait();\n\n for join in joins {\n\n 
join.join().unwrap();\n\n }\n\n\n\n drop(hello1);\n\n drop(hello2);\n\n }\n\n}\n\n\n", "file_path": "remote-trait-object-tests/src/ping.rs", "rank": 44, "score": 60659.39908426893 }, { "content": "#[test]\n\nfn ping_concurrent2() {\n\n let n = 6;\n\n for _ in 0..100 {\n\n let barrier = Arc::new(Barrier::new(n + 1));\n\n let ((_ctx1, hello1), (_ctx2, hello2)) = run(Arc::clone(&barrier));\n\n let hello1: Box<dyn Hello> = hello1.into_proxy();\n\n let hello2: Box<dyn Hello> = hello2.into_proxy();\n\n\n\n let ping: Arc<dyn Ping> = hello2.hey().unwrap_import().into_proxy();\n\n\n\n let joins: Vec<thread::JoinHandle<()>> = (0..n)\n\n .map(|_| {\n\n let ping_ = Arc::clone(&ping);\n\n thread::spawn(move || {\n\n ping_.ping_barrier();\n\n })\n\n })\n\n .collect();\n\n barrier.wait();\n\n for join in joins {\n\n join.join().unwrap();\n\n }\n\n\n\n drop(hello1);\n\n drop(hello2);\n\n }\n\n}\n\n\n", "file_path": "remote-trait-object-tests/src/ping.rs", "rank": 45, "score": 60659.39908426893 }, { "content": "fn receive_loop(\n\n transport_recv: Box<dyn TransportRecv>,\n\n to_slot_receivers: Vec<Sender<Result<Packet, TransportError>>>,\n\n) {\n\n loop {\n\n match transport_recv.recv(None) {\n\n Ok(x) => {\n\n let packet = Packet::new_from_buffer(x);\n\n let slot_id = packet.view().slot();\n\n to_slot_receivers[slot_id.as_usize()]\n\n .send(Ok(packet))\n\n .expect(\"Slot receivers are managed in Client. 
Client must be dropped after this thread\");\n\n }\n\n Err(TransportError::Termination) => return,\n\n Err(_err) => {\n\n // TODO: Broadcast the error to all **active** call slots\n\n return;\n\n }\n\n };\n\n }\n\n}\n", "file_path": "remote-trait-object/src/port/client.rs", "rank": 46, "score": 60659.39908426893 }, { "content": "#[rto_macro::service]\n\ntrait Hello: Service {\n\n fn f(&self, v: &[(i32, i32)]) -> i32;\n\n}\n\n\n", "file_path": "remote-trait-object/src/tests.rs", "rank": 47, "score": 59776.59685477198 }, { "content": "#[rto_macro::service]\n\ntrait A: Service {\n\n fn service_object_as_argument(&self, b: ServiceRef<dyn B>);\n\n fn service_object_as_return(&self) -> ServiceRef<dyn B>;\n\n fn recursive_service_object(&self) -> ServiceRef<dyn A>;\n\n fn get_recursion_count(&self) -> u32;\n\n}\n\n\n", "file_path": "remote-trait-object/src/tests/complex_trait.rs", "rank": 48, "score": 59624.70502722642 }, { "content": "struct SingleArg<T: Parse> {\n\n pub arg_name: syn::Ident,\n\n pub arg_value: T,\n\n}\n\n\n\nimpl<T: Parse> Parse for SingleArg<T> {\n\n fn parse(input: ParseStream) -> syn::parse::Result<Self> {\n\n let arg_name = input.parse()?;\n\n input.parse::<Token![=]>()?;\n\n let arg_value = input.parse()?;\n\n Ok(Self {\n\n arg_name,\n\n arg_value,\n\n })\n\n }\n\n}\n\n\n", "file_path": "remote-trait-object-macro/src/service.rs", "rank": 49, "score": 58997.7510509411 }, { "content": "/// A serde de/serialization format that will be used for a service.\n\npub trait SerdeFormat {\n\n #[allow(clippy::result_unit_err)]\n\n fn to_vec<S: serde::Serialize>(s: &S) -> Result<Vec<u8>, ()>;\n\n #[allow(clippy::result_unit_err)]\n\n fn from_slice<D: serde::de::DeserializeOwned>(data: &[u8]) -> Result<D, ()>;\n\n}\n\n\n\n/// In most case the format isn't important because the users won't see the raw data directly anyway.\n\n/// Thus we provide a default format for the macro.\n\npub struct Cbor;\n\n\n\nimpl SerdeFormat for Cbor {\n\n fn to_vec<S: 
serde::Serialize>(s: &S) -> Result<Vec<u8>, ()> {\n\n serde_cbor::to_vec(s).map_err(|_| ())\n\n }\n\n\n\n fn from_slice<D: serde::de::DeserializeOwned>(data: &[u8]) -> Result<D, ()> {\n\n serde_cbor::from_slice(data).map_err(|_| ())\n\n }\n\n}\n", "file_path": "remote-trait-object/src/service.rs", "rank": 50, "score": 58760.357222438506 }, { "content": "#[rto_macro::service(no_skeleton)]\n\ntrait HelloWithRef: Service {\n\n fn f(&self, v: &[&(&i32, &i32)]) -> i32;\n\n}\n\n\n", "file_path": "remote-trait-object/src/tests.rs", "rank": 51, "score": 58757.06771717134 }, { "content": "/// Conversion into a [`Skeleton`], from a smart pointer of a service object.\n\n///\n\n/// By attaching `[remote_trait_object::service]` on a trait, smart pointers of the trait will automatically implement this.\n\n/// This is required if you want to create a [`Skeleton`] or [`ServiceToExport`].\n\n///\n\n/// [`ServiceToExport`]: ../struct.ServiceToExport.html\n\n// Unused T is for avoiding violation of the orphan rule\n\n// T will be local type for the crate, and that makes it possible to\n\n// impl IntoSkeleton<dyn MyService> for Arc<dyn MyService>\n\npub trait IntoSkeleton<T: ?Sized + Service> {\n\n fn into_skeleton(self) -> Skeleton;\n\n}\n\n\n", "file_path": "remote-trait-object/src/service/export_import.rs", "rank": 52, "score": 58747.95311780518 }, { "content": "#[rto_macro::service]\n\ntrait B: Service {\n\n fn inc(&self);\n\n fn get(&self) -> i32;\n\n}\n\n\n", "file_path": "remote-trait-object/src/tests/complex_trait.rs", "rank": 53, "score": 58688.861070571205 }, { "content": "fn receiver<H>(\n\n config: Config,\n\n handler: Arc<H>,\n\n transport_send: Arc<dyn TransportSend>,\n\n transport_recv: Box<dyn TransportRecv>,\n\n) where\n\n H: Handler + 'static,\n\n{\n\n let count = Arc::new(AtomicI32::new(0));\n\n loop {\n\n match transport_recv.recv(None) {\n\n Ok(request) => {\n\n let packet = Packet::new_from_buffer(request);\n\n let handler = Arc::clone(&handler);\n\n let 
transport_send = Arc::clone(&transport_send);\n\n\n\n count.fetch_add(1, Ordering::Release);\n\n let count = Arc::clone(&count);\n\n config\n\n .thread_pool\n", "file_path": "remote-trait-object/src/port/server.rs", "rank": 54, "score": 58384.06707109626 }, { "content": "#[rto_macro::service]\n\npub trait Service1: Service {\n\n fn f1(&self, a1: i32, a2: &i32, a3: &[i32], a4: (i32, i32), a5: &(i32, String)) -> i32;\n\n fn f2(&self, s1: &str, a2: &Option<i32>) -> (String, String);\n\n}\n\n\n", "file_path": "remote-trait-object/src/tests.rs", "rank": 55, "score": 56796.531013336964 }, { "content": "#[doc(hidden)]\n\npub trait FromSkeleton<T: ?Sized + Service>: Sized {\n\n fn from_skeleton(skeleton: Skeleton) -> Self;\n\n}\n\n\n", "file_path": "remote-trait-object/src/service/export_import.rs", "rank": 56, "score": 56765.01569991117 }, { "content": "/// Conversion into a smart pointer of a service object, from [`HandleToExchange`].\n\n///\n\n/// By attaching `[remote_trait_object::service]` on a trait, smart pointers of the trait will automatically implement this.\n\n/// This is required if you want to create a proxy object from [`HandleToExchange`] or [`ServiceToImport`].\n\n///\n\n/// [`ServiceToImport`]: ../struct.ServiceToImport.html\n\n// Unused T is for avoiding violation of the orphan rule, like `IntoSkeleton`\n\npub trait ImportProxy<T: ?Sized + Service>: Sized {\n\n fn import_proxy(port: Weak<dyn Port>, handle: HandleToExchange) -> Self;\n\n}\n\n\n", "file_path": "remote-trait-object/src/service/export_import.rs", "rank": 57, "score": 56070.04726812508 }, { "content": "#[service]\n\npub trait Ping: Service {\n\n fn ping(&self);\n\n fn ping_mut(&mut self);\n\n fn ping_barrier(&self);\n\n}\n\n\n", "file_path": "remote-trait-object-tests/src/ping.rs", "rank": 58, "score": 55835.289567309774 }, { "content": "#[service]\n\npub trait PizzaStore: Service {\n\n fn order_pizza(&self, credit_card: ServiceRef<dyn CreditCard>) -> Result<String, ()>;\n\n}\n", "file_path": 
"remote-trait-object/src/lib.rs", "rank": 59, "score": 55835.289567309774 }, { "content": "#[service]\n\npub trait Hello: Service {\n\n fn hey(&self) -> ServiceRef<dyn Ping>;\n\n}\n\n\n", "file_path": "remote-trait-object-tests/src/ping.rs", "rank": 60, "score": 55835.289567309774 }, { "content": "#[service]\n\npub trait CreditCard: Service {\n\n fn pay(&mut self, ammount: u64) -> Result<(), ()>;\n\n}\n", "file_path": "remote-trait-object/src/lib.rs", "rank": 61, "score": 55835.289567309774 }, { "content": "#[service]\n\npub trait PizzaStore: Service {\n\n fn order_pizza(&self, credit_card: ServiceRef<dyn CreditCard>) -> Result<String, ()>;\n\n}\n", "file_path": "remote-trait-object-tests/src/simple.rs", "rank": 62, "score": 54927.11045838219 }, { "content": "#[service]\n\npub trait CreditCard: Service {\n\n fn pay(&mut self, ammount: u64) -> Result<(), ()>;\n\n}\n", "file_path": "remote-trait-object-tests/src/simple.rs", "rank": 63, "score": 54927.11045838219 }, { "content": "pub fn create() -> TransportEnds {\n\n let (a_sender, a_receiver) = bounded(256);\n\n let (a_termination_sender, a_termination_receiver) = bounded(1);\n\n let (b_sender, b_receiver) = bounded(256);\n\n let (b_termination_sender, b_termination_receiver) = bounded(1);\n\n\n\n let send1 = IntraSend(b_sender);\n\n let recv1 = IntraRecv {\n\n data_receiver: a_receiver,\n\n terminator_receiver: a_termination_receiver,\n\n terminator: a_termination_sender,\n\n };\n\n\n\n let send2 = IntraSend(a_sender);\n\n let recv2 = IntraRecv {\n\n data_receiver: b_receiver,\n\n terminator_receiver: b_termination_receiver,\n\n terminator: b_termination_sender,\n\n };\n\n\n\n TransportEnds {\n\n recv1,\n\n send1,\n\n recv2,\n\n send2,\n\n }\n\n}\n", "file_path": "remote-trait-object-tests/src/transport.rs", "rank": 64, "score": 54442.28652550184 }, { "content": "/// Exporter sides's interface to the service object. 
This will be implemented\n\n/// by each service trait's unique wrapper in the macro\n\npub trait Dispatch: Send + Sync {\n\n fn dispatch_and_call(&self, method: MethodId, args: &[u8]) -> Vec<u8>;\n\n}\n\n\n\nimpl<F> Dispatch for F\n\nwhere\n\n F: Fn(MethodId, &[u8]) -> Vec<u8> + Send + Sync,\n\n{\n\n fn dispatch_and_call(&self, method: MethodId, args: &[u8]) -> Vec<u8> {\n\n self(method, args)\n\n }\n\n}\n\n\n\n/// The `Service` trait is a marker that is used as a supertrait for a service trait,\n\n/// indicating that the trait is for a service.\n\n///\n\n/// It is bound to `Send` and `Sync`, and that's all.\n\n/// Please put this as a supertrait for every service trait, and implement it\n\n/// for all concrete service implementers.\n\n///\n\n/**\n\n## Example\n\n```\n\nuse remote_trait_object::*;\n\n\n", "file_path": "remote-trait-object/src/service.rs", "rank": 65, "score": 54171.84581690388 }, { "content": "#[service(serde_format = Bincode)]\n\npub trait Store: Service {\n\n fn order_pizza(&self, menu: Pizza, money: u32) -> String;\n\n fn order_coke(&self, flavor: &str, money: u32) -> String;\n\n fn order_pizza_credit_card(\n\n &self,\n\n menu: Pizza,\n\n credit_card: ServiceRef<dyn CreditCard>,\n\n ) -> String;\n\n fn register_card(&mut self, credit_card: ServiceRef<dyn CreditCard>);\n\n}\n\n\n\n// Some variations of traits for tests\n\n\n\n/// This fails to compile without `no_skeleton`\n", "file_path": "remote-trait-object-tests/src/test_store/types.rs", "rank": 66, "score": 54067.569741714295 }, { "content": "fn handle_single_call<H: Handler>(\n\n packet: Packet,\n\n handler: Arc<H>,\n\n transport_send: Arc<dyn TransportSend>,\n\n count: Arc<AtomicI32>,\n\n) {\n\n let response = handler.handle(packet.view());\n\n let mut response_packet = Packet::new_response_from_request(packet.view());\n\n response_packet.append_data(&response);\n\n if let Err(_err) = transport_send.send(response_packet.buffer(), None) {\n\n // TODO: report the error to the context\n\n 
count.fetch_sub(1, Ordering::Release);\n\n return;\n\n };\n\n count.fetch_sub(1, Ordering::Release);\n\n}\n\n\n", "file_path": "remote-trait-object/src/port/server.rs", "rank": 67, "score": 53511.295228080686 }, { "content": "#[remote_trait_object_macro::service]\n\npub trait PizzaStore : rto::Service {\n\n fn order_pizza(&mut self, menu: &str, money: u64);\n\n fn ask_pizza_price(&self, menu: &str) -> u64;\n\n}\n\n```\n\n\n\n### Service Compatibility\n\nAlthough it is common to use the same trait for both proxy object and service object, it is possible to import a service into another trait.\n\n\n\nTODO: We have not strictly designed the compatibility model but will be provided in the next version.\n\n\n\nRoughly, in current version, trait `P` is considered to be compatible to be proxy of trait `S`, only if\n\n1. `P` has exactly the same methods as `S` declared in the same order, that differ only in types of parameter and return value.\n\n2. Such different types must be compatible.\n\n3. Types are considered to be compatible if both are serialized and deserialized with exactly the same value.\n\n\n\n`remote-trait-object` always guarantees 3. 
between [`ServiceToExport`], [`ServiceToImport`] and [`ServiceRef`].\n\n\n\n## Export & Import services\n\nOne of the core features of `remote-trait-object` is its simple and straightforward but extensive export & import of services.\n", "file_path": "remote-trait-object/src/lib.rs", "rank": 68, "score": 53263.7415119216 }, { "content": "#[service]\n\npub trait CreditCard: Service {\n\n fn pay(&mut self, money: u32) -> Result<(), ()>;\n\n}\n\n\n\n/// We use a different format for test\n", "file_path": "remote-trait-object-tests/src/test_store/types.rs", "rank": 69, "score": 53253.34475019649 }, { "content": "#[service(no_skeleton, serde_format = Bincode)]\n\npub trait WeirdSmallStore: Service {\n\n fn order_pizza(&self, menu: Pizza, money: &&&&&&&&&&&&&&u32) -> String;\n\n}\n", "file_path": "remote-trait-object-tests/src/test_store/types.rs", "rank": 70, "score": 52480.12968360749 }, { "content": "pub fn massive_no_export(n: usize) {\n\n fn f(n: usize, store: Box<dyn Store>) {\n\n for _ in 0..n {\n\n assert_eq!(\n\n store.order_pizza(Pizza::Pepperoni, 13),\n\n \"Here's a delicious pepperoni pizza\"\n\n );\n\n }\n\n }\n\n test_runner(|store: Box<dyn Store>| f(n, store));\n\n}\n\n\n", "file_path": "remote-trait-object-tests/src/test_store/man.rs", "rank": 71, "score": 51010.88621656496 }, { "content": "pub fn massive_with_export(n: usize) {\n\n fn f(n: usize, store: Box<dyn Store>) {\n\n for _ in 0..n {\n\n let card = Box::new(MyCreditCard { balance: 13 }) as Box<dyn CreditCard>;\n\n assert_eq!(\n\n store.order_pizza_credit_card(Pizza::Pepperoni, ServiceRef::create_export(card)),\n\n \"Here's a delicious pepperoni pizza\"\n\n );\n\n }\n\n }\n\n test_runner(|store: Box<dyn Store>| f(n, store));\n\n}\n", "file_path": "remote-trait-object-tests/src/test_store/man.rs", "rank": 72, "score": 51010.88621656496 }, { "content": "/// Weak::new() is not implemented for ?Sized.\n\n/// See https://github.com/rust-lang/rust/issues/50513\n\npub fn null_weak_port() -> Weak<dyn Port> 
{\n\n Weak::<BasicPort>::new() as Weak<dyn Port>\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct BasicPort {\n\n registry: Arc<ServiceForwarder>,\n\n /// client is None only in the drop function.\n\n client: Option<Client>,\n\n /// If this is on, the port will not request delete\n\n /// This is useful when the port-port connection is terminating and you don't really\n\n /// care about the garabage collection.\n\n no_drop: AtomicBool,\n\n}\n\n\n\nimpl Port for BasicPort {\n\n fn call(&self, packet: PacketView) -> Packet {\n\n self.client.as_ref().unwrap().call(packet)\n\n }\n\n\n", "file_path": "remote-trait-object/src/port.rs", "rank": 73, "score": 49616.134823991684 }, { "content": "fn receiver_loop<Forwarder: Forward, Receiver: TransportRecv>(\n\n transport_recv: Receiver,\n\n request_send: Sender<Result<Vec<u8>, TransportError>>,\n\n response_send: Sender<Result<Vec<u8>, TransportError>>,\n\n) {\n\n loop {\n\n let message = match transport_recv.recv(None) {\n\n Err(err) => {\n\n request_send.send(Err(err.clone())).unwrap();\n\n response_send.send(Err(err)).unwrap();\n\n return;\n\n }\n\n Ok(data) => data,\n\n };\n\n\n\n let packet_view = PacketView::new(&message);\n\n trace!(\"Receive message in multiplex {}\", packet_view);\n\n let forward_result = Forwarder::forward(packet_view);\n\n\n\n match forward_result {\n", "file_path": "remote-trait-object/src/transport/multiplex.rs", "rank": 74, "score": 48783.732736859136 }, { "content": "#[cfg(test)]\n\npub fn create_store() -> Box<dyn Store> {\n\n Box::new(MyPizzaStore {\n\n vat: 1,\n\n registered_card: None,\n\n })\n\n}\n", "file_path": "remote-trait-object-tests/src/test_store/store.rs", "rank": 75, "score": 47994.85316120159 }, { "content": "pub fn run_store(transport: (IntraSend, IntraRecv)) {\n\n let (transport_send, transport_recv) = transport;\n\n let rto_context = Context::with_initial_service_export(\n\n Config::default_setup(),\n\n transport_send,\n\n transport_recv,\n\n 
ServiceToExport::new(Box::new(MyPizzaStore {\n\n vat: 1,\n\n registered_card: None,\n\n }) as Box<dyn Store>),\n\n );\n\n rto_context.wait(None).unwrap();\n\n}\n\n\n", "file_path": "remote-trait-object-tests/src/test_store/store.rs", "rank": 76, "score": 46534.68639319844 }, { "content": "fn create_proxy_a(port: Arc<dyn Port>) -> Arc<dyn A> {\n\n let a: Arc<dyn A> = Arc::new(SimpleA::new());\n\n let handle = port.register_service(a.into_skeleton().raw);\n\n ImportProxy::import_proxy(Arc::downgrade(&port), handle)\n\n}\n\n\n", "file_path": "remote-trait-object/src/tests/complex_trait.rs", "rank": 77, "score": 45909.34153088571 }, { "content": "pub fn path_of_single_ident(ident: syn::Ident) -> syn::Path {\n\n syn::Path {\n\n leading_colon: None,\n\n segments: {\n\n let mut punc = syn::punctuated::Punctuated::new();\n\n punc.push(syn::PathSegment {\n\n ident,\n\n arguments: syn::PathArguments::None,\n\n });\n\n punc\n\n },\n\n }\n\n}\n\n\n", "file_path": "remote-trait-object-macro/src/helper.rs", "rank": 78, "score": 44968.773458739975 }, { "content": "/// In addition, it coverts str->String and [] -> Vec\n\npub fn is_ref(the_type: &syn::Type) -> Result<Option<syn::Type>, String> {\n\n if *the_type\n\n == syn::parse2::<syn::Type>(quote! {\n\n &str\n\n })\n\n .unwrap()\n\n {\n\n return Ok(Some(\n\n syn::parse2::<syn::Type>(quote! 
{\n\n String\n\n })\n\n .unwrap(),\n\n ));\n\n }\n\n\n\n match the_type {\n\n syn::Type::Reference(x) => {\n\n if x.lifetime.is_some() {\n\n return Err(\"Lifetime exists\".to_owned());\n\n }\n", "file_path": "remote-trait-object-macro/src/helper.rs", "rank": 79, "score": 41724.302747557034 }, { "content": "struct NullServiceImpl;\n\n\n\nimpl NullService for NullServiceImpl {}\n\n\n\nimpl Service for NullServiceImpl {}\n\n\n\n// Contents below are something that would have been generated by macro.\n\n// They are slightly different from the actual expansion result of NullService (which happens to succeed), since the macro\n\n// doesn't take account of such special case.\n\n\n\npub struct NullServiceBoxDispatcher {}\n\nimpl NullServiceBoxDispatcher {\n\n fn new(_object: Box<dyn NullService>) -> Self {\n\n Self {}\n\n }\n\n}\n\nimpl crate::macro_env::Dispatch for NullServiceBoxDispatcher {\n\n fn dispatch_and_call(&self, _method: crate::macro_env::MethodId, _args: &[u8]) -> Vec<u8> {\n\n panic!(\"Invalid remote-trait-object call. Fatal Error.\")\n\n }\n", "file_path": "remote-trait-object/src/service/null.rs", "rank": 80, "score": 40152.30989044177 }, { "content": "pub mod export_import;\n\npub mod handle;\n\npub mod id;\n\nmod null;\n\npub mod serde_support;\n\n\n\nuse crate::forwarder::ServiceObjectId;\n\nuse crate::port::Port;\n\nuse std::sync::Weak;\n\n\n\npub use handle::Handle;\n\npub use null::{create_null_service, NullService};\n\npub type MethodId = u32;\n\n\n\n/// Exporter sides's interface to the service object. 
This will be implemented\n\n/// by each service trait's unique wrapper in the macro\n", "file_path": "remote-trait-object/src/service.rs", "rank": 81, "score": 36140.74896630648 }, { "content": "}\n\n\n\nimpl Handle {\n\n /// This method is the core of Handle, which serves as a \"call stub\" for the service trait's method.\n\n /// It carries out user's remote call in a generic way.\n\n /// Invoking this method is role of the macro, by putting appropriate instantiation of this generic\n\n /// for each service trait's method, according to the method signature of each.\n\n pub fn call<F: SerdeFormat, S: serde::Serialize, D: serde::de::DeserializeOwned>(\n\n &self,\n\n method: MethodId,\n\n args: &S,\n\n ) -> D {\n\n assert_ne!(\n\n self.id, NULL_ID,\n\n \"You invoked a method of a null proxy object.\"\n\n );\n\n\n\n super::serde_support::port_thread_local::set_port(self.port.clone());\n\n let args = F::to_vec(args).unwrap();\n\n let packet = Packet::new_request(self.id, method, &args);\n", "file_path": "remote-trait-object/src/service/handle.rs", "rank": 82, "score": 34947.305824677715 }, { "content": "use super::*;\n\n\n\n/// NullServive is the only actual service trait that remote-trait-object provides by default.\n\n/// It will be useful when you want to establish a remote-trait-object connection with with_initial_service(),\n\n/// but such initial service is needed by only one side.\n", "file_path": "remote-trait-object/src/service/null.rs", "rank": 83, "score": 34946.230685743154 }, { "content": "use super::MethodId;\n\nuse linkme::distributed_slice;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::collections::{HashMap, HashSet};\n\n\n\npub const ID_ORDERING: std::sync::atomic::Ordering = std::sync::atomic::Ordering::SeqCst;\n\npub type MethodIdAtomic = std::sync::atomic::AtomicU32;\n\n\n\n// linkme crate smartly collects all the registrations generated by the proc-macro\n\n// into a sinlge array in the link time.\n\n// Note that too long linkme-related 
variable name would cause serious compiler error in MacOS\n\n// So we deliberately make it have a short name\n\n\n\n// Id of methods in services.\n\n// Note that here the two strings mean (trait name, method name)\n\n// Also you can skip calling this, then the method id will be set up for default value\n\n// decided by the order of declaration.\n", "file_path": "remote-trait-object/src/service/id.rs", "rank": 84, "score": 34944.48917259815 }, { "content": "use super::*;\n\nuse crate::forwarder::NULL_ID;\n\nuse crate::packet::Packet;\n\nuse crate::raw_exchange::HandleToExchange;\n\nuse crate::service::{MethodId, SerdeFormat};\n\n\n\n/// Proxy service will carry this.\n\n#[derive(Debug)]\n\npub struct Handle {\n\n pub id: ServiceObjectId,\n\n pub port: Weak<dyn Port>,\n\n}\n\n\n\nimpl Handle {\n\n pub fn new(imported_id: HandleToExchange, port: Weak<dyn Port>) -> Self {\n\n Handle {\n\n id: imported_id.0,\n\n port,\n\n }\n\n }\n", "file_path": "remote-trait-object/src/service/handle.rs", "rank": 85, "score": 34943.80828328084 }, { "content": " }\n\n}\n\npub struct NullServiceRwLockDispatcher {}\n\nimpl NullServiceRwLockDispatcher {\n\n fn new(_object: std::sync::Arc<parking_lot::RwLock<dyn NullService>>) -> Self {\n\n Self {}\n\n }\n\n}\n\nimpl crate::macro_env::Dispatch for NullServiceRwLockDispatcher {\n\n fn dispatch_and_call(&self, _method: crate::macro_env::MethodId, _args: &[u8]) -> Vec<u8> {\n\n panic!(\"Invalid remote-trait-object call. 
Fatal Error.\")\n\n }\n\n}\n\nimpl crate::macro_env::IntoSkeleton<dyn NullService>\n\n for std::sync::Arc<parking_lot::RwLock<dyn NullService>>\n\n{\n\n fn into_skeleton(self) -> crate::macro_env::Skeleton {\n\n crate::macro_env::create_skeleton(std::sync::Arc::new(NullServiceRwLockDispatcher::new(\n\n self,\n\n )))\n", "file_path": "remote-trait-object/src/service/null.rs", "rank": 86, "score": 34943.8045546085 }, { "content": "}\n\nimpl crate::macro_env::IntoSkeleton<dyn NullService> for Box<dyn NullService> {\n\n fn into_skeleton(self) -> crate::macro_env::Skeleton {\n\n crate::macro_env::create_skeleton(std::sync::Arc::new(NullServiceBoxDispatcher::new(self)))\n\n }\n\n}\n\npub struct NullServiceArcDispatcher {}\n\nimpl NullServiceArcDispatcher {\n\n fn new(_object: std::sync::Arc<dyn NullService>) -> Self {\n\n Self {}\n\n }\n\n}\n\nimpl crate::macro_env::Dispatch for NullServiceArcDispatcher {\n\n fn dispatch_and_call(&self, _method: crate::macro_env::MethodId, _args: &[u8]) -> Vec<u8> {\n\n panic!(\"Invalid remote-trait-object call. Fatal Error.\")\n\n }\n\n}\n\nimpl crate::macro_env::IntoSkeleton<dyn NullService> for std::sync::Arc<dyn NullService> {\n\n fn into_skeleton(self) -> crate::macro_env::Skeleton {\n\n crate::macro_env::create_skeleton(std::sync::Arc::new(NullServiceArcDispatcher::new(self)))\n", "file_path": "remote-trait-object/src/service/null.rs", "rank": 87, "score": 34943.431230278526 }, { "content": " MacroArgs {\n\n serde_format: self.serde_format.unwrap_or_else(|| {\n\n syn::parse2(quote! 
{remote_trait_object::macro_env::DefaultSerdeFormat}).unwrap()\n\n }),\n\n no_proxy: self.no_proxy.map(|_| true).unwrap_or(false),\n\n no_skeleton: self.no_skeleton.map(|_| true).unwrap_or(false),\n\n }\n\n }\n\n}\n\n\n\nimpl Parse for MacroArgsRaw {\n\n fn parse(input: ParseStream) -> syn::parse::Result<Self> {\n\n let mut result = MacroArgsRaw::default();\n\n let args = Punctuated::<syn::Expr, Token![,]>::parse_terminated(input)?;\n\n for arg in args {\n\n result.update(quote! {#arg})?;\n\n }\n\n Ok(result)\n\n }\n\n}\n\n\n", "file_path": "remote-trait-object-macro/src/service.rs", "rank": 88, "score": 34942.936489269465 }, { "content": " let response = self.port.upgrade().unwrap().call(packet.view());\n\n let result = F::from_slice(response.data()).unwrap();\n\n super::serde_support::port_thread_local::remove_port();\n\n result\n\n }\n\n}\n\n\n\nimpl Drop for Handle {\n\n /// Dropping handle will be signaled to the exporter (_delete request_), so that it can remove the service object as well.\n\n fn drop(&mut self) {\n\n if self.id != NULL_ID {\n\n self.port\n\n .upgrade()\n\n .expect(\"You must drop the proxy object before the RTO context is dropped\")\n\n .delete_request(self.id);\n\n }\n\n }\n\n}\n", "file_path": "remote-trait-object/src/service/handle.rs", "rank": 89, "score": 34941.11368401195 }, { "content": " }\n\n } else {\n\n Err(syn::parse::Error::new_spanned(ts, \"Unsupported argument\"))\n\n };\n\n }\n\n\n\n let arg: SingleArg<TokenStream2> = syn::parse2(ts.clone())?;\n\n if arg.arg_name == quote::format_ident!(\"serde_format\") {\n\n let value = syn::parse2(arg.arg_value)?;\n\n if self.serde_format.replace(value).is_some() {\n\n Err(syn::parse::Error::new_spanned(ts, \"Duplicated arguments\"))\n\n } else {\n\n Ok(())\n\n }\n\n } else {\n\n Err(syn::parse::Error::new_spanned(ts, \"Unsupported argument\"))\n\n }\n\n }\n\n\n\n fn fill_default_values(self) -> MacroArgs {\n", "file_path": "remote-trait-object-macro/src/service.rs", "rank": 90, "score": 
34939.54584754339 }, { "content": "use proc_macro2::TokenStream as TokenStream2;\n\nuse syn::parse::{Parse, ParseStream};\n\nuse syn::punctuated::Punctuated;\n\nuse syn::Token;\n\n\n\npub mod dispatcher;\n\npub mod from_skeleton;\n\npub mod id;\n\npub mod proxy;\n\n\n", "file_path": "remote-trait-object-macro/src/service.rs", "rank": 91, "score": 34939.278697463036 }, { "content": " }\n\n}\n\n#[derive(Debug)]\n\npub struct NullServiceProxy {\n\n handle: crate::macro_env::Handle,\n\n}\n\nimpl NullService for NullServiceProxy {}\n\nimpl crate::macro_env::Service for NullServiceProxy {}\n\nimpl crate::macro_env::ImportProxy<dyn NullService> for Box<dyn NullService> {\n\n fn import_proxy(\n\n port: std::sync::Weak<dyn crate::macro_env::Port>,\n\n handle: crate::macro_env::HandleToExchange,\n\n ) -> Self {\n\n Box::new(NullServiceProxy {\n\n handle: crate::macro_env::Handle::new(handle, port),\n\n })\n\n }\n\n}\n\nimpl crate::macro_env::ImportProxy<dyn NullService> for std::sync::Arc<dyn NullService> {\n\n fn import_proxy(\n", "file_path": "remote-trait-object/src/service/null.rs", "rank": 92, "score": 34939.04210834336 }, { "content": " port: std::sync::Weak<dyn crate::macro_env::Port>,\n\n handle: crate::macro_env::HandleToExchange,\n\n ) -> Self {\n\n std::sync::Arc::new(NullServiceProxy {\n\n handle: crate::macro_env::Handle::new(handle, port),\n\n })\n\n }\n\n}\n\nimpl crate::macro_env::ImportProxy<dyn NullService>\n\n for std::sync::Arc<parking_lot::RwLock<dyn NullService>>\n\n{\n\n fn import_proxy(\n\n port: std::sync::Weak<dyn crate::macro_env::Port>,\n\n handle: crate::macro_env::HandleToExchange,\n\n ) -> Self {\n\n std::sync::Arc::new(parking_lot::RwLock::new(NullServiceProxy {\n\n handle: crate::macro_env::Handle::new(handle, port),\n\n }))\n\n }\n\n}\n", "file_path": "remote-trait-object/src/service/null.rs", "rank": 93, "score": 34938.15987081291 }, { "content": "struct Steinway;\n\nimpl Service for Steinway {}\n\nimpl Piano for Steinway {\n\n fn play(&mut 
self) {\n\n println!(\"Do Re Mi\");\n\n }\n\n}\n\n```\n\n**/\n", "file_path": "remote-trait-object/src/service.rs", "rank": 94, "score": 34935.27605280238 }, { "content": " \"Service trait must have only methods\",\n\n )\n\n .to_compile_error())\n\n }\n\n };\n\n let id_ident = super::id::id_method_ident(source_trait, method);\n\n\n\n let mut the_method = syn::parse_str::<syn::ImplItemMethod>(\"fn dummy() -> () {}\").unwrap();\n\n the_method.sig = method.sig.clone();\n\n let mut arguments_in_tuple = syn::ExprTuple {\n\n attrs: Vec::new(),\n\n paren_token: syn::token::Paren(Span::call_site()),\n\n elems: syn::punctuated::Punctuated::new(),\n\n };\n\n for arg in &method.sig.inputs {\n\n match arg {\n\n syn::FnArg::Receiver(_) => continue, // &self\n\n syn::FnArg::Typed(pattern) => {\n\n if let syn::Pat::Ident(the_arg) = &*pattern.pat {\n\n arguments_in_tuple\n", "file_path": "remote-trait-object-macro/src/service/proxy.rs", "rank": 95, "score": 24.940169260733153 }, { "content": " \"Service trait must have only methods\",\n\n )\n\n .to_compile_error())\n\n }\n\n };\n\n let id_ident = super::id::id_method_ident(source_trait, method);\n\n\n\n let mut the_method = syn::parse_str::<syn::ImplItemMethod>(\"fn dummy() -> () {}\").unwrap();\n\n the_method.sig = method.sig.clone();\n\n let mut arguments_in_tuple = syn::ExprTuple {\n\n attrs: Vec::new(),\n\n paren_token: syn::token::Paren(Span::call_site()),\n\n elems: syn::punctuated::Punctuated::new(),\n\n };\n\n for arg in &method.sig.inputs {\n\n match arg {\n\n syn::FnArg::Receiver(_) => continue, // &self\n\n syn::FnArg::Typed(pattern) => {\n\n if let syn::Pat::Ident(the_arg) = &*pattern.pat {\n\n arguments_in_tuple\n", "file_path": "remote-trait-object-macro/src/service/from_skeleton.rs", "rank": 96, "score": 24.940169260733157 }, { "content": " #[doc(hidden)]\n\n /// This type is generated by the remote-trait-object macro.\n\n /// It should never be used directly by you, so please ignore it.\n\n pub struct #struct_ident 
{\n\n handle: #env_path::Handle\n\n }\n\n };\n\n let mut imported_struct_impl = syn::parse2::<syn::ItemImpl>(quote! {\n\n impl #trait_ident for #struct_ident {\n\n }\n\n })\n\n .unwrap();\n\n let serde_format = &args.serde_format;\n\n\n\n for item in source_trait.items.iter() {\n\n let method = match item {\n\n syn::TraitItem::Method(x) => x,\n\n non_method => {\n\n return Err(syn::Error::new_spanned(\n\n non_method,\n", "file_path": "remote-trait-object-macro/src/service/proxy.rs", "rank": 98, "score": 18.951041368919842 }, { "content": " #[doc(hidden)]\n\n /// This type is generated by the remote-trait-object macro.\n\n /// It should never be used directly by you, so please ignore it.\n\n pub struct #struct_ident {\n\n skeleton: #env_path::Skeleton\n\n }\n\n };\n\n let mut imported_struct_impl = syn::parse2::<syn::ItemImpl>(quote! {\n\n impl #trait_ident for #struct_ident {\n\n }\n\n })\n\n .unwrap();\n\n let serde_format = &args.serde_format;\n\n\n\n for item in source_trait.items.iter() {\n\n let method = match item {\n\n syn::TraitItem::Method(x) => x,\n\n non_method => {\n\n return Err(syn::Error::new_spanned(\n\n non_method,\n", "file_path": "remote-trait-object-macro/src/service/from_skeleton.rs", "rank": 99, "score": 18.951041368919842 } ]
Rust
src/response.rs
ghairfield/TinyHTTP
9559caa86bb4afb75e6ca2d5a912469b6f881650
use chrono::{DateTime, Local, TimeZone, Utc}; use std::collections::HashMap; use std::fs::{File, Metadata}; use std::io::prelude::*; use std::path::Path; use std::time::SystemTime; use crate::configuration::CONFIG; use crate::protocol::*; use crate::request; #[derive(Debug, Clone, PartialEq)] pub struct ResponseError { pub message: String, pub line: u32, pub column: u32, } pub struct Response { pub status: StatusCode, pub version: RequestVersion, pub fields: HashMap<String, String>, pub content: Vec<u8>, } impl Default for Response { fn default() -> Self { Response { status: StatusCode::Unknown, version: RequestVersion::HTTP1, fields: HashMap::<String, String>::new(), content: Vec::<u8>::new(), } } } impl Response { pub fn new(h: &request::Header) -> Self { let mut response = Response::default(); if !h.is_valid() { response.status = StatusCode::BadRequest; return response; } let m = h.get_method(); match m { RequestMethod::Get => response.get_request(&h), RequestMethod::Head => response.head_request(&h), RequestMethod::Post => response.post_request(&h), _ => response.unsupported_request(&h), } response } pub fn respond(&mut self) -> Vec<u8> { let mut resp_header; let mut r; if self.status == StatusCode::BadRequest || self.status == StatusCode::Unauthorized || self.status == StatusCode::Forbidden || self.status == StatusCode::NotFound { r = format! 
{ "{} {}\r\n\r\n", version_to_string(&self.version), status_to_string(&self.status) }; resp_header = r.as_bytes().to_vec(); } else { r = format!( "{} {}\r\n", version_to_string(&self.version), status_to_string(&self.status) ); for (key, value) in &self.fields { r.push_str(&format!("{}{}\r\n", key, value)); } r.push_str("\r\n"); resp_header = r.as_bytes().to_vec(); if !self.content.is_empty() { resp_header.append(&mut self.content); } } resp_header } fn get_last_modified(meta: &Metadata) -> Result<String, ResponseError> { let lm = match meta.modified() { Ok(lm) => lm, Err(_) => { return Err(ResponseError { message: "Could not get modifed data on file".to_string(), line: line!(), column: column!(), }) } }; let sec_from_epoch = lm.duration_since(SystemTime::UNIX_EPOCH).unwrap(); let local_dt = Local.timestamp(sec_from_epoch.as_secs() as i64, 0); let utc_dt: DateTime<Utc> = DateTime::from(local_dt); Ok(format!("{}", utc_dt.format("%a, %d %b %Y %H:%M:%S GMT"))) } fn get_resource(&mut self, p: &str) -> Result<(), ResponseError> { let doc_root = &CONFIG.doc_root; let index = match &CONFIG.root_file { Some(x) => &x, None => match &CONFIG.default_root_file { Some(x) => x, _ => { return Err(ResponseError { message: "Could not find a default root file!".to_string(), line: line!(), column: column!(), }) } }, }; if p == "/" { let mut file = match File::open(&format!("{}/{}", doc_root, index)) { Ok(file) => file, Err(x) => { return Err(ResponseError { message: format!("Could not open file! 
{}", x), line: line!(), column: column!(), }) } }; let meta = match file.metadata() { Ok(meta) => meta, Err(_) => { return Err(ResponseError { message: "Could not get meta data on file".to_string(), line: line!(), column: column!(), }) } }; if let Ok(time) = Response::get_last_modified(&meta) { self.fields .insert(field_to_string(&RequestField::LastModified), time); } match file.read_to_end(&mut self.content) { Ok(size) => { self.fields.insert( field_to_string(&RequestField::ContentLength), size.to_string(), ); } Err(x) => { return Err(ResponseError { message: format!("Could not read file! {}", x), line: line!(), column: column!(), }) } }; } else { let p = format!("{}{}", CONFIG.doc_root, p); let path = Path::new(&p); println!("Path: {:?}", p); if path.exists() { let mut file = match File::open(path) { Ok(file) => file, Err(x) => { return Err(ResponseError { message: format!("Could not open file! {}", x), line: line!(), column: column!(), }) } }; let meta = match file.metadata() { Ok(meta) => meta, Err(_) => { return Err(ResponseError { message: "Could not get meta data on file".to_string(), line: line!(), column: column!(), }) } }; if let Ok(time) = Response::get_last_modified(&meta) { self.fields .insert(field_to_string(&RequestField::LastModified), time); } match file.read_to_end(&mut self.content) { Ok(size) => { self.fields.insert( field_to_string(&RequestField::ContentLength), size.to_string(), ); } Err(x) => { return Err(ResponseError { message: format!("Could not read file! 
{}", x), line: line!(), column: column!(), }) } } } } Ok(()) } fn get_request(&mut self, req: &request::Header) { match self.get_resource(req.get_path()) { Ok(_) => self.status = StatusCode::OK, Err(_) => { self.status = StatusCode::NotFound; } } } fn head_request(&mut self, req: &request::Header) { match self.get_resource(req.get_path()) { Ok(_) => self.status = StatusCode::OK, Err(_) => { self.status = StatusCode::NotFound; } } self.content.clear(); } fn post_request(&mut self, _req: &request::Header) { todo!() } fn unsupported_request(&mut self, _req: &request::Header) { todo!() } }
use chrono::{DateTime, Local, TimeZone, Utc}; use std::collections::HashMap; use std::fs::{File, Metadata}; use std::io::prelude::*; use std::path::Path; use std::time::SystemTime; use crate::configuration::CONFIG; use crate::protocol::*; use crate::request; #[derive(Debug, Clone, PartialEq)] pub struct ResponseError { pub message: String, pub line: u32, pub column: u32, } pub struct Response { pub status: StatusCode, pub version: RequestVersion, pub fields: HashMap<String, String>, pub content: Vec<u8>, } impl Default for Response { fn default() -> Self { Response { status: StatusCode::Unknown, version: RequestVersion::HTTP1, fields: HashMap::<String, String>::new(), content: Vec::<u8>::new(), } } } impl Response { pub fn new(h: &request::Header) -> Self { let mut response = Response::default(); if !h.is_valid() { response.status = StatusCode::BadRequest; return response; } let m = h.get_method(); match m { RequestMethod::Get => response.get_request(&h), RequestMethod::Head => response.head_request(&h), RequestMethod::Post => response.post_request(&h), _ => response.unsupported_request(&h), } response } pub fn respond(&mut self) -> Vec<u8> { let mut resp_header; let mut r; if self.status == StatusCode::BadRequest || self.status == StatusCode::Unauthorized || self.status == StatusCode::Forbidden || self.status == StatusCode::NotFound { r = format! 
{ "{} {}\r\n\r\n", version_to_string(&self.version), status_to_string(&self.status) }; resp_header = r.as_bytes().to_vec(); } else { r = format!( "{} {}\r\n", version_to_string(&self.version), status_to_string(&self.status) ); for (key, value) in &self.fields { r.push_str(&format!("{}{}\r\n", key, value)); } r.push_str("\r\n"); resp_header = r.as_bytes().to_vec(); if !self.content.is_empty() { resp_header.append(&mut self.content); } } resp_header } fn get_last_modified(meta: &Metadata) -> Result<String, ResponseError> { let lm = match meta.modified() { Ok(lm) => lm, Err(_) => { return Err(ResponseError { message: "Could not get modifed data on file".to_string(), line: line!(), column: column!(), }) } }; let sec_from_epoch = lm.duration_since(SystemTime::UNIX_EPOCH).unwrap(); let local_dt = Local.timestamp(sec_from_epoch.as_secs() as i64, 0); let utc_dt: DateTime<Utc> = DateTime::from(local_dt); Ok(format!("{}", utc_dt.format("%a, %d %b %Y %H:%M:%S GMT"))) } fn get_resource(&mut self, p: &str) -> Result<(), ResponseError> { let doc_root = &CONFIG.doc_root; let index = match &CONFIG.root_file { Some(x) => &x, None => match &CONFIG.default_root_file { Some(x) => x, _ => { return Err(ResponseError { message: "Could not find a default root file!".to_string(), line: line!(), column: column!(), }) } }, }; if p == "/" { let mut file = match File::open(&format!("{}/{}", doc_root, index)) { Ok(file) => file, Err(x) => { return Err(ResponseError { message: format!("Could not open file! 
{}", x), line: line!(), column: column!(), }) } }; let meta = match file.metadata() { Ok(meta) => meta, Err(_) => { return Err(ResponseError { message: "Could not get meta data on file".to_string(), line: line!(), column: column!(), }) } }; if let Ok(time) = Response::get_last_modified(&meta) { self.fields .insert(field_to_string(&RequestField::LastModified), time); } match file.read_to_end(&mut self.content) { Ok(size) => { self.fields.insert( field_to_string(&RequestField::ContentLength), size.to_string(), ); } Err(x) => { return Err(ResponseError { message: format!("Could not read file! {}", x), line: line!(), column: column!(), }) } }; } else { let p = format!("{}{}", CONFIG.doc_root, p); let path = Path::new(&p); println!("Path: {:?}", p); if path.exists() { let mut file = match File::open(path) { Ok(file) => file, Err(x) => { return Err(ResponseError { message: format!("Could not open file! {}", x), line: line!(), column: column!(), }) } }; let meta = match file.metadata() { Ok(meta) => meta, Err(_) => { return Err(ResponseError { message: "Could not get meta data on file".to_string(), line: line!(), column: column!(), }) } }; if let Ok(time) = Response::get_last_modified(&meta) { self.fields .insert(field_to_string(&RequestField::LastModified), time); } match file.read_to_end(&mut self.content) { Ok(size) => { self.fields.insert( field_to_string(&RequestField::ContentLength), size.to_string(), ); } Err(x) => { return Err(ResponseError { message: format!("Could not read file! {}", x), line: line!(), column: column!(), }) } } } } Ok(()) } fn get_request(&mut self, req: &request::Header) { match self.get_resource(req.get_path()) { Ok(_) => self.status = StatusCode::OK, Err(_) => { self.status = StatusCode::NotFound; } } } fn head_request(&mut self, req: &request::Header) {
fn post_request(&mut self, _req: &request::Header) { todo!() } fn unsupported_request(&mut self, _req: &request::Header) { todo!() } }
match self.get_resource(req.get_path()) { Ok(_) => self.status = StatusCode::OK, Err(_) => { self.status = StatusCode::NotFound; } } self.content.clear(); }
function_block-function_prefix_line
[ { "content": "/// Get the string representation of a HTTP version\n\npub fn version_to_string(r: &RequestVersion) -> String {\n\n match r {\n\n RequestVersion::SimpleRequest => \"Simple Request\".to_string(),\n\n RequestVersion::HTTP1 => \"HTTP/1.0\".to_string(),\n\n RequestVersion::HTTP11 => \"HTTP/1.1\".to_string(),\n\n RequestVersion::Unknown => \"Unknown\".to_string(),\n\n }\n\n}\n\n\n", "file_path": "src/protocol.rs", "rank": 0, "score": 104449.14936664322 }, { "content": "/// Get the HTTP field type as a string\n\npub fn field_to_string(r: &RequestField) -> String {\n\n match r {\n\n RequestField::Allow => \"Allow: \".to_string(),\n\n RequestField::Authorization => \"Authorization: \".to_string(),\n\n RequestField::ContentEncoding => \"Content-Encoding: \".to_string(),\n\n RequestField::ContentLength => \"Content-Length: \".to_string(),\n\n RequestField::ContentType => \"Content-Type: \".to_string(),\n\n RequestField::Date => \"Date: \".to_string(),\n\n RequestField::Expires => \"Expires: \".to_string(),\n\n RequestField::FromField => \"From: \".to_string(),\n\n RequestField::IfModifiedSince => \"If-Modified-Since: \".to_string(),\n\n RequestField::LastModified => \"Last-Modified: \".to_string(),\n\n RequestField::Location => \"Location: \".to_string(),\n\n RequestField::Pragma => \"Pragma: \".to_string(),\n\n RequestField::Referer => \"Refer: \".to_string(),\n\n RequestField::Server => \"Server: \".to_string(),\n\n RequestField::UserAgent => \"User-Agent: \".to_string(),\n\n RequestField::WwwAuthenticate => \"WWW-Authenticate: \".to_string(),\n\n RequestField::Unknown => \"Unknown: \".to_string(),\n\n }\n\n}\n", "file_path": "src/protocol.rs", "rank": 1, "score": 104449.07159060675 }, { "content": "/// Get the string representation of a request type.\n\npub fn method_to_string(r: &RequestMethod) -> String {\n\n match r {\n\n RequestMethod::Get => \"GET\".to_string(),\n\n RequestMethod::Head => \"HEAD\".to_string(),\n\n RequestMethod::Post => 
\"POST\".to_string(),\n\n RequestMethod::Put => \"PUT\".to_string(),\n\n RequestMethod::Link => \"LINK\".to_string(),\n\n RequestMethod::Unlink => \"UNLINK\".to_string(),\n\n RequestMethod::Delete => \"DELETE\".to_string(),\n\n RequestMethod::Unknown => \"Unknown\".to_string(),\n\n }\n\n}\n\n\n", "file_path": "src/protocol.rs", "rank": 2, "score": 81851.38640430899 }, { "content": "/// Main entry point of the server. All configuration is done via `Config.toml`\n\n/// located in the root directory.\n\npub fn tiny_http() -> Result<()> {\n\n //println!(\"Config is: {:?}\", *CONFIG);\n\n\n\n match listen() {\n\n Ok(_) => Ok(()),\n\n Err(_) => Err(TinyHttpError),\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 3, "score": 76586.33715109073 }, { "content": "pub fn status_to_string(s: &StatusCode) -> String {\n\n match s {\n\n StatusCode::OK => \"200 OK\".to_string(),\n\n StatusCode::Created => \"201 Created\".to_string(),\n\n StatusCode::Accepted => \"202 Accepted\".to_string(),\n\n StatusCode::NoContent => \"204 No Content\".to_string(),\n\n StatusCode::MovedPermanently => \"301 Moved Permanently\".to_string(),\n\n StatusCode::MovedTemporarily => \"202 Moved Temporarily\".to_string(),\n\n StatusCode::NotModified => \"304 Not Modified\".to_string(),\n\n StatusCode::BadRequest => \"400 Bad Request\".to_string(),\n\n StatusCode::Unauthorized => \"401 Unauthorized\".to_string(),\n\n StatusCode::Forbidden => \"403 Forbidden\".to_string(),\n\n StatusCode::NotFound => \"404 Not Found\".to_string(),\n\n StatusCode::InternalServerError => \"500 Internal Server Error\".to_string(),\n\n StatusCode::NotImplemented => \"501 Not Implemented\".to_string(),\n\n StatusCode::BadGateway => \"502 Bad Gateway\".to_string(),\n\n StatusCode::ServiceUnavailable => \"503 Service Unavailable\".to_string(),\n\n _ => \"Unknown\".to_string(),\n\n }\n\n}\n\n\n", "file_path": "src/protocol.rs", "rank": 4, "score": 73495.84191015222 }, { "content": "// Listen for incomming connections from a 
client. Once a connection is\n\n// established a new thread (per connection).\n\nfn listen() -> Result<()> {\n\n let listen = TcpListener::bind(format!(\"{}:{}\", CONFIG.host, CONFIG.port)).unwrap();\n\n\n\n // TODO\n\n // listen.set_ttl(X)\n\n // listen.set_nonblocking(true).expect(\"Cannot set non-blocking\")\n\n\n\n for stream in listen.incoming() {\n\n match stream {\n\n Ok(stream) => {\n\n thread::spawn(move || {\n\n new_connection(stream);\n\n });\n\n }\n\n Err(e) => {\n\n // TODO log error\n\n println!(\"Error connecting to client! {}\", e);\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 5, "score": 56494.962293875855 }, { "content": "fn new_connection(mut conn: TcpStream) {\n\n println!(\"New connection from {}\", conn.peer_addr().unwrap());\n\n\n\n let mut buf = [0_u8; 2048];\n\n let result = conn.read(&mut buf);\n\n\n\n match result {\n\n Ok(_size) => {\n\n // TODO Who knows if we are going to need size yet??\n\n let header = Header::new(&buf);\n\n let mut res = Response::new(&header);\n\n if let Some(x) = CONFIG.print_header_information {\n\n if x {\n\n header.print();\n\n }\n\n }\n\n let r = res.respond();\n\n conn.write_all(&r).unwrap();\n\n conn.flush().unwrap();\n\n println!(\"----Responded----\");\n", "file_path": "src/lib.rs", "rank": 6, "score": 46812.33961379113 }, { "content": "type Result<T> = std::result::Result<T, TinyHttpError>;\n\n\n\npub struct TinyHttpError;\n\n\n", "file_path": "src/lib.rs", "rank": 7, "score": 37163.465222334264 }, { "content": "fn main() {\n\n match tiny_http::tiny_http() {\n\n Ok(_) => (),\n\n Err(_) => panic!(\"An error occured in the server!\"),\n\n }\n\n}\n", "file_path": "examples/main.rs", "rank": 8, "score": 28030.788149148102 }, { "content": "\n\n // If we get here the request is valid\n\n header.valid = true;\n\n header\n\n }\n\n\n\n /// Print the contents of the header field\n\n /// in plain text to stdout. 
Used for development\n\n pub fn print(&self) {\n\n let method = protocol::method_to_string(&self.method);\n\n let version = protocol::version_to_string(&self.version);\n\n\n\n println!(\n\n \"Request Line: {}, Path: {}, Version {}\",\n\n method, self.path, version\n\n );\n\n\n\n println!(\"---- Known Fields ----\");\n\n for (key, value) in &self.fields {\n\n let k = protocol::field_to_string(key);\n", "file_path": "src/request.rs", "rank": 15, "score": 21.614877348404175 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Config {\n\n fn new() -> Self {\n\n let mut config = Config::default();\n\n\n\n let mut file = match File::open(\"Config.toml\") {\n\n Ok(file) => file,\n\n Err(_) => return config,\n\n };\n\n\n\n let mut c = String::new();\n\n file.read_to_string(&mut c).unwrap();\n\n\n\n config = toml::from_str(&c).unwrap();\n\n\n\n config\n\n }\n\n}\n", "file_path": "src/configuration.rs", "rank": 17, "score": 20.747372597395245 }, { "content": "/// This is the simple request, other fields and definitions are\n\n/// optional and augment the request.\n\n#[derive(Debug, Clone)]\n\npub struct ParsingError {\n\n pub message: String,\n\n pub line: u32,\n\n pub column: u32,\n\n}\n\n\n\n/// When a request is initiated, the contents of\n\n/// that request are stored here.\n\n///\n\n/// If `valid` is false, then there is no guarantee\n\n/// what else is valid in the header. 
Some, all or\n\n/// none of the fields may be completed, but they\n\n/// are guaranteed to be initialized.\n\n///\n\n/// Also if `request_version` is RequestVersion::SimpleRequest\n\n/// then the only attributes here that will of been implemented\n\n/// are `method`, `version` and `path`.\n", "file_path": "src/request.rs", "rank": 19, "score": 20.59270117263395 }, { "content": " pub root_file: Option<String>,\n\n pub max_buffer: Option<usize>,\n\n pub custom_404: Option<String>,\n\n pub print_header_information: Option<bool>,\n\n}\n\n\n\n/// Default program options\n\nimpl Default for Config {\n\n fn default() -> Self {\n\n Config {\n\n host: \"127.0.0.1\".to_string(),\n\n port: 8080,\n\n doc_root: \"http\".to_string(),\n\n image_list: Vec::<String>::new(),\n\n file_list: Vec::<String>::new(),\n\n default_root_file: Some(\"index.html\".to_string()),\n\n root_file: None,\n\n max_buffer: Some(2048),\n\n custom_404: None,\n\n print_header_information: Some(false),\n", "file_path": "src/configuration.rs", "rank": 20, "score": 19.667348734006456 }, { "content": "\n\n// Create a empty header\n\nimpl Default for Header {\n\n fn default() -> Self {\n\n Header {\n\n valid: false,\n\n method: protocol::RequestMethod::Unknown,\n\n version: protocol::RequestVersion::Unknown,\n\n path: String::new(),\n\n fields: HashMap::new(),\n\n unknown_fields: HashMap::new(),\n\n post_fields: HashMap::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl Header {\n\n pub fn new(buf: &[u8]) -> Self {\n\n let request = str::from_utf8(&buf).unwrap().to_string();\n\n let mut header = Header::default();\n", "file_path": "src/request.rs", "rank": 21, "score": 18.753664377292985 }, { "content": "### What didn't work (or how I could of planned better)\n\n\n\nI started out working with file as strings using the `read_to_string` method.\n\nInitially this is how I brought together all of the headers and content, a \n\nlarge string that I then coverted to `u8` by using `as_btyes`. 
It wasn't until\n\na few days before the assignment was due that I figured out it would not work\n\non photos and such. I ended up changing things to `u8` and is something I will\n\nremember anytime I have to work with non-text files.\n\n\n\nAfter our third assignment I realized I didn't know enough about lifetimes to\n\nincorporate them into my project. One area I wish I could expand on would be\n\nthe file system. As it currently stands, each request TinyHTTP querys the file\n\nsystem for the file and read it. Obviously this is poor design and I would of \n\nliked to incorporate a cache for the files. Maybe this is something I will \n\ntackle over the break.\n\n\n\nOne of Rust's infurating issues is time. I've never used a programming language\n\nthat how no convinent way to work with time as Rust. I find this dissapointing\n\nespecially since Rust sells itself as a systems language. Some of the issues \n\nwere that time in HTTP/0.9 uses RFC1123 date and time where HTTP/1.1 uses \n\nRFC2282. \n\n\n\nAnother place I think I went wrong is the error handling. At first I created an\n\nerror `struct` for each part (request, response, lib) but that was a mistake. \n\nIn hindsight I think I would of created one for the entire crate. It made \n\npassing errors a little overly-robust. \n\n\n", "file_path": "README.md", "rank": 26, "score": 12.994180160365289 }, { "content": "use ::std::io::prelude::*;\n\nuse lazy_static::lazy_static;\n\nuse serde::Deserialize;\n\nuse std::fs::File;\n\n\n\nlazy_static! {\n\n // Global configuration variable.\n\n pub static ref CONFIG: Config = Config::new();\n\n}\n\n\n\n/// Configure options. 
These are pulled from Config.toml\n\n/// in the root directory.\n\n#[derive(Deserialize, Debug)]\n\npub struct Config {\n\n pub host: String,\n\n pub port: u16,\n\n pub doc_root: String,\n\n pub image_list: Vec<String>,\n\n pub file_list: Vec<String>,\n\n pub default_root_file: Option<String>,\n", "file_path": "src/configuration.rs", "rank": 27, "score": 12.738772775928801 }, { "content": " /// Get the path of the request\n\n pub fn get_path(&self) -> &str {\n\n &self.path\n\n }\n\n\n\n /// Get the method of the request\n\n pub fn get_method(&self) -> protocol::RequestMethod {\n\n self.method\n\n }\n\n\n\n /*\n\n * I'm not sure what the best way to deal with this is.\n\n *\n\n * Really this function is a `todo!()` function since right now TinyHTTP\n\n * doesn't look at the request header fields.\n\n *\n\n * In the future this is planned to be used.\n\n *\n\n * Get a request header field's value.\n\n #[allow(dead_code)]\n", "file_path": "src/request.rs", "rank": 28, "score": 12.564838042546041 }, { "content": "/// A request of this type would be a HTTP/0.9 request.\n\n#[derive(Debug)]\n\npub struct Header {\n\n /// Is request in valid format?\n\n valid: bool,\n\n /// Request type e.g GET, HEAD, POST\n\n method: protocol::RequestMethod,\n\n /// Request version e.g SimpleRequest, HTTP/1.0, HTTP/1.1\n\n version: protocol::RequestVersion,\n\n /// URI\n\n path: String,\n\n /// HTTP/1.0 Known fields\n\n fields: HashMap<protocol::RequestField, String>,\n\n /// Possible fields from HTTP/1.1 request, non-documented fileds.\n\n /// See [RFC 1945, Section 10. Header Field Definitions]\n\n unknown_fields: HashMap<String, String>,\n\n /// POST fields. 
It should only be used with a POST request\n\n /// See [RFC 1945 Secion 8.3 POST]\n\n post_fields: HashMap<String, String>,\n\n}\n", "file_path": "src/request.rs", "rank": 29, "score": 12.479757560537392 }, { "content": " println!(\"Field: {} -- Value: {}\", k, value);\n\n }\n\n println!(\"---- Unknown Fields ----\");\n\n for (key, value) in &self.unknown_fields {\n\n println!(\"Field: {} -- Value: {}\", key, value);\n\n }\n\n if self.method == protocol::RequestMethod::Post {\n\n println!(\"---- POST Fields ----\");\n\n for (key, value) in &self.post_fields {\n\n println!(\"Name: {} -- Value: {}\", key, value);\n\n }\n\n }\n\n }\n\n\n\n /// Get the validity of the request. If this returns false, then all\n\n /// other header fields *might be* invalid.\n\n pub fn is_valid(&self) -> bool {\n\n self.valid\n\n }\n\n\n", "file_path": "src/request.rs", "rank": 30, "score": 12.421676706627313 }, { "content": "### HTTP/1.0\n\n\n\n- [X] Accept a connection\n\n- [X] Respond to a request\n\n- [X] Close the connection (each time, ignores `Keep-alive`)\n\n- [X] Method Definitions\n\n - [X] GET\n\n - [X] HEAD\n\n - [X] POST\n\n- [ ] Additional Request Methods (extended HTTP/1.0)\n\n - [ ] PUT\n\n - [ ] DELETE\n\n - [ ] LINK\n\n - [ ] UNLINK\n\n- [ ] Status Codes\n\n - [ ] Informational 1xx\n\n - [X] Successful 2xx\n\n - [ ] Redirection 3xx\n\n - [X] Client Error 4xx\n\n - [X] Server Error 5xx\n\n- [ ] Header Fields\n\n - [ ] Allow\n\n - [ ] Authorization\n\n - [ ] Content-Encoding\n\n - [X] Content-Length\n\n - [ ] Content-Type\n\n - [ ] Date\n\n - [ ] Expires\n\n - [ ] From\n\n - [ ] If-Modified-Since\n\n - [X] Last-Modified\n\n - [ ] Location\n\n - [ ] Pragma\n\n - [X] Referer\n\n - [ ] Server\n\n - [X] User-Agent (recorded)\n\n - [ ] WWW-Authenticate\n\n- [ ] Additional Header Field Definitions (extended HTTP/1.0)\n\n - [ ] Accept\n\n - [ ] Accept-Charset\n\n - [ ] Accept-Encoding\n\n - [ ] Accept-Language\n\n - [ ] Content-Language\n\n - [ ] Link\n\n - [ ] MIME-Version\n\n - 
[ ] Retry-After\n\n - [ ] Title\n\n - [ ] URI\n\n\n\n# Learning\n\n\n\n### What worked\n\n\n\nWorking with TCP in Rust is supprising easy on a basic level. There were a few\n\ntimes I refactored the code which made the project more readable, but I think\n\noverall the quality of programming is low. Given more time this wouldn't be an\n\nissue.\n\n\n", "file_path": "README.md", "rank": 32, "score": 10.864250834202483 }, { "content": " pub fn get_header_field(&self, r: protocol::RequestField) -> Option<&str> {\n\n self.fields.get(&r).map(|x| &x[..])\n\n }\n\n */\n\n\n\n // According to RFC1945 any unrecognized header fields are to\n\n // be treated as `Entity-Header` fields. Also the spec allows\n\n // for experimental headers as long as both parties in\n\n // communication recognize them.\n\n //\n\n // What ever the field is, we store it. Unknown fields are\n\n // stored separately than known fields.\n\n fn parse_fields(&mut self, parts: &[&str]) {\n\n for i in parts {\n\n let x: Vec<&str> = i.split(\": \").collect();\n\n\n\n if x.len() == 1 {\n\n // Is there a better way to deal with this??\n\n // We could be here for 2 reasons:\n\n // 1: This is a POST request with fields, which we capture.\n", "file_path": "src/request.rs", "rank": 33, "score": 9.85483338110783 }, { "content": " // extended HTTP/1.0\n\n // exclusive HTTP/1.1\n\n // others\n\n Unknown,\n\n}\n\n\n\n/// Response codes for HTML/1.0\n\n#[allow(dead_code)]\n\n#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]\n\npub enum StatusCode {\n\n OK = 200,\n\n Created = 201,\n\n Accepted = 202,\n\n NoContent = 204,\n\n MovedPermanently = 301,\n\n MovedTemporarily = 302,\n\n NotModified = 304,\n\n BadRequest = 400,\n\n Unauthorized = 401,\n\n Forbidden = 403,\n\n NotFound = 404,\n\n InternalServerError = 500,\n\n NotImplemented = 501,\n\n BadGateway = 502,\n\n ServiceUnavailable = 503,\n\n Unknown,\n\n}\n\n\n\n/// Get the string representation of a request type.\n", "file_path": "src/protocol.rs", "rank": 35, 
"score": 9.409212685965294 }, { "content": " }\n\n }\n\n }\n\n\n\n // Convert a request field to a known type.\n\n fn field_to_type(f: &str) -> protocol::RequestField {\n\n match f {\n\n \"Allow\" => protocol::RequestField::Allow,\n\n \"Authorization\" => protocol::RequestField::Authorization,\n\n \"Content-Encoding\" => protocol::RequestField::ContentEncoding,\n\n \"Content-Length\" => protocol::RequestField::ContentLength,\n\n \"Content-Type\" => protocol::RequestField::ContentType,\n\n \"Date\" => protocol::RequestField::Date,\n\n \"Expires\" => protocol::RequestField::Expires,\n\n \"From\" => protocol::RequestField::FromField,\n\n \"If-Modified-Since\" => protocol::RequestField::IfModifiedSince,\n\n \"Last-Modified\" => protocol::RequestField::LastModified,\n\n \"Location\" => protocol::RequestField::Location,\n\n \"Pragma\" => protocol::RequestField::Pragma,\n\n \"Referer\" => protocol::RequestField::Referer,\n\n \"Server\" => protocol::RequestField::Server,\n\n \"User-Agent\" => protocol::RequestField::UserAgent,\n\n \"WWW-Authenticate\" => protocol::RequestField::WwwAuthenticate,\n\n _ => protocol::RequestField::Unknown,\n\n }\n\n }\n\n} // impl Header\n", "file_path": "src/request.rs", "rank": 36, "score": 9.003456515757213 }, { "content": " \"UNLINK\" => header.method = protocol::RequestMethod::Unlink,\n\n \"DELETE\" => header.method = protocol::RequestMethod::Delete,\n\n _ => return header,\n\n }\n\n\n\n header.path = method[1].to_string();\n\n\n\n if method.len() == 2 {\n\n header.version = protocol::RequestVersion::SimpleRequest;\n\n } else if method[2] == \"HTTP/1.0\" {\n\n header.version = protocol::RequestVersion::HTTP1;\n\n } else if method[2] == \"HTTP/1.1\" {\n\n header.version = protocol::RequestVersion::HTTP11;\n\n } else {\n\n return header;\n\n }\n\n\n\n // Remove the method line since we parsed it already\n\n parts.remove(0);\n\n header.parse_fields(&parts);\n", "file_path": "src/request.rs", "rank": 37, "score": 8.96852123316868 }, { 
"content": "# TinyHTTP\n\n\n\nCS410P Spring 2021 Tiny Rust HTTP/1.0 Server in Rust.\n\n\n\nGreg Hairfield - CS410P - Spring 2021\n\n\n\nA quasi [HTTP/1.0](https://www.ietf.org/rfc/rfc1945.txt) server written in\n\nRust. \n\n\n\n# Description\n\n\n\nOriginally this was to be a HTTP/0.9 complient server and moved on to create\n\na HTTP/1.0 quasi complient server. A HTTP/0.9 server doesn't really \n\nhave much to it other than a HTML file server and I was looking for more of\n\na challenge. Making the HTTP/1.0 server might of been more than I could chew\n\nat the end of the quarter. While the server does work with `GET`, `POST` and\n\n`HEAD` request (which is all HTTP/1.0 requires) I didn't extend any of the\n\nother request methods. \n\n\n\nThis project is obviously a toy and should never be used in the real world.\n\nIt is meant to be a fun project to learn Rust and get a better understanding\n\nof HTTP and how it works. \n\n\n\n# Usage\n\n\n\nSee the `examples` folder for usage. All configuration is done by the \n\n`Config.toml` file, read the description of options. Basically one needs to \n\n`use tiny_http;` and call `tiny_http::tiny_http()` to run the server.\n\n\n\nThe included `http` folder is for example use. \n\n\n\n# TODO\n\n\n\n### HTTP/0.9\n\n\n\n- [X] Accept a connection\n\n- [X] Respond to a request\n\n- [X] Close the connection\n\n- [X] Method Definitions\n\n - [X] GET\n\n- [X] Fetches resource \n\n\n", "file_path": "README.md", "rank": 38, "score": 8.759418718528757 }, { "content": "/// authentication scheme.\n\n#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]\n\npub enum RequestField {\n\n /// HTTP/1.0\n\n Allow,\n\n Authorization,\n\n ContentEncoding,\n\n ContentLength,\n\n ContentType,\n\n Date,\n\n Expires,\n\n FromField,\n\n IfModifiedSince,\n\n LastModified,\n\n Location,\n\n Pragma,\n\n Referer,\n\n Server,\n\n UserAgent,\n\n WwwAuthenticate,\n", "file_path": "src/protocol.rs", "rank": 39, "score": 8.6914847967289 }, { "content": "//! 
HTTP Request Handler\n\n//!\n\n//! HTTP request module. Parses raw byte reads from the TCP stream\n\n//! and can answer questions about the request. Please see RFC-1945\n\n//! for more information.\n\n//!\n\n//! Greg Hairfield\n\n//! CS410P Rust Programming\n\n//! Spring 2021\n\n//!\n\n\n\nuse std::collections::HashMap;\n\nuse std::str;\n\n\n\nuse crate::protocol;\n\n\n\n/// The standard error that the request parser will produce if there\n\n/// is any problem parsing the request. For the most part, if the\n\n/// request line is bad, then the entire request is bad. A basic\n\n/// request *must* contain: GET /CLRF\n", "file_path": "src/request.rs", "rank": 40, "score": 8.486565527539058 }, { "content": " // 2. An invalid `field: parameter` with no parameter (invalid request)\n\n let field: Vec<&str> = x[0].split('&').collect();\n\n if !field.is_empty() {\n\n for i in &field {\n\n let j: Vec<&str> = i.split('=').collect();\n\n if j.len() == 2 {\n\n // POST field!\n\n self.post_fields.insert(j[0].to_string(), j[1].to_string());\n\n }\n\n }\n\n }\n\n continue;\n\n }\n\n\n\n let field = Header::field_to_type(x[0]);\n\n if field == protocol::RequestField::Unknown {\n\n self.unknown_fields\n\n .insert(x[0].to_string(), x[1].to_string());\n\n } else {\n\n self.fields.insert(field, x[1].to_string());\n", "file_path": "src/request.rs", "rank": 41, "score": 7.603846929370883 }, { "content": "//! HTTP Protocols\n\n//!\n\n//! Greg Hairfield\n\n//! CS410P Rust Programming\n\n//! 
Spring 2021\n\n\n\n/// HTTP request method.\n\n/// Get: Standard request, supply the resource requested\n\n/// Head: Same as GET but do not send content\n\n/// Post: Client is responding with information\n\n#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]\n\npub enum RequestMethod {\n\n // HTTP/1.0 methods\n\n Get,\n\n Head,\n\n Post,\n\n // extended HTTP/1.0 methods\n\n Put,\n\n Delete,\n\n Link,\n", "file_path": "src/protocol.rs", "rank": 42, "score": 7.3059119610530345 }, { "content": "\n\n // A simple request is defined as\n\n // |GET /CRLF|\n\n // Anything less than this is not a valid request\n\n if request.len() < 7 {\n\n return header;\n\n }\n\n\n\n let mut parts: Vec<&str> = request.split(\"\\r\\n\").collect();\n\n let method: Vec<&str> = parts[0].split(' ').collect();\n\n if method.len() < 2 {\n\n return header;\n\n }\n\n\n\n match method[0] {\n\n \"GET\" => header.method = protocol::RequestMethod::Get,\n\n \"HEAD\" => header.method = protocol::RequestMethod::Head,\n\n \"POST\" => header.method = protocol::RequestMethod::Post,\n\n \"PUT\" => header.method = protocol::RequestMethod::Put,\n\n \"LINK\" => header.method = protocol::RequestMethod::Link,\n", "file_path": "src/request.rs", "rank": 43, "score": 7.126390352086553 }, { "content": "/// From: An e-mail address of the requesting user agent. Usually used\n\n/// for robots and is meant for logging\n\n/// If-Modified-Since: Used with GET method, if the requested resource\n\n/// has not been modified since the time specified, return 304,\n\n/// Not-Modified.\n\n/// Last-Modified: The date and time at which the sender believes the\n\n/// resource was last modified.\n\n/// Location: Identifies the exact location of a resource that was\n\n/// identified by the requested URI. 
For 3xx responses the\n\n/// server must indicate the preferred URL\n\n/// Pragma: Implementation specific directives that may apply to any\n\n/// recipient along the request/response chain.\n\n/// Referer: Allows the client to specify the URI of the resource from\n\n/// which the request was obtained.\n\n/// Server: Contains information about the software used by the origin\n\n/// server to handle the request.\n\n/// User-Agent: Request field containing information about the user\n\n/// agent originating the request.\n\n/// WWW-Authenticate: Used with 401 Unauthorized response message. The\n\n/// field consists of at least one challenge that indicates the\n", "file_path": "src/protocol.rs", "rank": 44, "score": 6.449137915437587 }, { "content": "/// of the fields follow. See [RFC 1945](https://www.ietf.org/rfc/rfc1945.txt)\n\n/// for more information.\n\n/// Accept: Lists the set of methods supported by the URI. Ignored of\n\n/// part of a POST request\n\n/// Authorization: A user agent that wishes to authenticate with the\n\n/// server. Mostly happens after a 401 response.\n\n/// Should be in the form: `\"Authorization\" : credentials`\n\n/// Content-Encoding: Indicates what additional content coding has\n\n/// been applied to the resource.\n\n/// Content-Length: The size of the `Entity-Body` less the header\n\n/// information. The size of the payload. In the case of a\n\n/// HEAD request, it represents \"what would of been sent\"\n\n/// Content-Type: The media type of the `Entity-Body`. In the case of\n\n/// HEAD request, it represents \"what would of been sent\"\n\n/// Date: The date and time the message originated. 
See\n\n/// [RFC 1123](https://datatracker.ietf.org/doc/html/rfc1123)\n\n/// [RFC 822](https://datatracker.ietf.org/doc/html/rfc822)\n\n/// for valid date and time formats\n\n/// Expires: The date/time when the entity is stale, or the resource\n\n/// is no longer considered valid.\n", "file_path": "src/protocol.rs", "rank": 45, "score": 5.634568277552102 }, { "content": " Unlink,\n\n // exclusive HTTP/1.1 methods\n\n // others\n\n Unknown,\n\n}\n\n\n\n/// HTTP request version. It comes as either a simple request, HTTP/1.0 or\n\n/// HTTP/1.1. Anything else is invalid.\n\n///\n\n/// Currently TinyHTTP communicates according to the HTTP/1.0 specification.\n\n/// The biggest upgrade to HTTP/1.1 is HTTPS and additional header fields.\n\n#[derive(Debug, PartialEq, Eq, Hash, Clone, Copy)]\n\npub enum RequestVersion {\n\n SimpleRequest, // Essentially HTTP/0.9\n\n HTTP1, // HTTP/1.0\n\n HTTP11, // HTTP/1.1\n\n Unknown,\n\n}\n\n\n\n/// The types of header field definitions we accept. A simple explanation\n", "file_path": "src/protocol.rs", "rank": 46, "score": 5.261278058252593 }, { "content": "//! Todo Move this to lib.rs\n\n//!\n\n//! Greg Hairfield\n\n//! CS410P Rust Programming\n\n//! Spring 2021\n\n\n\nmod configuration;\n\nmod protocol;\n\nmod request;\n\nmod response;\n\n\n\nuse crate::configuration::CONFIG;\n\nuse crate::request::Header;\n\nuse crate::response::Response;\n\nuse std::io::{Read, Write};\n\nuse std::net::{Shutdown, TcpListener, TcpStream};\n\nuse std::thread;\n\n\n", "file_path": "src/lib.rs", "rank": 47, "score": 4.765030652173545 }, { "content": "### What I wish I could of implemented\n\n\n\nAs I said above, a cache would of been a great improvment especially with as \n\nfew files that are currently included. I would of also liked to have better \n\nthread control, such as implementing `Keep-Alive`. 
Another one I would of\n\nliked to implement is `If-Modified-Since` as that would ease server load.\n\n\n\nMany of the fields from the client are recorded in a `HashMap` so they are \n\navailable to the `response` but their functionality is missing. For example \n\n`Referer` and `User-Agent` are just informal.\n\n\n\nI also wanted to implement a thread pool for the project. Currently it spawns\n\na thread per request. Being a toy implementation is is ok, but for any serious\n\nproject a thread pool would be needed. \n\n\n\n# License\n\n\n\nThis project is licensed under the [MIT license](LICENSE). A copy should be\n\nsupplied with the code. \n", "file_path": "README.md", "rank": 48, "score": 4.1449190557837055 }, { "content": "//! Example how to use TinyHTTP\n\n//!\n\n//! Greg Hairfield\n\n//! CS410P Rust Programming\n\n//! Spring 2021\n\n\n\nuse tiny_http;\n\n\n", "file_path": "examples/main.rs", "rank": 49, "score": 2.8606125013938395 }, { "content": " }\n\n Err(e) => {\n\n println!(\n\n \"An error occured while reading the stream! ip: {}, err: {}\",\n\n conn.peer_addr().unwrap(),\n\n e\n\n );\n\n conn.shutdown(Shutdown::Both).unwrap();\n\n }\n\n };\n\n}\n", "file_path": "src/lib.rs", "rank": 50, "score": 2.3214138994350404 } ]
Rust
cocoon-core/src/message/mod.rs
d42ejh/ilnyaplus-dev
c5f2d04b3d1c6a1b2d6089e583b470779c9b4278
use crate::constant; use bytecheck::CheckBytes; use rkyv::{ ser::{serializers::AllocSerializer, Serializer}, Archive, Deserialize, Infallible, Serialize, }; use std::net::SocketAddr; use tracing::{event, Level}; #[derive(Debug, PartialEq, Eq, FromPrimitive)] pub enum MessageType { PingRequest = 1, FindNodeRequest = 2, FindValueRequest = 3, StoreValueRequest = 4, PingResponse = 5, FindNodeResponse = 6, FindValueResponse = 7, } #[derive(Archive, Deserialize, Serialize, Debug, PartialEq)] #[archive_attr(derive(CheckBytes, Debug))] pub struct MessageHeader { pub message_type: u32, } impl MessageHeader { pub fn new(message_type: MessageType) -> Self { MessageHeader { message_type: message_type as u32, } } pub fn from_bytes(bytes: &[u8]) -> Self { assert!(bytes.len() >= constant::MESSAGE_HEADER_SIZE); let archived = rkyv::check_archived_root::<Self>(&bytes[0..constant::MESSAGE_HEADER_SIZE]).unwrap(); let header: Self = archived.deserialize(&mut Infallible).unwrap(); header } pub fn to_bytes(&self) -> Vec<u8> { let mut serializer = AllocSerializer::<256>::default(); serializer .serialize_value(self) .expect("Failed to serialize a message"); let av = serializer.into_serializer().into_inner(); assert_eq!(av.len(), constant::MESSAGE_HEADER_SIZE); av.to_vec() } } #[derive(Archive, Deserialize, Serialize, Debug, PartialEq)] #[archive_attr(derive(CheckBytes, Debug))] pub struct PingRequestMessage {} impl PingRequestMessage { pub fn new() -> Self { PingRequestMessage {} } pub fn from_bytes(bytes: &[u8]) -> (MessageHeader, Self) { let header = MessageHeader::from_bytes(bytes); let archived = rkyv::check_archived_root::<Self>(&bytes[constant::MESSAGE_HEADER_SIZE..]).unwrap(); let msg: Self = archived.deserialize(&mut Infallible).unwrap(); (header, msg) } pub fn to_bytes(&self) -> Vec<u8> { let header = MessageHeader::new(MessageType::PingRequest); let mut bytes = header.to_bytes(); let mut serializer = AllocSerializer::<32>::default(); serializer .serialize_value(self) 
.expect("Failed to serialize a message"); bytes.extend_from_slice(&serializer.into_serializer().into_inner()); bytes } } #[derive(Archive, Deserialize, Serialize, Debug, PartialEq)] #[archive_attr(derive(CheckBytes, Debug))] pub struct FindNodeRequestMessage { pub key: Vec<u8>, } impl FindNodeRequestMessage { pub fn new(key: &[u8]) -> Self { FindNodeRequestMessage { key: key.to_vec() } } pub fn from_bytes(bytes: &[u8]) -> (MessageHeader, Self) { let header = MessageHeader::from_bytes(bytes); let archived = rkyv::check_archived_root::<Self>(&bytes[constant::MESSAGE_HEADER_SIZE..]).unwrap(); let msg: Self = archived.deserialize(&mut Infallible).unwrap(); (header, msg) } pub fn to_bytes(&self) -> Vec<u8> { let header = MessageHeader::new(MessageType::FindNodeRequest); let mut bytes = header.to_bytes(); let mut serializer = AllocSerializer::<512>::default(); serializer .serialize_value(self) .expect("Failed to serialize a message"); bytes.extend_from_slice(&serializer.into_serializer().into_inner()); bytes } } #[derive(Archive, Deserialize, Serialize, Debug, PartialEq)] #[archive_attr(derive(CheckBytes, Debug))] pub struct FindValueRequestMessage { pub key: Vec<u8>, } impl FindValueRequestMessage { pub fn new(key: &[u8]) -> Self { debug_assert!(key.len() != 0); FindValueRequestMessage { key: key.to_owned(), } } pub fn from_bytes(bytes: &[u8]) -> (MessageHeader, Self) { let header = MessageHeader::from_bytes(bytes); let archived = rkyv::check_archived_root::<Self>(&bytes[constant::MESSAGE_HEADER_SIZE..]).unwrap(); let msg: Self = archived.deserialize(&mut Infallible).unwrap(); (header, msg) } pub fn to_bytes(&self) -> Vec<u8> { let header = MessageHeader::new(MessageType::FindValueRequest); let mut bytes = header.to_bytes(); println!("find val header {} bytes", bytes.len()); let mut serializer = AllocSerializer::<512>::default(); serializer .serialize_value(self) .expect("Failed to serialize a message"); let body_bytes = serializer.into_serializer().into_inner(); 
println!("find val body {} bytes", body_bytes.len()); bytes.extend_from_slice(&body_bytes); bytes } } #[derive(Archive, Deserialize, Serialize, Debug, PartialEq)] #[archive_attr(derive(CheckBytes, Debug))] pub struct StoreValueRequestMessage { pub key: Vec<u8>, pub data: Vec<u8>, pub replication_level: u32, } impl StoreValueRequestMessage { pub fn new(key: &[u8], data: &[u8], replication_level: u32) -> Self { StoreValueRequestMessage { key: key.to_vec(), data: data.to_vec(), replication_level: replication_level, } } pub fn from_bytes(bytes: &[u8]) -> (MessageHeader, Self) { let header = MessageHeader::from_bytes(bytes); let archived = rkyv::check_archived_root::<Self>(&bytes[constant::MESSAGE_HEADER_SIZE..]).unwrap(); let msg: Self = archived.deserialize(&mut Infallible).unwrap(); (header, msg) } pub fn to_bytes(&self) -> Vec<u8> { let header = MessageHeader::new(MessageType::StoreValueRequest); let mut bytes = header.to_bytes(); let mut serializer = AllocSerializer::<512>::default(); serializer .serialize_value(self) .expect("Failed to serialize a message"); bytes.extend_from_slice(&serializer.into_serializer().into_inner()); bytes } } #[derive(Archive, Deserialize, Serialize, Debug, PartialEq)] #[archive_attr(derive(CheckBytes, Debug))] pub struct PingResponseMessage {} impl PingResponseMessage { pub fn new() -> Self { PingResponseMessage {} } pub fn from_bytes(bytes: &[u8]) -> (MessageHeader, Self) { let header = MessageHeader::from_bytes(bytes); let archived = rkyv::check_archived_root::<Self>(&bytes[constant::MESSAGE_HEADER_SIZE..]).unwrap(); let msg: Self = archived.deserialize(&mut Infallible).unwrap(); (header, msg) } pub fn to_bytes(&self) -> Vec<u8> { let header = MessageHeader::new(MessageType::PingResponse); let mut bytes = header.to_bytes(); let mut serializer = AllocSerializer::<512>::default(); serializer .serialize_value(self) .expect("Failed to serialize a message"); bytes.extend_from_slice(&serializer.into_serializer().into_inner()); bytes } } 
#[derive(Archive, Deserialize, Serialize, Debug, PartialEq)] #[archive_attr(derive(CheckBytes, Debug))] pub struct FindNodeResponseMessage { pub nodes: Vec<SocketAddr>, } impl FindNodeResponseMessage { pub fn new(addrs: &[SocketAddr]) -> Self { FindNodeResponseMessage { nodes: addrs.to_vec(), } } pub fn from_bytes(bytes: &[u8]) -> (MessageHeader, Self) { let header = MessageHeader::from_bytes(bytes); let archived = rkyv::check_archived_root::<Self>(&bytes[constant::MESSAGE_HEADER_SIZE..]).unwrap(); let msg: Self = archived.deserialize(&mut Infallible).unwrap(); (header, msg) } pub fn to_bytes(&self) -> Vec<u8> { let header = MessageHeader::new(MessageType::FindNodeResponse); let mut bytes = header.to_bytes(); let mut serializer = AllocSerializer::<512>::default(); serializer .serialize_value(self) .expect("Failed to serialize a message"); bytes.extend_from_slice(&serializer.into_serializer().into_inner()); bytes } } #[derive(Archive, Deserialize, Serialize, Debug, PartialEq)] #[archive_attr(derive(CheckBytes, Debug))] pub struct FindValueResponseMessage { pub key: Vec<u8>, pub node: Option<SocketAddr>, pub data: Option<Vec<u8>>, } impl FindValueResponseMessage { pub fn new(key: &[u8], node: Option<&SocketAddr>, data: Option<&[u8]>) -> Self { assert!(!(node.is_none() && data.is_none())); assert!(!(node.is_some() && data.is_some())); if node.is_some() && data.is_none() { FindValueResponseMessage { key: key.to_vec(), node: Some(*node.unwrap()), data: None, } } else { FindValueResponseMessage { key: key.to_vec(), node: None, data: Some(data.unwrap().to_vec()), } } } pub fn from_bytes(bytes: &[u8]) -> (MessageHeader, Self) { let header = MessageHeader::from_bytes(bytes); let archived = rkyv::check_archived_root::<Self>(&bytes[constant::MESSAGE_HEADER_SIZE..]).unwrap(); let msg: Self = archived.deserialize(&mut Infallible).unwrap(); (header, msg) } pub fn to_bytes(&self) -> Vec<u8> { let header = MessageHeader::new(MessageType::FindValueResponse); let mut bytes = 
header.to_bytes(); let mut serializer = AllocSerializer::<512>::default(); serializer .serialize_value(self) .expect("Failed to serialize a message"); bytes.extend_from_slice(&serializer.into_serializer().into_inner()); bytes } } #[cfg(test)] mod tests { use super::constant::MESSAGE_HEADER_SIZE; use super::{FindNodeRequestMessage, MessageHeader, MessageType, PingRequestMessage}; use crate::message::{FindValueRequestMessage, PingResponseMessage, StoreValueRequestMessage}; use openssl::rand::rand_bytes; #[test] pub fn header() -> anyhow::Result<()> { let h = MessageHeader::new(MessageType::PingRequest); assert_eq!(h.message_type, MessageType::PingRequest as u32); let bytes = h.to_bytes(); assert_eq!(bytes.len(), MESSAGE_HEADER_SIZE); let hh = MessageHeader::from_bytes(&bytes); assert_eq!(h, hh); Ok(()) } #[test] pub fn ping_request() -> anyhow::Result<()> { let header = MessageHeader::new(MessageType::PingRequest); let req = PingRequestMessage::new(); let bytes = req.to_bytes(); let (h, r) = PingRequestMessage::from_bytes(&bytes); assert_eq!(h, header); assert_eq!(r, req); Ok(()) } #[test] pub fn find_node_request() -> anyhow::Result<()> { let header = MessageHeader::new(MessageType::FindNodeRequest); let mut key = vec![0; 64]; rand_bytes(&mut key)?; let req = FindNodeRequestMessage::new(&key); assert_eq!(key, req.key); let bytes = req.to_bytes(); let (h, r) = FindNodeRequestMessage::from_bytes(&bytes); assert_eq!(h, header); assert_eq!(r, req); Ok(()) } #[test] pub fn find_value_request() -> anyhow::Result<()> { let header = MessageHeader::new(MessageType::FindValueRequest); let mut key = vec![0; 64]; rand_bytes(&mut key)?; let req = FindValueRequestMessage::new(&key); assert_eq!(key, req.key); let bytes = req.to_bytes(); let (h, r) = FindValueRequestMessage::from_bytes(&bytes); assert_eq!(h, header); assert_eq!(r, req); Ok(()) } #[test] pub fn store_value_request() -> anyhow::Result<()> { let header = MessageHeader::new(MessageType::StoreValueRequest); let mut key 
= vec![0; 64]; let mut data = vec![0; 64]; rand_bytes(&mut key)?; rand_bytes(&mut data)?; let rep_level = 99; let req = StoreValueRequestMessage::new(&key, &data, rep_level); assert_eq!(key, req.key); assert_eq!(data, req.data); assert_eq!(rep_level, req.replication_level); let bytes = req.to_bytes(); let (h, r) = StoreValueRequestMessage::from_bytes(&bytes); assert_eq!(h, header); assert_eq!(r, req); Ok(()) } #[test] pub fn ping_response() -> anyhow::Result<()> { let header = MessageHeader::new(MessageType::PingResponse); let req = PingResponseMessage::new(); let bytes = req.to_bytes(); let (h, r) = PingResponseMessage::from_bytes(&bytes); assert_eq!(h, header); assert_eq!(r, req); Ok(()) } }
use crate::constant; use bytecheck::CheckBytes; use rkyv::{ ser::{serializers::AllocSerializer, Serializer}, Archive, Deserialize, Infallible, Serialize, }; use std::net::SocketAddr; use tracing::{event, Level}; #[derive(Debug, PartialEq, Eq, FromPrimitive)] pub enum MessageType { PingRequest = 1, FindNodeRequest = 2, FindValueRequest = 3, StoreValueRequest = 4, PingResponse = 5, FindNodeResponse = 6, FindValueResponse = 7, } #[derive(Archive, Deserialize, Serialize, Debug, PartialEq)] #[archive_attr(derive(CheckBytes, Debug))] pub struct MessageHeader { pub message_type: u32, } impl MessageHeader { pub fn new(message_type: MessageType) -> Self { MessageHeader { message_type: message_type as u32, } } pub fn from_bytes(bytes: &[u8]) -> Self { assert!(bytes.len() >= constant::MESSAGE_HEADER_SIZE); let archived = rkyv::check_archived_root::<Self>(&bytes[0..constant::MESSAGE_HEADER_SIZE]).unwrap(); let header: Self = archived.deserialize(&mut Infallible).unwrap(); header } pub fn to_bytes(&self) -> Vec<u8> { let mut serializer = AllocSerializer::<256>::default(); serializer .serialize_value(self) .expect("Failed to serialize a message"); let av = serializer.into_serializer().into_inner(); assert_eq!(av.len(), constant::MESSAGE_HEADER_SIZE); av.to_vec() } } #[derive(Archive, Deserialize, Serialize, Debug, PartialEq)] #[archive_attr(derive(CheckBytes, Debug))] pub struct PingRequestMessage {} impl PingRequestMessage { pub fn new() -> Self { PingRequestMessage {} } pub fn from_bytes(bytes: &[u8]) -> (MessageHeader, Self) { let header = MessageHeader::from_bytes(bytes); let archived = rkyv::check_archived_root::<Self>(&bytes[constant::MESSAGE_HEADER_SIZE..]).unwrap(); let msg: Self = archived.deserialize(&mut Infallible).unwrap(); (header, msg) } pub fn to_bytes(&self) -> Vec<u8> { let header = MessageHeader::new(MessageType::PingRequest); let mut bytes = header.to_bytes(); let mut serializer = AllocSerializer::<32>::default(); serializer .serialize_value(self) 
.expect("Failed to serialize a message"); bytes.extend_from_slice(&serializer.into_serializer().into_inner()); bytes } } #[derive(Archive, Deserialize, Serialize, Debug, PartialEq)] #[archive_attr(derive(CheckBytes, Debug))] pub struct FindNodeRequestMessage { pub key: Vec<u8>, } impl FindNodeRequestMessage { pub fn new(key: &[u8]) -> Self { FindNodeRequestMessage { key: key.to_vec() } } pub fn from_bytes(bytes: &[u8]) -> (MessageHeader, Self) { let header = MessageHeader::from_bytes(bytes); let archived = rkyv::check_archived_root::<Self>(&bytes[constant::MESSAGE_HEADER_SIZE..]).unwrap(); let msg: Self = archived.deserialize(&mut Infallible).unwrap(); (header, msg) } pub fn to_bytes(&self) -> Vec<u8> { let header = MessageHeader::new(MessageType::FindNodeRequest); let mut bytes = header.to_bytes(); let mut serializer = AllocSerializer::<512>::default(); serializer .serialize_value(self) .expect("Failed to serialize a message"); bytes.extend_from_slice(&serializer.into_serializer().into_inner()); bytes } } #[derive(Archive, Deserialize, Serialize, Debug, PartialEq)] #[archive_attr(derive(CheckBytes, Debug))] pub struct FindValueRequestMessage { pub key: Vec<u8>, } impl FindValueRequestMessage { pub fn new(key: &[u8]) -> Self { debug_assert!(key.len() != 0); FindValueRequestMessage { key: key.to_owned(), } } pub fn from_bytes(bytes: &[u8]) -> (MessageHeader, Self) { let header = MessageHeader::from_bytes(bytes); let archived = rkyv::check_archived_root::<Self>(&bytes[constant::MESSAGE_HEADER_SIZE..]).unwrap(); let msg: Self = archived.deserialize(&mut Infallible).unwrap(); (header, msg) } pub fn to_bytes(&self) -> Vec<u8> { let header = MessageHeader::new(MessageType::FindValueRequest); let mut bytes = header.to_bytes(); println!("find val header {} bytes", bytes.len()); let mut serializer = AllocSerializer::<512>::default(); serializer .serialize_value(self) .expect("Failed to serialize a message"); let body_bytes = serializer.into_serializer().into_inner(); 
println!("find val body {} bytes", body_bytes.len()); bytes.extend_from_slice(&body_bytes); bytes } } #[derive(Archive, Deserialize, Serialize, Debug, PartialEq)] #[archive_attr(derive(CheckBytes, Debug))] pub struct StoreValueRequestMessage { pub key: Vec<u8>, pub data: Vec<u8>, pub replication_level: u32, } impl StoreValueRequestMessage { pub fn new(key: &[u8], data: &[u8], replication_level: u32) -> Self { StoreValueRequestMessage { key: key.to_vec(), data: data.to_vec(), replication_level: replication_level, } } pub fn from_bytes(bytes: &[u8]) -> (MessageHeader, Self) { let header = MessageHeader::from_bytes(bytes); let archived = rkyv::check_archived_root::<Self>(&bytes[constant::MESSAGE_HEADER_SIZE..]).unwrap(); let msg: Self = archived.deserialize(&mut Infallible).unwrap(); (header, msg) } pub fn to_bytes(&self) -> Vec<u8> { let header = MessageHeader::new(MessageType::StoreValueRequest); let mut bytes = header.to_bytes(); let mut serializer = AllocSerializer::<512>::default(); serializer .serialize_value(self) .expect("Failed to serialize a message"); bytes.extend_from_slice(&serializer.into_serializer().into_inner()); bytes } } #[derive(Archive, Deserialize, Serialize, Debug, PartialEq)] #[archive_attr(derive(CheckBytes, Debug))] pub struct PingResponseMessage {} impl PingResponseMessage { pub fn new() -> Self { PingResponseMessage {} } pub fn from_bytes(bytes: &[u8]) -> (MessageHeader, Self) { let header = MessageHeader::from_bytes(bytes); let archived = rkyv::check_archived_root::<Self>(&bytes[constant::MESSAGE_HEADER_SIZE..]).unwrap(); let msg: Self = archived.deserialize(&mut Infallible).unwrap(); (header, msg) } pub fn to_bytes(&self) -> Vec<u8> { let header = MessageHeader::new(MessageType::PingResponse); let mut bytes = header.to_bytes(); let mut serializer = AllocSerializer::<512>::default(); serializer .serialize_value(self) .expect("Failed to serialize a message"); bytes.extend_from_slice(&serializer.into_serializer().into_inner()); bytes } } 
#[derive(Archive, Deserialize, Serialize, Debug, PartialEq)] #[archive_attr(derive(CheckBytes, Debug))] pub struct FindNodeResponseMessage { pub nodes: Vec<SocketAddr>, } impl FindNodeResponseMessage { pub fn new(addrs: &[SocketAddr]) -> Self { FindNodeResponseMessage { nodes: addrs.to_vec(), } } pub fn from_bytes(bytes: &[u8]) -> (MessageHeader, Self) { let header = MessageHeader::from_bytes(bytes); let archived = rkyv::check_archived_root::<Self>(&bytes[constant::MESSAGE_HEADER_SIZE..]).unwrap(); let msg: Self = archived.deserialize(&mut Infallible).unwrap(); (header, msg) } pub fn to_bytes(&self) -> Vec<u8> { let header = MessageHeader::new(MessageType::FindNodeResponse); let mut bytes = header.to_bytes(); let mut serializer = AllocSerializer::<512>::default(); serializer .serialize_value(self) .expect("Failed to serialize a message"); bytes.extend_from_slice(&serializer.into_serializer().into_inner()); bytes } } #[derive(Archive, Deserialize, Serialize, Debug, PartialEq)] #[archive_attr(derive(CheckBytes, Debug))] pub struct FindValueResponseMessage { pub key: Vec<u8>, pub node: Option<SocketAddr>, pub data: Option<Vec<u8>>, } impl FindValueResponseMessage { pub fn new(key: &[u8], node: Option<&SocketAddr>, data: Option<&[u8]>) -> Self { assert!(!(node.is_none() && data.is_none())); assert!(!(node.is_some() && data.is_some()));
} pub fn from_bytes(bytes: &[u8]) -> (MessageHeader, Self) { let header = MessageHeader::from_bytes(bytes); let archived = rkyv::check_archived_root::<Self>(&bytes[constant::MESSAGE_HEADER_SIZE..]).unwrap(); let msg: Self = archived.deserialize(&mut Infallible).unwrap(); (header, msg) } pub fn to_bytes(&self) -> Vec<u8> { let header = MessageHeader::new(MessageType::FindValueResponse); let mut bytes = header.to_bytes(); let mut serializer = AllocSerializer::<512>::default(); serializer .serialize_value(self) .expect("Failed to serialize a message"); bytes.extend_from_slice(&serializer.into_serializer().into_inner()); bytes } } #[cfg(test)] mod tests { use super::constant::MESSAGE_HEADER_SIZE; use super::{FindNodeRequestMessage, MessageHeader, MessageType, PingRequestMessage}; use crate::message::{FindValueRequestMessage, PingResponseMessage, StoreValueRequestMessage}; use openssl::rand::rand_bytes; #[test] pub fn header() -> anyhow::Result<()> { let h = MessageHeader::new(MessageType::PingRequest); assert_eq!(h.message_type, MessageType::PingRequest as u32); let bytes = h.to_bytes(); assert_eq!(bytes.len(), MESSAGE_HEADER_SIZE); let hh = MessageHeader::from_bytes(&bytes); assert_eq!(h, hh); Ok(()) } #[test] pub fn ping_request() -> anyhow::Result<()> { let header = MessageHeader::new(MessageType::PingRequest); let req = PingRequestMessage::new(); let bytes = req.to_bytes(); let (h, r) = PingRequestMessage::from_bytes(&bytes); assert_eq!(h, header); assert_eq!(r, req); Ok(()) } #[test] pub fn find_node_request() -> anyhow::Result<()> { let header = MessageHeader::new(MessageType::FindNodeRequest); let mut key = vec![0; 64]; rand_bytes(&mut key)?; let req = FindNodeRequestMessage::new(&key); assert_eq!(key, req.key); let bytes = req.to_bytes(); let (h, r) = FindNodeRequestMessage::from_bytes(&bytes); assert_eq!(h, header); assert_eq!(r, req); Ok(()) } #[test] pub fn find_value_request() -> anyhow::Result<()> { let header = 
MessageHeader::new(MessageType::FindValueRequest); let mut key = vec![0; 64]; rand_bytes(&mut key)?; let req = FindValueRequestMessage::new(&key); assert_eq!(key, req.key); let bytes = req.to_bytes(); let (h, r) = FindValueRequestMessage::from_bytes(&bytes); assert_eq!(h, header); assert_eq!(r, req); Ok(()) } #[test] pub fn store_value_request() -> anyhow::Result<()> { let header = MessageHeader::new(MessageType::StoreValueRequest); let mut key = vec![0; 64]; let mut data = vec![0; 64]; rand_bytes(&mut key)?; rand_bytes(&mut data)?; let rep_level = 99; let req = StoreValueRequestMessage::new(&key, &data, rep_level); assert_eq!(key, req.key); assert_eq!(data, req.data); assert_eq!(rep_level, req.replication_level); let bytes = req.to_bytes(); let (h, r) = StoreValueRequestMessage::from_bytes(&bytes); assert_eq!(h, header); assert_eq!(r, req); Ok(()) } #[test] pub fn ping_response() -> anyhow::Result<()> { let header = MessageHeader::new(MessageType::PingResponse); let req = PingResponseMessage::new(); let bytes = req.to_bytes(); let (h, r) = PingResponseMessage::from_bytes(&bytes); assert_eq!(h, header); assert_eq!(r, req); Ok(()) } }
if node.is_some() && data.is_none() { FindValueResponseMessage { key: key.to_vec(), node: Some(*node.unwrap()), data: None, } } else { FindValueResponseMessage { key: key.to_vec(), node: None, data: Some(data.unwrap().to_vec()), } }
if_condition
[]
Rust
src/options.rs
Dentosal/constcodegen
09d61a536d729aeda9639d03a52025b401ab1b53
use std::collections::HashMap; use serde::Deserialize; use crate::constants::Constant; use crate::format_value::*; use crate::template; #[derive(Debug, Deserialize, Default)] #[serde(default, deny_unknown_fields)] pub struct Options { pub codegen: CodegenOptions, lang: HashMap<String, LangOptions>, } impl Options { pub fn languages(&self) -> Vec<(&String, &LangOptions)> { self.lang .iter() .filter(|(ref name, _)| self.codegen.enabled.contains(name)) .collect() } } #[derive(Debug, Deserialize, Default)] #[serde(deny_unknown_fields)] pub struct CodegenOptions { enabled: Vec<String>, #[serde(default)] pub comment_sections: bool, } #[derive(Debug, Deserialize, Default)] #[serde(deny_unknown_fields)] pub struct LangOptions { pub file_ext: String, template: String, #[serde(default)] import: Option<String>, #[serde(default)] comment: Option<String>, #[serde(default)] intro: Option<String>, #[serde(default)] outro: Option<String>, #[serde(default)] format: Format, #[serde(default)] pub formatter: Option<Vec<String>>, #[serde(default, rename = "type")] pub types: HashMap<String, LangTypeOptions>, } impl LangOptions { pub fn format_constant(&self, constant: &Constant) -> Option<String> { let mut t_ctx = HashMap::new(); t_ctx.insert("$name", constant.name.clone()); t_ctx.insert( "$value", constant .type_ .clone() .and_then(|t| self.types.get(&t)) .map(|t_opts| t_opts.format.clone()) .unwrap_or_else(|| self.format.clone()) .format(&constant.value()), ); if template::contains_parameter(&self.template, "$type") { let type_ = constant.type_.clone()?; t_ctx.insert("$type", type_.clone()); if let Some(type_opts) = self.types.get(&type_) { if let Some(type_name) = &type_opts.name { t_ctx.insert("$type", type_name.clone()); } let old_value = t_ctx["$value"].clone(); t_ctx.insert( "$value", format!( "{}{}{}", type_opts.value_prefix, old_value, type_opts.value_suffix ), ); } } Some(template::replace_parameters(&self.template, &t_ctx)) } pub fn format_import(&self, import: &str) -> 
Option<String> { let mut t_ctx = HashMap::new(); t_ctx.insert("$import", import.to_owned()); let im = self.import.clone()?; Some(template::replace_parameters(&im, &t_ctx)) } pub fn format_comment(&self, comment: &str) -> String { let mut t_ctx = HashMap::new(); t_ctx.insert("$comment", comment.to_owned()); self.comment .clone() .map(|c| format!("{}\n", template::replace_parameters(&c, &t_ctx))) .unwrap_or_else(String::new) } pub fn format_intro(&self) -> String { let t_ctx = HashMap::new(); self.intro .clone() .map(|c| format!("{}\n", template::replace_parameters(&c, &t_ctx))) .unwrap_or_else(String::new) } pub fn format_outro(&self) -> String { let t_ctx = HashMap::new(); self.outro .clone() .map(|c| format!("{}\n", template::replace_parameters(&c, &t_ctx))) .unwrap_or_else(String::new) } pub fn constant_imports(&self, constant: &Constant) -> Vec<String> { if let Some(type_) = constant.type_.clone() { if let Some(type_opts) = self.types.get(&type_) { return type_opts.import.clone(); } } Vec::new() } } #[derive(Debug, Deserialize, Default)] #[serde(default, deny_unknown_fields)] pub struct LangTypeOptions { pub name: Option<String>, pub value_prefix: String, pub value_suffix: String, pub format: Format, pub import: Vec<String>, }
use std::collections::HashMap; use serde::Deserialize; use crate::constants::Constant; use crate::format_value::*; use crate::template; #[derive(Debug, Deserialize, Default)] #[serde(default, deny_unknown_fields)] pub struct Options { pub codegen: CodegenOptions, lang: HashMap<String, LangOptions>, } impl Options { pub fn languages(&self) -> Vec<(&String, &LangOptions)> { self.lang .iter() .filter(|(ref name, _)| self.codegen.enabled.contains(name)) .collect() } } #[derive(Debug, Deserialize, Default)] #[serde(deny_unknown_fields)] pub struct CodegenOptions { enabled: Vec<String>, #[serde(default)] pub comment_sections: bool, } #[derive(Debug, Deserialize, Default)] #[serde(deny_unknown_fields)] pub struct LangOptions { pub file_ext: String, template: String, #[serde(default)] import: Option<String>, #[serde(default)] comment: Option<String>, #[serde(default)] intro: Option<String>, #[serde(default)] outro: Option<String>, #[serde(default)] format: Format, #[serde(default)] pub formatter: Option<Vec<String>>, #[serde(default, rename = "type")] pub types: HashMap<String, LangTypeOptions>, } impl LangOptions { pub fn format_constant(&self, constant: &Constant) -> Option<String> { let mut t_ctx = HashMap::new(); t_ctx.insert("$name", constant.name.clone()); t_ctx.insert( "$value", constant .type_ .clone() .and_then(|t| self.types.get(&t)) .map(|t_opts| t_opts.format.clone()) .unwrap_or_else(|| self.format.clone()) .format(&constant.value()), ); if template::contains_parameter(&self.template, "$type") { let type_ = constant.type_.clone()?; t_ctx.insert("$type", type_.clone()); if let Some(type_opts) = self.types.get(&type_) { if let Some(type_name) = &type_opts.name { t_ctx.insert("$type", type_name.clone()); } let old_value = t_ctx["$value"].clone(); t_ctx.insert( "$value", format!( "{}{}{}", type_opts.value_prefix, old_value, type_opts.value_suffix ), ); } } Some(template::replace_parameters(&self.template, &t_ctx)) } pub fn format_import(&self, import: &str) -> 
Option<String> { let mut t_ctx = HashMap::new(); t_ctx.insert("$import", import.to_owned()); let im = self.import.clone()?; Some(template::replace_parameters(&im, &t_ctx)) } pub fn format_comment(&self, comment: &str) -> String { let mut t_ctx = HashMap::new(); t_ctx.insert("$comment", comment.to_owned()); self.comment .clone() .map(|c| format!("{}\n", template::replace_parameters(&c, &t_ctx))) .unwrap_or_else(String::new) } pub fn format_intro(&self) -> String { let t_ctx = HashMap::new(); self.intro .clone() .map(|c| format!("{}\n", template::replace_parameters(&c, &t_ctx))) .unwrap_or_else(String::new) } pub fn format_outro(&self) -> Strin
pub fn constant_imports(&self, constant: &Constant) -> Vec<String> { if let Some(type_) = constant.type_.clone() { if let Some(type_opts) = self.types.get(&type_) { return type_opts.import.clone(); } } Vec::new() } } #[derive(Debug, Deserialize, Default)] #[serde(default, deny_unknown_fields)] pub struct LangTypeOptions { pub name: Option<String>, pub value_prefix: String, pub value_suffix: String, pub format: Format, pub import: Vec<String>, }
g { let t_ctx = HashMap::new(); self.outro .clone() .map(|c| format!("{}\n", template::replace_parameters(&c, &t_ctx))) .unwrap_or_else(String::new) }
function_block-function_prefixed
[ { "content": "pub fn contains_parameter(text: &str, parameter: &str) -> bool {\n\n for cap in RE_PARAM.find_iter(text) {\n\n let name = cap.as_str();\n\n if name != \"$$\" && name == parameter {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "src/template.rs", "rank": 0, "score": 146774.85135099525 }, { "content": "pub fn replace_parameters(text: &str, context: &HashMap<&str, String>) -> String {\n\n let mut result = text.to_owned();\n\n for cap in RE_PARAM.find_iter(text) {\n\n let name = cap.as_str();\n\n if name == \"$$\" {\n\n continue;\n\n }\n\n if let Some(value) = context.get(name) {\n\n result = result.replace(name, value);\n\n } else {\n\n panic!(\"Unknown template parameter {:?}\", name);\n\n }\n\n }\n\n result.replace(\"$$\", \"$\")\n\n}\n", "file_path": "src/template.rs", "rank": 1, "score": 145336.2134321063 }, { "content": "fn run_formatter(cmd: &[String], source: &str) -> Result<String, Error> {\n\n use std::io::Write;\n\n use std::process::{Command, Stdio};\n\n\n\n if cmd.is_empty() {\n\n return Err(Error::Formatter(\"Formatter command empty\".to_owned()));\n\n }\n\n\n\n log::info!(\"Running formatter {:?}\", cmd);\n\n let mut p = Command::new(cmd[0].clone())\n\n .args(&cmd[1..])\n\n .stdin(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .spawn()?;\n\n\n\n p.stdin.as_mut().unwrap().write_all(source.as_bytes())?;\n\n let output = p.wait_with_output().expect(\"failed to wait on child\");\n\n\n\n if !output.status.success() {\n\n return Err(Error::Formatter(format!(\n\n \"Formatter returned with non-zero exit code {:?}\",\n\n output.status.code()\n\n )));\n\n }\n\n\n\n Ok(String::from_utf8(output.stdout).expect(\"Non-utf8 output from formatter\"))\n\n}\n", "file_path": "src/main.rs", "rank": 2, "score": 112081.79563614498 }, { "content": "fn int_float_eq(i: i128, f: f64) -> bool {\n\n if f.trunc() == f {\n\n if std::i128::MIN as f64 <= f && f <= std::i128::MAX as f64 {\n\n (f as i128) == i\n\n } else {\n\n false\n\n }\n\n } else 
{\n\n false\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialOrd, Serialize)]\n\npub enum Primitive {\n\n Boolean(bool),\n\n Integer(i128),\n\n Float(f64),\n\n}\n\nimpl Primitive {\n\n /// Normal equals for other types, but approx for floats\n", "file_path": "src/value.rs", "rank": 3, "score": 78351.48334157091 }, { "content": "pub fn evaluate(text: &str, ctx: &Context, fns: &Functions) -> Result<Primitive, EvalError> {\n\n let expr = parse(scan(text)?)?.resolve_all(ctx)?.call_functions(fns)?;\n\n\n\n if let ExprValue::Primitive(p) = expr.value {\n\n Ok(p)\n\n } else {\n\n unimplemented!(\"{:?}\", expr)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_expr {\n\n use crate::functions::Functions;\n\n use crate::value::{Context, Primitive};\n\n\n\n use super::evaluate;\n\n\n\n macro_rules! approx_eq {\n\n ($v1:expr, $v2:expr) => {{ $v1.approx_eq(&$v2, 0.01) }};\n\n }\n", "file_path": "src/expr.rs", "rank": 4, "score": 72664.89036269842 }, { "content": "type F = fn(Location, Vec<Expr>) -> R;\n\n\n\n#[derive(Debug)]\n\npub struct Functions(HashMap<String, F>);\n\nimpl Functions {\n\n pub fn new() -> Self {\n\n Self(HashMap::new())\n\n }\n\n\n\n pub fn default() -> Self {\n\n let mut result = Self::new();\n\n result.insert(\"not\", f_not);\n\n result.insert(\"and\", f_and);\n\n result.insert(\"or\", f_or);\n\n result.insert(\"add\", f_add);\n\n result.insert(\"mul\", f_mul);\n\n result.insert(\"fract\", f_fract);\n\n result\n\n }\n\n\n", "file_path": "src/functions.rs", "rank": 5, "score": 60570.11307295699 }, { "content": "fn scan(text: &str) -> Result<Vec<Token>, EvalError> {\n\n lazy_static! 
{\n\n static ref RE_FLT: Regex = Regex::new(r\"^[-+]?[0-9]+\\.[0-9]+([eE][-+]?[0-9]+)?\").unwrap();\n\n static ref RE_INT: Regex = Regex::new(r\"^[-+]?[0-9_]*[0-9]\").unwrap();\n\n static ref RE_RDX: Regex = Regex::new(r\"^0(b|o|x)([0-9a-f_]*[0-9a-f])\").unwrap();\n\n static ref RE_BLN: Regex = Regex::new(r\"^(true|false)\").unwrap();\n\n static ref RE_SYM: Regex = Regex::new(r\"^[a-zA-Z_][a-zA-Z0-9_]*\").unwrap();\n\n }\n\n\n\n let mut result = Vec::new();\n\n let mut offset: usize = 0;\n\n while offset < text.len() {\n\n if let Some(m) = RE_FLT.find(&text[offset..]) {\n\n result.push(Token {\n\n location: Location::new(text, offset, m.as_str().len()),\n\n type_: TokenValue::Literal(Primitive::Float(m.as_str().parse().unwrap())),\n\n });\n\n offset += m.as_str().len();\n\n } else if let Some(cap) = RE_RDX.captures(&text[offset..]) {\n\n let radix = cap.get(1);\n", "file_path": "src/expr.rs", "rank": 6, "score": 55508.88081277585 }, { "content": "use serde::Deserialize;\n\n\n\nuse crate::value::Primitive;\n\n\n\n#[derive(Debug, Clone, Deserialize, Default)]\n\n#[serde(default, deny_unknown_fields)]\n\npub struct Format {\n\n pub boolean: Option<BooleanFormat>,\n\n pub integer: Option<IntegerFormat>,\n\n}\n\nimpl Format {\n\n pub fn format(&self, value: &Primitive) -> String {\n\n (match value {\n\n Primitive::Boolean(v) => self.boolean.clone().map(|b| b.format(*v)),\n\n Primitive::Integer(v) => self.integer.clone().map(|b| b.format(*v)),\n\n _ => None,\n\n })\n\n .unwrap_or_else(|| value.to_string())\n\n }\n\n}\n", "file_path": "src/format_value.rs", "rank": 7, "score": 46362.98814682549 }, { "content": "\n\n#[derive(Debug, Clone, Deserialize)]\n\n#[serde(deny_unknown_fields)]\n\npub struct BooleanFormat {\n\n #[serde(rename = \"true\")]\n\n true_: String,\n\n #[serde(rename = \"false\")]\n\n false_: String,\n\n}\n\nimpl BooleanFormat {\n\n pub fn format(&self, boolean: bool) -> String {\n\n if boolean {\n\n self.true_.clone()\n\n } else {\n\n 
self.false_.clone()\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Deserialize, Default)]\n", "file_path": "src/format_value.rs", "rank": 8, "score": 46361.71103585137 }, { "content": "#[serde(default, deny_unknown_fields)]\n\npub struct IntegerFormat {\n\n radix: Radix,\n\n /// Underscore between every n digits, zero to disable\n\n underscores: u8,\n\n /// Zero pad to n digits, zero to disable\n\n zero_pad: u8,\n\n /// Omit `0x` prefix on non-base 10 numbers\n\n omit_prefix: bool,\n\n}\n\nimpl IntegerFormat {\n\n pub fn format(&self, mut integer: i128) -> String {\n\n let negative: bool = integer < 0;\n\n let radix = self.radix.value();\n\n\n\n integer = integer.abs();\n\n let mut digits: Vec<char> = Vec::new();\n\n while integer > 0 {\n\n let digit = (integer % (radix as i128)) as u32;\n\n digits.push(std::char::from_digit(digit, radix).unwrap());\n", "file_path": "src/format_value.rs", "rank": 9, "score": 46352.76283686624 }, { "content": " integer /= radix as i128;\n\n }\n\n\n\n while digits.len() < (self.zero_pad.max(1) as usize) {\n\n digits.push('0');\n\n }\n\n\n\n let mut result: String = digits.into_iter().rev().collect();\n\n if self.underscores != 0 {\n\n let mut i = result.len();\n\n while i > self.underscores as usize {\n\n i -= self.underscores as usize;\n\n result.insert(i, '_');\n\n }\n\n }\n\n\n\n if !self.omit_prefix {\n\n result = format!(\"{}{}\", self.radix.prefix(), result);\n\n }\n\n\n", "file_path": "src/format_value.rs", "rank": 10, "score": 46349.668561368984 }, { "content": " fn value(self) -> u32 {\n\n match self {\n\n Self::Binary => 2,\n\n Self::Octal => 8,\n\n Self::Decimal => 10,\n\n Self::Hexadecimal => 16,\n\n }\n\n }\n\n\n\n fn prefix(self) -> &'static str {\n\n match self {\n\n Self::Binary => \"0b\",\n\n Self::Octal => \"0o\",\n\n Self::Decimal => \"\",\n\n Self::Hexadecimal => \"0x\",\n\n }\n\n }\n\n}\n\nimpl Default for Radix {\n\n fn default() -> Self {\n", "file_path": "src/format_value.rs", "rank": 11, "score": 
46349.55508488049 }, { "content": " if negative {\n\n format!(\"-{}\", result)\n\n } else {\n\n result\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Deserialize)]\n\npub enum Radix {\n\n #[serde(alias = \"bin\")]\n\n Binary,\n\n #[serde(alias = \"oct\")]\n\n Octal,\n\n #[serde(alias = \"dec\")]\n\n Decimal,\n\n #[serde(alias = \"hex\")]\n\n Hexadecimal,\n\n}\n\nimpl Radix {\n", "file_path": "src/format_value.rs", "rank": 12, "score": 46349.437497574014 }, { "content": " Self::Decimal\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test_formatting {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_integer_format_hex() {\n\n let f = IntegerFormat {\n\n radix: Radix::Hexadecimal,\n\n ..Default::default()\n\n };\n\n assert_eq!(f.format(0x0), \"0x0\");\n\n assert_eq!(f.format(0x1234), \"0x1234\");\n\n assert_eq!(f.format(0x1234_5678), \"0x12345678\");\n\n assert_eq!(f.format(0x1234_5678_90ab_cdef), \"0x1234567890abcdef\");\n\n\n\n let f = IntegerFormat {\n", "file_path": "src/format_value.rs", "rank": 13, "score": 46348.120459048645 }, { "content": "\n\n #[test]\n\n fn test_integer_format_bin() {\n\n let f = IntegerFormat {\n\n radix: Radix::Binary,\n\n ..Default::default()\n\n };\n\n assert_eq!(f.format(0b1010), \"0b1010\");\n\n\n\n let f = IntegerFormat {\n\n radix: Radix::Binary,\n\n underscores: 4,\n\n ..Default::default()\n\n };\n\n assert_eq!(f.format(0b1010), \"0b1010\");\n\n assert_eq!(f.format(0b1010_0101), \"0b1010_0101\");\n\n assert_eq!(f.format(0b1111_0000_1100_0011), \"0b1111_0000_1100_0011\");\n\n\n\n let f = IntegerFormat {\n\n radix: Radix::Binary,\n", "file_path": "src/format_value.rs", "rank": 14, "score": 46347.30366188275 }, { "content": " radix: Radix::Hexadecimal,\n\n underscores: 4,\n\n ..Default::default()\n\n };\n\n assert_eq!(f.format(0x1234), \"0x1234\");\n\n assert_eq!(f.format(0x1234_5678), \"0x1234_5678\");\n\n assert_eq!(f.format(0x1234_5678_90ab_cdef), \"0x1234_5678_90ab_cdef\");\n\n assert_eq!(f.format(0x123_4567), \"0x123_4567\");\n\n\n\n 
let f = IntegerFormat {\n\n radix: Radix::Hexadecimal,\n\n underscores: 4,\n\n zero_pad: 8,\n\n ..Default::default()\n\n };\n\n assert_eq!(f.format(0x0), \"0x0000_0000\");\n\n assert_eq!(f.format(0x1234), \"0x0000_1234\");\n\n assert_eq!(f.format(0x1234_5678), \"0x1234_5678\");\n\n assert_eq!(f.format(0x1234_5678_90ab_cdef), \"0x1234_5678_90ab_cdef\");\n\n }\n", "file_path": "src/format_value.rs", "rank": 15, "score": 46345.40496368955 }, { "content": " underscores: 4,\n\n zero_pad: 8,\n\n ..Default::default()\n\n };\n\n assert_eq!(f.format(0b1010), \"0b0000_1010\");\n\n assert_eq!(f.format(0b1010_0101), \"0b1010_0101\");\n\n assert_eq!(f.format(0b1111_0000_1100_0011), \"0b1111_0000_1100_0011\");\n\n }\n\n}\n", "file_path": "src/format_value.rs", "rank": 16, "score": 46344.330201819 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Token {\n\n location: Location,\n\n type_: TokenValue,\n\n}\n\nimpl Token {\n\n pub fn error_here(&self, message: EvalErrorMessage) -> EvalError {\n\n self.location.error_here(message)\n\n }\n\n}\n\n\n", "file_path": "src/expr.rs", "rank": 17, "score": 44745.447214049025 }, { "content": "type R = Result<Expr, EvalError>;\n", "file_path": "src/functions.rs", "rank": 18, "score": 35916.023187050225 }, { "content": "#[paw::main]\n\nfn main(args: Config) {\n\n pretty_env_logger::init();\n\n if let Err(e) = inner_main(args) {\n\n eprintln!(\"Error: {}\", e);\n\n std::process::exit(1);\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 19, "score": 32143.351676773298 }, { "content": "fn inner_main(args: Config) -> Result<(), Error> {\n\n let c = fs::read(args.options_file).unwrap();\n\n let opts: Options = toml::from_slice(&c).unwrap();\n\n\n\n let mut constants = Vec::new();\n\n for p in args.constants_file {\n\n let c = fs::read(p).unwrap();\n\n let t: Constants = toml::from_slice(&c).unwrap();\n\n constants.extend(t.constants);\n\n }\n\n\n\n // Resolve constant values\n\n let mut context: Context = Context::new();\n\n for constant in 
constants.iter_mut() {\n\n if context.contains_key(&constant.name) {\n\n return Err(Error::DuplicateConstant(constant.name.clone()));\n\n }\n\n constant\n\n .resolve_value(&context)\n\n .map_err(|err| (constant.clone(), err))?;\n", "file_path": "src/main.rs", "rank": 20, "score": 27996.386757968565 }, { "content": "use serde::Deserialize;\n\n\n\nuse crate::expr::{evaluate, EvalError};\n\nuse crate::functions::Functions;\n\nuse crate::value::{Context, Primitive};\n\n\n\n#[derive(Debug, Deserialize, Default)]\n\n#[serde(default, deny_unknown_fields)]\n\npub struct Constants {\n\n #[serde(rename = \"constant\")]\n\n pub constants: Vec<Constant>,\n\n}\n\n\n\n#[derive(Debug, Clone, Deserialize, Default)]\n\n#[serde(deny_unknown_fields)]\n\npub struct Constant {\n\n pub name: String,\n\n\n\n #[serde(default, rename = \"type\")]\n\n pub type_: Option<String>,\n", "file_path": "src/constants.rs", "rank": 21, "score": 25610.836058079796 }, { "content": "\n\n #[serde(rename = \"value\")]\n\n value_string: String,\n\n\n\n #[serde(skip)]\n\n resolved_value: Option<Primitive>,\n\n}\n\nimpl Constant {\n\n pub fn value(&self) -> Primitive {\n\n self.resolved_value.clone().expect(\"Value not resolved\")\n\n }\n\n\n\n pub fn resolve_value(&mut self, ctx: &Context) -> Result<(), EvalError> {\n\n self.resolved_value = Some(evaluate(&self.value_string, ctx, &Functions::default())?);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/constants.rs", "rank": 22, "score": 25606.616429769834 }, { "content": "use std::collections::HashMap;\n\n\n\nuse lazy_static::lazy_static;\n\nuse regex::Regex;\n\n\n\nlazy_static! 
{\n\n static ref RE_PARAM: Regex = Regex::new(r\"\\$(\\$|[a-zA-Z_][a-zA-Z0-9_]*)\").unwrap();\n\n}\n\n\n", "file_path": "src/template.rs", "rank": 23, "score": 25515.63917252144 }, { "content": "/// Parse tokens to either S-expression or atom\n\nfn parse(tokens: Vec<Token>) -> Result<Expr, EvalError> {\n\n if tokens.is_empty() {\n\n Err(EvalError {\n\n location: Location::new(\"\", 0, 0),\n\n message: EvalErrorMessage::EmptyExpression,\n\n })\n\n } else if tokens.len() == 1 {\n\n match tokens[0].type_.clone() {\n\n TokenValue::Literal(val) => Ok(Expr {\n\n location: tokens[0].location.clone(),\n\n value: ExprValue::Primitive(val),\n\n }),\n\n TokenValue::Symbol(sym) => Ok(Expr {\n\n location: tokens[0].location.clone(),\n\n value: ExprValue::Symbol(sym),\n\n }),\n\n TokenValue::ExprOpen => Err(tokens[0].error_here(EvalErrorMessage::UnmatchedOpen)),\n\n TokenValue::ExprClose => Err(tokens[0].error_here(EvalErrorMessage::UnmatchedClose)),\n\n }\n\n } else {\n\n match tokens[0].type_.clone() {\n\n TokenValue::Literal(_) | TokenValue::Symbol(_) => {\n\n Err(tokens[1].error_here(EvalErrorMessage::UnexpectedToken))\n\n },\n\n TokenValue::ExprClose => Err(tokens[0].error_here(EvalErrorMessage::UnmatchedClose)),\n\n TokenValue::ExprOpen => parse_expr(tokens),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/expr.rs", "rank": 24, "score": 25437.991573029758 }, { "content": "/// Parse S-expression\n\nfn parse_expr(tokens: Vec<Token>) -> Result<Expr, EvalError> {\n\n type Level = u32;\n\n\n\n assert!(tokens.len() > 1 && tokens[0].type_ == TokenValue::ExprOpen);\n\n let mut level: Level = 1;\n\n let mut index: usize = 1;\n\n let mut buffer: Vec<(Level, Expr, usize)> = Vec::new();\n\n while index < tokens.len() {\n\n let location = tokens[index].location.clone();\n\n match tokens[index].type_.clone() {\n\n TokenValue::Literal(val) => buffer.push((\n\n level,\n\n Expr {\n\n location,\n\n value: ExprValue::Primitive(val),\n\n },\n\n index,\n\n )),\n\n TokenValue::Symbol(sym) => 
buffer.push((\n\n level,\n", "file_path": "src/expr.rs", "rank": 34, "score": 24797.896129191497 }, { "content": " Self::Integer(o) => int_float_eq(*o, *s),\n\n Self::Float(o) => s == o,\n\n _ => false,\n\n },\n\n }\n\n }\n\n}\n\nimpl fmt::Display for Primitive {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n write!(f, \"{}\", match self {\n\n Self::Boolean(v) => v.to_string(),\n\n Self::Integer(v) => v.to_string(),\n\n Self::Float(v) => v.to_string(),\n\n })\n\n }\n\n}\n\n\n\npub type Context = HashMap<String, Primitive>;\n", "file_path": "src/value.rs", "rank": 35, "score": 24168.149542272524 }, { "content": " pub fn approx_eq(&self, other: &Self, epsilon: f64) -> bool {\n\n if let Self::Float(f1) = self {\n\n if let Self::Float(f2) = other {\n\n return (f1 - f2).abs() < epsilon;\n\n }\n\n }\n\n self == other\n\n }\n\n\n\n /// Not\n\n pub fn not(&self) -> Result<Primitive, EvalErrorMessage> {\n\n use Primitive::*;\n\n Ok(match self {\n\n Boolean(a) => Boolean(!*a),\n\n a => {\n\n return Err(EvalErrorMessage::InvalidArgument(format!(\n\n \"Cannot (not {:?})\",\n\n a\n\n )));\n\n },\n", "file_path": "src/value.rs", "rank": 36, "score": 24164.33064335676 }, { "content": "use std::collections::HashMap;\n\nuse std::fmt;\n\n\n\nuse serde::Serialize;\n\n\n\nuse crate::expr::EvalErrorMessage;\n\n\n", "file_path": "src/value.rs", "rank": 37, "score": 24164.244741611616 }, { "content": " })\n\n }\n\n\n\n /// And\n\n pub fn and(&self, other: &Self) -> Result<Primitive, EvalErrorMessage> {\n\n use Primitive::*;\n\n Ok(match (self, other) {\n\n (Boolean(a), Boolean(b)) => Boolean(*a && *b),\n\n (a, b) => {\n\n return Err(EvalErrorMessage::InvalidArgument(format!(\n\n \"Cannot (and {:?} {:?})\",\n\n a, b\n\n )));\n\n },\n\n })\n\n }\n\n\n\n /// Or\n\n pub fn or(&self, other: &Self) -> Result<Primitive, EvalErrorMessage> {\n\n use Primitive::*;\n", "file_path": "src/value.rs", "rank": 38, "score": 24163.506221616055 }, { "content": " \"Cannot (mul 
{:?} {:?})\",\n\n a, b\n\n )));\n\n },\n\n })\n\n }\n\n}\n\nimpl PartialEq for Primitive {\n\n fn eq(&self, other: &Self) -> bool {\n\n match self {\n\n Self::Boolean(s) => match other {\n\n Self::Boolean(o) => s == o,\n\n _ => false,\n\n },\n\n Self::Integer(s) => match other {\n\n Self::Integer(o) => s == o,\n\n Self::Float(o) => int_float_eq(*s, *o),\n\n _ => false,\n\n },\n\n Self::Float(s) => match other {\n", "file_path": "src/value.rs", "rank": 39, "score": 24162.491319652778 }, { "content": " Ok(match (self, other) {\n\n (Boolean(a), Boolean(b)) => Boolean(*a || *b),\n\n (a, b) => {\n\n return Err(EvalErrorMessage::InvalidArgument(format!(\n\n \"Cannot (or {:?} {:?})\",\n\n a, b\n\n )));\n\n },\n\n })\n\n }\n\n\n\n /// Add\n\n pub fn add(&self, other: &Self) -> Result<Primitive, EvalErrorMessage> {\n\n use Primitive::*;\n\n Ok(match (self, other) {\n\n (Integer(a), Integer(b)) => {\n\n Integer(a.checked_add(*b).ok_or(EvalErrorMessage::Overflow)?)\n\n },\n\n (Integer(a), Float(b)) | (Float(b), Integer(a)) => Float(*a as f64 + b),\n\n (Float(a), Float(b)) => Float(a + b),\n", "file_path": "src/value.rs", "rank": 40, "score": 24161.672893637548 }, { "content": " (a, b) => {\n\n return Err(EvalErrorMessage::InvalidArgument(format!(\n\n \"Cannot (add {:?} {:?})\",\n\n a, b\n\n )));\n\n },\n\n })\n\n }\n\n\n\n /// Multiply\n\n pub fn mul(&self, other: &Self) -> Result<Primitive, EvalErrorMessage> {\n\n use Primitive::*;\n\n Ok(match (self, other) {\n\n (Integer(a), Integer(b)) => {\n\n Integer(a.checked_mul(*b).ok_or(EvalErrorMessage::Overflow)?)\n\n },\n\n (Integer(a), Float(b)) | (Float(b), Integer(a)) => Float(*a as f64 * b),\n\n (Float(a), Float(b)) => Float(a + b),\n\n (a, b) => {\n\n return Err(EvalErrorMessage::InvalidArgument(format!(\n", "file_path": "src/value.rs", "rank": 41, "score": 24161.57713642163 }, { "content": "fn f_or(location: Location, args: Vec<Expr>) -> Result<Expr, EvalError> {\n\n check_argc_min!(1; location, args);\n\n let mut acc = 
Primitive::Boolean(false);\n\n for arg in args.into_iter() {\n\n acc = acc.or(&value!(arg)).map_err(|err| arg.error_here(err))?;\n\n }\n\n Ok(Expr {\n\n location,\n\n value: ExprValue::Primitive(acc),\n\n })\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 42, "score": 23908.625644618027 }, { "content": "fn f_not(location: Location, args: Vec<Expr>) -> Result<Expr, EvalError> {\n\n check_argc_exact!(1; location, args);\n\n let acc = value!(args[0])\n\n .not()\n\n .map_err(|err| args[0].error_here(err))?;\n\n Ok(Expr {\n\n location,\n\n value: ExprValue::Primitive(acc),\n\n })\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 43, "score": 23908.625644618027 }, { "content": "fn f_and(location: Location, args: Vec<Expr>) -> Result<Expr, EvalError> {\n\n check_argc_min!(1; location, args);\n\n let mut acc = Primitive::Boolean(true);\n\n for arg in args.into_iter() {\n\n acc = acc.and(&value!(arg)).map_err(|err| arg.error_here(err))?;\n\n }\n\n Ok(Expr {\n\n location,\n\n value: ExprValue::Primitive(acc),\n\n })\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 44, "score": 23908.625644618027 }, { "content": "fn f_mul(location: Location, args: Vec<Expr>) -> Result<Expr, EvalError> {\n\n check_argc_min!(2; location, args);\n\n let mut acc = value!(args[0]);\n\n for arg in args.into_iter().skip(1) {\n\n acc = acc.mul(&value!(arg)).map_err(|message| EvalError {\n\n location: arg.location,\n\n message,\n\n })?;\n\n }\n\n Ok(Expr {\n\n location,\n\n value: ExprValue::Primitive(acc),\n\n })\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 45, "score": 23316.42510538497 }, { "content": "fn f_fract(location: Location, args: Vec<Expr>) -> Result<Expr, EvalError> {\n\n check_argc_exact!(1; location, args);\n\n if let Primitive::Float(p) = value!(args[0]) {\n\n Ok(Expr {\n\n location,\n\n value: ExprValue::Primitive(Primitive::Float(p.fract())),\n\n })\n\n } else {\n\n Err(args[0].error_here(EvalErrorMessage::InvalidArgument(\n\n \"Only floats have fractional 
parts\".to_owned(),\n\n )))\n\n }\n\n}\n", "file_path": "src/functions.rs", "rank": 46, "score": 23316.42510538497 }, { "content": "fn f_add(location: Location, args: Vec<Expr>) -> Result<Expr, EvalError> {\n\n check_argc_min!(2; location, args);\n\n let mut acc = value!(args[0]);\n\n for arg in args.into_iter().skip(1) {\n\n acc = acc.add(&value!(arg)).map_err(|message| EvalError {\n\n location: arg.location,\n\n message,\n\n })?;\n\n }\n\n Ok(Expr {\n\n location,\n\n value: ExprValue::Primitive(acc),\n\n })\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 47, "score": 23316.42510538497 }, { "content": "#[derive(Debug, Clone, PartialEq)]\n\nenum TokenValue {\n\n Literal(Primitive),\n\n Symbol(String),\n\n ExprOpen,\n\n ExprClose,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Expr {\n\n pub location: Location,\n\n pub value: ExprValue,\n\n}\n\nimpl Expr {\n\n pub fn error_here(&self, message: EvalErrorMessage) -> EvalError {\n\n self.location.error_here(message)\n\n }\n\n\n\n fn resolve_all(self, ctx: &Context) -> Result<Self, EvalError> {\n\n match self.value {\n\n ExprValue::Primitive(_) => Ok(self),\n", "file_path": "src/expr.rs", "rank": 48, "score": 21581.612400284528 }, { "content": " context.insert(constant.name.clone(), constant.value());\n\n }\n\n\n\n // Generate files to memory\n\n let outputs = opts\n\n .languages()\n\n .into_iter()\n\n .map(|(lang_name, lang_opts)| {\n\n log::info!(\"Processing target {}\", lang_name);\n\n let mut buffer = String::new();\n\n\n\n // Imports\n\n if opts.codegen.comment_sections {\n\n buffer.push_str(&lang_opts.format_comment(\"Imports\"));\n\n }\n\n let mut imports: Vec<String> = constants\n\n .iter()\n\n .flat_map(|c| lang_opts.constant_imports(c))\n\n .collect();\n\n imports.sort();\n", "file_path": "src/main.rs", "rank": 49, "score": 22.850097692238837 }, { "content": " imports.dedup();\n\n for import in &imports {\n\n buffer.push_str(&lang_opts.format_import(import).ok_or_else(|| {\n\n 
Error::ImportsNotSupported {\n\n language: lang_name.to_owned(),\n\n }\n\n })?);\n\n buffer.push('\\n');\n\n }\n\n\n\n // Intro\n\n if opts.codegen.comment_sections {\n\n buffer.push_str(&lang_opts.format_comment(\"Start body block\"));\n\n }\n\n buffer.push_str(&lang_opts.format_intro());\n\n\n\n // Actual constant values\n\n if opts.codegen.comment_sections {\n\n buffer.push_str(&lang_opts.format_comment(\"Constants\"));\n\n }\n", "file_path": "src/main.rs", "rank": 50, "score": 19.131579531866738 }, { "content": "use self::constants::{Constant, Constants};\n\nuse self::expr::EvalError;\n\nuse self::options::Options;\n\nuse self::value::Context;\n\n\n\n#[derive(Debug, StructOpt, Default)]\n\n#[structopt(author, about)]\n\n#[structopt(rename_all = \"kebab-case\")]\n\npub struct Config {\n\n /// Target directory for generated files\n\n #[structopt(short, long, parse(from_os_str))]\n\n pub target_dir: PathBuf,\n\n\n\n /// Do not actually write files\n\n #[structopt(short, long)]\n\n pub dry_run: bool,\n\n\n\n /// Print output filenames to stdout\n\n #[structopt(short, long)]\n\n pub print_files: bool,\n", "file_path": "src/main.rs", "rank": 51, "score": 17.075329460184633 }, { "content": " for constant in &constants {\n\n buffer.push_str(&lang_opts.format_constant(constant).ok_or_else(|| {\n\n Error::TypeRequired {\n\n language: lang_name.to_owned(),\n\n constant: constant.name.to_owned(),\n\n }\n\n })?);\n\n buffer.push('\\n');\n\n }\n\n\n\n // Outro\n\n if opts.codegen.comment_sections {\n\n buffer.push_str(&lang_opts.format_comment(\"End body block\"));\n\n }\n\n buffer.push_str(&lang_opts.format_outro());\n\n\n\n // Run formatter if available\n\n if let Some(f) = &lang_opts.formatter {\n\n buffer = run_formatter(f, &buffer)?;\n\n }\n", "file_path": "src/main.rs", "rank": 52, "score": 16.992526713781604 }, { "content": " ImportsNotSupported { language: String },\n\n TypeRequired { language: String, constant: String },\n\n}\n\nimpl fmt::Display for Error {\n\n fn 
fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n match self {\n\n Self::Evaluation(name, error) => write!(f, \"In constant {:?}: {}\", name, error),\n\n Self::DuplicateConstant(name) => write!(f, \"Duplicate constant definition {:?}\", name),\n\n Self::ImportsNotSupported { language } => write!(\n\n f,\n\n \"Language {:?} does not specify import syntax, but it is required\",\n\n language\n\n ),\n\n Self::TypeRequired { language, constant } => write!(\n\n f,\n\n \"Language {:?} requires types, but constant {:?} does not provide one\",\n\n language, constant\n\n ),\n\n _ => write!(f, \"{:?}\", self),\n\n }\n", "file_path": "src/main.rs", "rank": 53, "score": 13.922161870300581 }, { "content": "\n\n /// Target filename stem\n\n #[structopt(short, long, parse(from_os_str), default_value = \"constants\")]\n\n pub stem: OsString,\n\n\n\n /// File specifying generation options\n\n #[structopt(long = \"--options\", parse(from_os_str))]\n\n pub options_file: PathBuf,\n\n\n\n /// File specifying constants\n\n #[structopt(parse(from_os_str))]\n\n pub constants_file: Vec<PathBuf>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n Io(io::Error),\n\n Evaluation(String, EvalError),\n\n DuplicateConstant(String),\n\n Formatter(String),\n", "file_path": "src/main.rs", "rank": 54, "score": 13.38099483138404 }, { "content": "use std::fmt;\n\n\n\nuse crate::functions::Functions;\n\nuse crate::value::{Context, Primitive};\n\n\n\nuse lazy_static::lazy_static;\n\nuse regex::Regex;\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct Location {\n\n string: String,\n\n start: usize,\n\n len: usize,\n\n}\n\nimpl Location {\n\n pub fn new(s: &str, start: usize, len: usize) -> Self {\n\n Self {\n\n string: s.to_owned(),\n\n start,\n\n len,\n", "file_path": "src/expr.rs", "rank": 55, "score": 12.8468677727158 }, { "content": " pub fn insert(&mut self, key: &str, value: F) {\n\n self.0.insert(key.to_string(), value);\n\n }\n\n\n\n pub fn get(&self, key: &str) -> 
Option<&F> {\n\n self.0.get(key)\n\n }\n\n}\n\n\n\nmacro_rules! check_argc_exact {\n\n ($c:expr; $location:expr, $args:expr) => {\n\n if $args.len() != $c {\n\n return Err(EvalError {\n\n location: $location,\n\n message: EvalErrorMessage::ArgumentCount,\n\n });\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 56, "score": 10.973947399321055 }, { "content": "#![deny(unused_must_use)]\n\n#![deny(mutable_borrow_reservation_conflict)]\n\n#![allow(clippy::cast_lossless)]\n\n\n\nuse std::ffi::OsString;\n\nuse std::fmt;\n\nuse std::fs;\n\nuse std::io::{self};\n\nuse std::path::PathBuf;\n\n\n\nuse structopt::{self, StructOpt};\n\n\n\nmod constants;\n\nmod expr;\n\nmod format_value;\n\nmod functions;\n\nmod options;\n\nmod template;\n\nmod value;\n\n\n", "file_path": "src/main.rs", "rank": 57, "score": 10.734592615934519 }, { "content": " }\n\n }\n\n pub fn error_here(&self, message: EvalErrorMessage) -> EvalError {\n\n EvalError {\n\n location: self.clone(),\n\n message,\n\n }\n\n }\n\n}\n\nimpl fmt::Display for Location {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n write!(\n\n f,\n\n \" {}\\n {}{}\",\n\n self.string,\n\n \" \".repeat(self.start),\n\n \"^\".repeat(self.len)\n\n )\n\n }\n\n}\n", "file_path": "src/expr.rs", "rank": 58, "score": 10.64447134097308 }, { "content": "\n\n Ok((lang_name, lang_opts, buffer))\n\n })\n\n .collect::<Result<Vec<_>, Error>>()?;\n\n\n\n // Actually write generated files\n\n for (lang_name, lang_opts, buffer) in outputs.into_iter() {\n\n let target_file = args.target_dir.join(format!(\n\n \"{}{}\",\n\n args.stem.to_str().unwrap(),\n\n lang_opts.file_ext\n\n ));\n\n if args.print_files {\n\n println!(\"{}\", target_file.to_str().unwrap());\n\n }\n\n if !args.dry_run {\n\n log::info!(\"Writing {} file: {:?}\", lang_name, target_file);\n\n fs::write(target_file, buffer.as_bytes())?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 59, "score": 9.8783562048425 }, { 
"content": " } else {\n\n Ok(self)\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum ExprValue {\n\n Primitive(Primitive),\n\n Symbol(String),\n\n Call(String, Vec<Expr>),\n\n}\n\n\n", "file_path": "src/expr.rs", "rank": 60, "score": 9.116606604973903 }, { "content": "\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\n#[must_use]\n\npub struct EvalError {\n\n pub location: Location,\n\n pub message: EvalErrorMessage,\n\n}\n\nimpl fmt::Display for EvalError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n use EvalErrorMessage::*;\n\n write!(\n\n f,\n\n \"{}\\n{}\",\n\n match &self.message {\n\n InvalidChar(c) => format!(\"Invalid character {:?} for this position\", c),\n\n EmptyExpression => \"Empty expressions are not allowed\".to_owned(),\n\n UnmatchedOpen => \"Unmatched opening '('\".to_owned(),\n\n UnmatchedClose => \"Unmatched closing ')'\".to_owned(),\n\n UnexpectedToken => \"Unexpected token\".to_owned(),\n\n CallNonSymbol => \"Only functions can be called\".to_owned(),\n", "file_path": "src/expr.rs", "rank": 61, "score": 8.455834875629414 }, { "content": " ExprValue::Symbol(sym) => {\n\n if let Some(value) = ctx.get(&sym) {\n\n Ok(Expr {\n\n location: self.location,\n\n value: ExprValue::Primitive(value.clone()),\n\n })\n\n } else {\n\n Err(EvalError {\n\n location: self.location,\n\n message: EvalErrorMessage::UnknownSymbol(sym.clone()),\n\n })\n\n }\n\n },\n\n ExprValue::Call(sym, args) => Ok(Self {\n\n location: self.location,\n\n value: ExprValue::Call(\n\n sym,\n\n args.into_iter()\n\n .map(|a| a.resolve_all(ctx))\n\n .collect::<Result<Vec<Self>, EvalError>>()?,\n", "file_path": "src/expr.rs", "rank": 62, "score": 7.668961446474513 }, { "content": " }\n\n }\n\n\n\n let last_tok_index = buffer[buf_index].2;\n\n let mut expr_iter = buffer.drain(buf_index..).map(|(_, e, i)| (e, i));\n\n if let Some((function, fn_tok_index)) = expr_iter.next() {\n\n if let ExprValue::Symbol(fn_sym) = function.value {\n\n let args = 
expr_iter.map(|(e, _)| e).collect();\n\n buffer.push((\n\n level,\n\n Expr {\n\n location: function.location,\n\n value: ExprValue::Call(fn_sym, args),\n\n },\n\n fn_tok_index,\n\n ));\n\n } else {\n\n return Err(\n\n tokens[fn_tok_index].error_here(EvalErrorMessage::CallNonSymbol)\n\n );\n", "file_path": "src/expr.rs", "rank": 63, "score": 7.614109206914393 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::expr::{EvalError, EvalErrorMessage, Expr, ExprValue, Location};\n\nuse crate::value::Primitive;\n\n\n", "file_path": "src/functions.rs", "rank": 64, "score": 7.6138216299877 }, { "content": " }\n\n}\n\nimpl From<io::Error> for Error {\n\n fn from(error: io::Error) -> Self {\n\n Self::Io(error)\n\n }\n\n}\n\nimpl From<(Constant, EvalError)> for Error {\n\n fn from((c, error): (Constant, EvalError)) -> Self {\n\n Self::Evaluation(c.name, error)\n\n }\n\n}\n\n\n\n#[paw::main]\n", "file_path": "src/main.rs", "rank": 65, "score": 7.0453348409758165 }, { "content": " UnknownSymbol(sym) => format!(\"Unknown symbol name {:?}\", sym),\n\n UnknownFunction(name) => format!(\"Unknown function {:?}\", name),\n\n ArgumentCount => \"Function argument count incorrect\".to_owned(),\n\n InvalidArgument(msg) => format!(\"Argument invalid: {}\", msg),\n\n Overflow => \"Overflow or underflow occurred\".to_owned(),\n\n },\n\n self.location\n\n )\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub enum EvalErrorMessage {\n\n InvalidChar(char),\n\n EmptyExpression,\n\n UnmatchedOpen,\n\n UnmatchedClose,\n\n UnexpectedToken,\n\n CallNonSymbol,\n\n UnknownSymbol(String),\n\n UnknownFunction(String),\n\n ArgumentCount,\n\n InvalidArgument(String),\n\n Overflow,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n", "file_path": "src/expr.rs", "rank": 66, "score": 7.003540773023318 }, { "content": "# ConstCodeGen - Share constants between languages\n\n\n\nFeatures:\n\n* Language agnostic: supports (almost) any language\n\n* Generates header files or replaces 
placeholders\n\n\n\nPlanned features:\n\n* Read from JSON in addition to TOML\n\n* Constraints for automatically checking values for validity\n\n* Data format exports, JSON etc.\n\n* Proper, autogenerated documentation\n\n\n\nNon-goals:\n\n* Turing complete configuration language\n\n* Sharing code - maybe small pure functions but not more\n\n\n\n## Try it!\n\n\n\n```bash\n\ncargo run -- --target-dir target/ --options-file examples/options.toml -c examples/constants.toml\n\ncat target/constants.rs\n\ncat target/constants.py\n\n```\n", "file_path": "README.md", "rank": 67, "score": 6.594576441112559 }, { "content": " location: Location::new(text, offset, cap.get(0).unwrap().as_str().len()),\n\n type_: TokenValue::Literal(Primitive::Integer(\n\n i128::from_str_radix(\n\n cap.get(0).unwrap().as_str(), 10\n\n )\n\n .expect(\"Integer parsing failed\"), // TODO: better error message\n\n )),\n\n });\n\n offset += cap.get(0).unwrap().as_str().len();\n\n } else if let Some(cap) = RE_BLN.captures(&text[offset..]) {\n\n let value_str = cap.get(0).unwrap().as_str();\n\n result.push(Token {\n\n location: Location::new(text, offset, value_str.len()),\n\n type_: TokenValue::Literal(Primitive::Boolean(value_str.parse().unwrap())),\n\n });\n\n offset += cap.get(0).unwrap().as_str().len();\n\n } else if let Some(m) = RE_SYM.find(&text[offset..]) {\n\n result.push(Token {\n\n location: Location::new(text, offset, m.as_str().len()),\n\n type_: TokenValue::Symbol(m.as_str().to_owned()),\n", "file_path": "src/expr.rs", "rank": 68, "score": 5.528811938848765 }, { "content": " ),\n\n }),\n\n }\n\n }\n\n\n\n fn call_functions(self, fns: &Functions) -> Result<Self, EvalError> {\n\n if let ExprValue::Call(sym, args) = self.value {\n\n let args = args\n\n .into_iter()\n\n .map(|a| a.call_functions(fns))\n\n .collect::<Result<Vec<Self>, EvalError>>()?;\n\n\n\n if let Some(fn_) = fns.get(&sym) {\n\n fn_(self.location, args)\n\n } else {\n\n Err(EvalError {\n\n location: self.location,\n\n 
message: EvalErrorMessage::UnknownFunction(sym),\n\n })\n\n }\n", "file_path": "src/expr.rs", "rank": 69, "score": 5.338926562060588 }, { "content": " Expr {\n\n location,\n\n value: ExprValue::Symbol(sym),\n\n },\n\n index,\n\n )),\n\n TokenValue::ExprOpen => {\n\n level += 1;\n\n },\n\n TokenValue::ExprClose => {\n\n level -= 1;\n\n if level == 0 && index + 1 < tokens.len() {\n\n return Err(tokens[index + 1].error_here(EvalErrorMessage::UnexpectedToken));\n\n }\n\n let mut buf_index: usize = buffer.len();\n\n while buf_index > 0 {\n\n buf_index -= 1;\n\n if buffer[buf_index].0 != level + 1 {\n\n buf_index += 1;\n\n break;\n", "file_path": "src/expr.rs", "rank": 70, "score": 4.036500150472844 }, { "content": "macro_rules! check_argc_min {\n\n ($minc:expr; $location:expr, $args:expr) => {\n\n if $args.len() < $minc {\n\n return Err(EvalError {\n\n location: $location,\n\n message: EvalErrorMessage::ArgumentCount,\n\n });\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! value {\n\n ($arg:expr) => {\n\n if let ExprValue::Primitive(p) = $arg.value.clone() {\n\n p\n\n } else {\n\n unreachable!(\"Calls and symbols should not exist anymore\")\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/functions.rs", "rank": 71, "score": 3.989380033999764 }, { "content": " let number = cap.get(2).unwrap().as_str().replace(\"_\", \"\");\n\n println!(\"{:?} => {:?}\", cap.get(0), (&number, &radix));\n\n result.push(Token {\n\n location: Location::new(text, offset, cap.get(0).unwrap().as_str().len()),\n\n type_: TokenValue::Literal(Primitive::Integer(\n\n i128::from_str_radix(\n\n &number,\n\n match radix.map(|m| m.as_str()) {\n\n Some(\"b\") => 2,\n\n Some(\"o\") => 8,\n\n Some(\"x\") => 16,\n\n _ => panic!(\"Invalid radix\"), // TODO: better error message\n\n },\n\n )\n\n .expect(\"Integer parsing failed\"), // TODO: better error message\n\n )),\n\n });\n\n offset += cap.get(0).unwrap().as_str().len();\n\n } else if let Some(cap) = RE_INT.captures(&text[offset..]) {\n\n result.push(Token {\n", 
"file_path": "src/expr.rs", "rank": 72, "score": 3.8514427222836045 }, { "content": " });\n\n offset += m.as_str().len();\n\n } else {\n\n match text[offset..].chars().nth(0).unwrap() {\n\n c if c.is_whitespace() => {\n\n offset += 1;\n\n },\n\n '(' => {\n\n result.push(Token {\n\n location: Location::new(text, offset, 1),\n\n type_: TokenValue::ExprOpen,\n\n });\n\n offset += 1;\n\n },\n\n ')' => {\n\n result.push(Token {\n\n location: Location::new(text, offset, 1),\n\n type_: TokenValue::ExprClose,\n\n });\n\n offset += 1;\n", "file_path": "src/expr.rs", "rank": 73, "score": 3.8312916956318026 }, { "content": "\n\n macro_rules! evaluate {\n\n ($s:expr) => {{ evaluate($s, &Context::new(), &Functions::default()) }};\n\n }\n\n\n\n #[test]\n\n fn test_eval_int() {\n\n for v in -10..=10 {\n\n assert_eq!(evaluate!(&v.to_string()), Ok(Primitive::Integer(v)));\n\n }\n\n\n\n assert_eq!(evaluate!(\"-0\"), Ok(Primitive::Integer(0)));\n\n assert_eq!(evaluate!(\"+1\"), Ok(Primitive::Integer(1)));\n\n }\n\n\n\n #[test]\n\n fn test_eval_hex_int() {\n\n assert_eq!(evaluate!(\"0x1\"), Ok(Primitive::Integer(1)));\n\n assert_eq!(evaluate!(\"0xf00\"), Ok(Primitive::Integer(0xf00)));\n\n assert_eq!(evaluate!(\"0xffff_8000_0000_0000\"), Ok(Primitive::Integer(0xffff_8000_0000_0000)));\n", "file_path": "src/expr.rs", "rank": 74, "score": 1.2104138439241265 } ]
Rust
websolver/src/ui/view/sudoku.rs
pepijnd/sudoku_web_solver
c3c04da1349d268843772abc5703bec3634a2906
use solver::Cell; use crate::{ ui::controller::{app::AppController, sudoku::SudokuController}, util::InitCell, }; use webelements::{we_builder, Result, WebElement}; #[we_builder( <div class="sdk-sudoku"> <CellBox we_field="cells" we_repeat="81" we_element /> </div> )] #[derive(Debug, Clone)] pub struct Sudoku {} impl WebElement for Sudoku { fn init(&mut self) -> Result<()> { for (index, cell) in self.cells.iter_mut().enumerate() { cell.set_cell(Cell::from_index(index)); } Ok(()) } } impl Sudoku { pub fn controller(&self, app: InitCell<AppController>) -> Result<SudokuController> { SudokuController::build(app, self) } pub fn cells(&self) -> std::slice::Iter<CellBox> { self.cells.iter() } pub fn update(&self, sudoku: &SudokuController) -> Result<()> { for cell in self.cells.iter() { cell.update(sudoku); } Ok(()) } } #[we_builder( <div class="sdk-cell"> <div class="background" /> <Indicator we_field="indicator" we_element /> <Cage we_field="cage" we_element /> <Options we_field="options" we_element /> <div class="sdk-number" we_field="number" /> </div> )] #[derive(Debug, Clone, WebElement)] pub struct CellBox { cell: Cell, } impl CellBox { pub fn cell(&self) -> Cell { self.cell } pub fn set_cell(&mut self, cell: Cell) { self.cell = cell; self.options.cell = cell; self.indicator.cell = cell; } pub fn update(&self, sudoku: &SudokuController) { let info = sudoku.app.info.info.borrow(); let model = sudoku.state.borrow(); let step = info .solve_step() .as_ref() .map(|s| *s.sudoku.cell(self.cell)); let value = model.start().cell(self.cell); debug_assert!(value <= 9, "invalid cell value {}", value); self.number.remove_class("starting state empty"); self.remove_class("target source selected"); if info.solve().is_some() { self.options.remove_class("hidden"); } else { self.options.add_class("hidden"); } if value > 0 { self.number.set_text(&format!("{}", value)); self.number.add_class("starting"); self.options.add_class("hidden"); } else if let Some(value) = step { 
self.number.add_class("state"); if value > 0 { self.number.set_text(&format!("{}", value)); self.options.add_class("hidden"); } else { self.number.set_text(""); } } else { self.number.add_class("empty"); self.number.set_text(""); } if let Some(step) = info.solve_step().as_ref() { if step.change.is_target(self.cell) { self.add_class("target"); } else if step.change.is_source(self.cell) { self.add_class("source"); } } if let Some(selected) = model.selected() { if selected == self.cell { self.add_class("selected"); } } self.options.update(sudoku); } } #[we_builder( <div class="cell-indicator"> <div class="indicator top" /> <div class="indicator left" /> <div class="indicator right" /> <div class="indicator bottom" /> </div> )] #[derive(Debug, Clone, WebElement)] pub struct Indicator { cell: Cell, } #[we_builder( <div class="cell-cage"> <div class="cage top" /> <div class="cage left" /> <div class="cage right" /> <div class="cage bottom" /> </div> )] #[derive(Debug, Clone, WebElement)] pub struct Cage { cell: Cell, } #[we_builder( <div class="cell-options"> <div class="cell-option" we_field="options" we_repeat="9" /> </div> )] #[derive(Debug, Clone)] pub struct Options { cell: Cell, } impl WebElement for Options { fn init(&mut self) -> Result<()> { dbg!("{:?}", &self.options); for (i, cell) in self.options.iter().enumerate() { cell.set_text(format!("{}", i + 1)); } Ok(()) } } impl Options { fn update(&self, sudoku: &SudokuController) { let info = sudoku.app.info.info.borrow(); for (option, e) in self.options.iter().enumerate() { if let Some(step) = info.solve_step() { let index = option as u8 + 1; let mut cache = step.cache; e.remove_class("target"); e.remove_class("source"); e.remove_class("hidden"); e.remove_class("digit"); if !cache.options(self.cell, &step.sudoku).has(index) { e.add_class("hidden"); } if let Some(step) = info.solve_step() { if step.change.is_target_digit(self.cell, index) { e.add_class("digit") } else if step.change.is_target_option(self.cell, 
index) { e.add_class("target") } else if step.change.is_source_option(self.cell, index) { e.add_class("source") } } } } } }
use solver::Cell; use crate::{ ui::controller::{app::AppController, sudoku::SudokuController}, util::InitCell, }; use webelements::{we_builder, Result, WebElement}; #[we_builder( <div class="sdk-sudoku"> <CellBox we_field="cells" we_repeat="81" we_element /> </div> )] #[derive(Debug, Clone)] pub struct Sudoku {} impl WebElement for Sudoku { fn init(&mut self) -> Result<()> { for (index, cell) in self.cells.iter_mut().enumerate() { cell.set_cell(Cell::from_index(index)); } Ok(()) } } impl Sudoku { pub fn controller(&self, app: InitCell<AppController>) -> Result<SudokuController> { SudokuController::build(app, self) } pub fn cells(&self) -> std::slice::Iter<CellBox> { self.cells.iter() } pub fn update(&self, sudoku: &SudokuController) -> Result<()> { for cell in self.cells.iter() { cell.update(sudoku); } Ok(()) } } #[we_builder( <div class="sdk-cell"> <div class="background" /> <Indicator we_field="indicator" we_element /> <Cage we_field="cage" we_element /> <Options we_field="options" we_element /> <div class="sdk-number" we_field="number" /> </div> )] #[derive(Debug, Clone, WebElement)] pub struct CellBox { cell:
if selected == self.cell { self.add_class("selected"); } } self.options.update(sudoku); } } #[we_builder( <div class="cell-indicator"> <div class="indicator top" /> <div class="indicator left" /> <div class="indicator right" /> <div class="indicator bottom" /> </div> )] #[derive(Debug, Clone, WebElement)] pub struct Indicator { cell: Cell, } #[we_builder( <div class="cell-cage"> <div class="cage top" /> <div class="cage left" /> <div class="cage right" /> <div class="cage bottom" /> </div> )] #[derive(Debug, Clone, WebElement)] pub struct Cage { cell: Cell, } #[we_builder( <div class="cell-options"> <div class="cell-option" we_field="options" we_repeat="9" /> </div> )] #[derive(Debug, Clone)] pub struct Options { cell: Cell, } impl WebElement for Options { fn init(&mut self) -> Result<()> { dbg!("{:?}", &self.options); for (i, cell) in self.options.iter().enumerate() { cell.set_text(format!("{}", i + 1)); } Ok(()) } } impl Options { fn update(&self, sudoku: &SudokuController) { let info = sudoku.app.info.info.borrow(); for (option, e) in self.options.iter().enumerate() { if let Some(step) = info.solve_step() { let index = option as u8 + 1; let mut cache = step.cache; e.remove_class("target"); e.remove_class("source"); e.remove_class("hidden"); e.remove_class("digit"); if !cache.options(self.cell, &step.sudoku).has(index) { e.add_class("hidden"); } if let Some(step) = info.solve_step() { if step.change.is_target_digit(self.cell, index) { e.add_class("digit") } else if step.change.is_target_option(self.cell, index) { e.add_class("target") } else if step.change.is_source_option(self.cell, index) { e.add_class("source") } } } } } }
Cell, } impl CellBox { pub fn cell(&self) -> Cell { self.cell } pub fn set_cell(&mut self, cell: Cell) { self.cell = cell; self.options.cell = cell; self.indicator.cell = cell; } pub fn update(&self, sudoku: &SudokuController) { let info = sudoku.app.info.info.borrow(); let model = sudoku.state.borrow(); let step = info .solve_step() .as_ref() .map(|s| *s.sudoku.cell(self.cell)); let value = model.start().cell(self.cell); debug_assert!(value <= 9, "invalid cell value {}", value); self.number.remove_class("starting state empty"); self.remove_class("target source selected"); if info.solve().is_some() { self.options.remove_class("hidden"); } else { self.options.add_class("hidden"); } if value > 0 { self.number.set_text(&format!("{}", value)); self.number.add_class("starting"); self.options.add_class("hidden"); } else if let Some(value) = step { self.number.add_class("state"); if value > 0 { self.number.set_text(&format!("{}", value)); self.options.add_class("hidden"); } else { self.number.set_text(""); } } else { self.number.add_class("empty"); self.number.set_text(""); } if let Some(step) = info.solve_step().as_ref() { if step.change.is_target(self.cell) { self.add_class("target"); } else if step.change.is_source(self.cell) { self.add_class("source"); } } if let Some(selected) = model.selected() {
random
[ { "content": "#[cfg(feature = \"worker\")]\n\n#[wasm_bindgen]\n\npub fn solve(sudoku: &JsValue) -> Result<JsValue, JsValue> {\n\n let s: Sudoku = sudoku\n\n .into_serde()\n\n .map_err(|e| JsValue::from_str(&format!(\"{}\", e)))?;\n\n let solve = s.solve_steps();\n\n JsValue::from_serde(&solve).map_err(|e| JsValue::from_str(&format!(\"{}\", e)))\n\n}\n", "file_path": "websolver/src/lib.rs", "rank": 0, "score": 110344.24078421883 }, { "content": "#[wasm_bindgen(start)]\n\npub fn run() -> Result<(), JsValue> {\n\n // This provides better error messages in debug mode.\n\n // It's disabled in release mode so it doesn't bloat up the file size.\n\n #[cfg(debug_assertions)]\n\n console_error_panic_hook::set_once();\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "websolver/src/lib.rs", "rank": 1, "score": 103393.4297882707 }, { "content": "#[wasm_bindgen_test(async)]\n\nfn async_test() -> impl Future<Output = ()> {\n\n // Creates a JavaScript Promise which will asynchronously resolve with the value 42.\n\n let promise = js_sys::Promise::resolve(&JsValue::from(42));\n\n\n\n // Converts that Promise into a Future.\n\n // The unit test will wait for the Future to resolve.\n\n JsFuture::from(promise).map(|x| {\n\n assert_eq!(x.unwrap(), 42);\n\n })\n\n}\n", "file_path": "websolver/tests/app.rs", "rank": 2, "score": 94188.09540684774 }, { "content": "#[doc(hidden)]\n\npub fn serialize_array<S, T>(array: &[T], serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: serde::Serializer,\n\n T: serde::ser::Serialize,\n\n{\n\n array.serialize(serializer)\n\n}\n\n\n\n#[doc(hidden)]\n\n#[macro_export]\n\nmacro_rules! 
serde_array {\n\n ($m:ident, $n:expr) => {\n\n pub mod $m {\n\n pub use crate::output::serialize_array as serialize;\n\n use serde::{de, Deserialize, Deserializer};\n\n use std::{mem, ptr};\n\n\n\n pub fn deserialize<'de, D, T>(deserializer: D) -> Result<[T; $n], D::Error>\n\n where\n\n D: Deserializer<'de>,\n", "file_path": "solver/src/output.rs", "rank": 3, "score": 92108.3627639639 }, { "content": "#[wasm_bindgen_test]\n\nfn web_test() {\n\n // assert_eq!(1, 1);\n\n}\n\n\n\n// This runs a unit test in the browser, and in addition it supports asynchronous Future APIs.\n", "file_path": "websolver/tests/app.rs", "rank": 4, "score": 71638.30525736709 }, { "content": "#[test]\n\nfn rust_test() {\n\n // assert_eq!(1, 1);\n\n}\n\n\n\n// This runs a unit test in the browser, so it can use browser APIs.\n", "file_path": "websolver/tests/app.rs", "rank": 5, "score": 71638.30525736709 }, { "content": "#[test]\n\nfn solver_solve() {\n\n for (i, &(sudoku, solution)) in INPUT.iter().enumerate() {\n\n eprintln!(\"{}: {}\", i, sudoku);\n\n let solve = Sudoku::from(sudoku).solve();\n\n if let Solution::Complete(solve) = solve {\n\n assert_eq!(solve, Sudoku::from(solution));\n\n } else {\n\n panic!(\"No valid solution found\");\n\n };\n\n }\n\n}\n\n\n", "file_path": "solver/tests/solver.rs", "rank": 6, "score": 43231.58859542465 }, { "content": "#[test]\n\nfn solver_steps() {\n\n for &(sudoku, _solution) in INPUT {\n\n let solve = Sudoku::from(sudoku).solve_steps();\n\n assert!(solve.end().valid);\n\n assert_eq!(solve.end().solver, Solver::Solved);\n\n }\n\n}\n", "file_path": "solver/tests/solver.rs", "rank": 7, "score": 43231.58859542465 }, { "content": "pub trait SolverExt {\n\n fn as_cloned_box(&self) -> Box<dyn EntrySolver>;\n\n fn as_any(&self) -> &dyn std::any::Any;\n\n fn as_any_mut(&mut self) -> &mut dyn std::any::Any;\n\n fn typename(&self) -> &str;\n\n}\n\n\n\nimpl<T> SolverExt for T\n\nwhere\n\n T: 'static + EntrySolver + Clone + Default,\n\n{\n\n fn 
as_cloned_box(&self) -> Box<dyn EntrySolver> {\n\n Box::new(self.clone())\n\n }\n\n\n\n fn as_any(&self) -> &dyn std::any::Any {\n\n self\n\n }\n\n\n\n fn as_any_mut(&mut self) -> &mut dyn std::any::Any {\n", "file_path": "solver/src/lib.rs", "rank": 8, "score": 39962.12719634816 }, { "content": "import \"../style/index.scss\";\n\nimport SolverWorker from \"worker-loader!./solver.js\";\n\n\n\nfunction test() {\n\n console.log(\"testing\");\n\n}\n\n\n\nimport(\"../pkg\").then((lib) => {\n\n let worker = new SolverWorker();\n\n let app = new lib.App();\n\n worker.onmessage = function (e) {\n\n if (e.data[0] == \"init\") {\n\n function solve(sudoku) {\n\n worker.postMessage([sudoku]);\n\n }\n\n app.set_solver(solve);\n\n app.start();\n\n } else if (e.data[0] == \"solved\") {\n\n console.log(e.data[1]);\n\n app.on_solve(e.data[1]);\n\n app.on_measure(e.data[2]);\n\n }\n\n };\n\n worker.postMessage([\"init\"]);\n\n});\n", "file_path": "js/index.js", "rank": 9, "score": 38788.08460853343 }, { "content": "struct StateModStep<'a> {\n\n s_mod: &'a StateMod,\n\n sudoku: &'a Sudoku,\n\n cache: &'a Options,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct SudokuInfo {\n\n measure: Option<Measure>,\n\n solve: Option<Solve>,\n\n step: usize,\n\n s_step: Option<SolveStep>,\n\n max: usize,\n\n}\n\n\n\nimpl SudokuInfo {\n\n pub fn measure(&self) -> Option<&Measure> {\n\n self.measure.as_ref()\n\n }\n\n\n", "file_path": "websolver/src/ui/model/info.rs", "rank": 10, "score": 38379.89565006373 }, { "content": "fn solver_benchmark(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"Solver\");\n\n\n\n for (i, (input, _)) in INPUT.iter().enumerate() {\n\n group.bench_with_input(format!(\"sudoku_{:?}\", i), input, |b, i| {\n\n let s = Sudoku::from(*i);\n\n b.iter(|| black_box(&s.clone()).solve())\n\n });\n\n }\n\n\n\n group.finish();\n\n}\n\n\n\ncriterion_group!(benches, solver_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "solver/benches/bench_solver.rs", "rank": 11, 
"score": 34111.97812970393 }, { "content": "use futures::prelude::*;\n\nuse wasm_bindgen::JsValue;\n\nuse wasm_bindgen_futures::JsFuture;\n\nuse wasm_bindgen_test::{wasm_bindgen_test, wasm_bindgen_test_configure};\n\n\n\nwasm_bindgen_test_configure!(run_in_browser);\n\n\n\n// This runs a unit test in native Rust, so it can only use Rust APIs.\n\n#[test]\n", "file_path": "websolver/tests/app.rs", "rank": 12, "score": 31116.174548769377 }, { "content": "#![allow(clippy::suspicious_operation_groupings)]\n\n\n\nuse crate::{output::ser_array::a81, Cell, Sudoku};\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Copy, Clone, PartialEq, serde::Serialize, serde::Deserialize)]\n\npub struct CellOptions(u16);\n\n\n\nimpl CellOptions {\n\n pub fn all() -> Self {\n\n Self(0b1111111110)\n\n }\n\n\n\n #[inline(always)]\n\n pub fn add(&mut self, i: u8) {\n\n assert!(i <= 9);\n\n self.0 |= 0x1 << i\n\n }\n\n\n", "file_path": "solver/src/options.rs", "rank": 13, "score": 30896.80183929072 }, { "content": " cells: [CellOptions; 81],\n\n}\n\n\n\nimpl Options {\n\n pub fn remove(&mut self, cell: Cell, value: u8) -> bool {\n\n self.cells[cell.index()].remove(value)\n\n }\n\n\n\n pub fn options(&mut self, cell: Cell, sudoku: &Sudoku) -> CellOptions {\n\n let value = *sudoku.cell(cell);\n\n if value != 0 {\n\n let mut options = CellOptions::default();\n\n options.add(value);\n\n self.cells[cell.index()] = options;\n\n return options;\n\n }\n\n let options = &mut self.cells[cell.index()];\n\n for value in sudoku.row(cell.row) {\n\n options.remove(value);\n\n }\n", "file_path": "solver/src/options.rs", "rank": 14, "score": 30893.121331386854 }, { "content": " }\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::{Cell, CellOptions, Sudoku};\n\n\n\n use super::Options;\n\n\n\n #[test]\n\n fn options_all() {\n\n let options = CellOptions::all();\n\n assert_eq!(\n\n options.iter().collect::<Vec<u8>>(),\n\n vec![1, 2, 3, 4, 5, 6, 7, 8, 9]\n\n );\n\n }\n\n\n", 
"file_path": "solver/src/options.rs", "rank": 15, "score": 30892.84010524567 }, { "content": " let mut options = Self::default();\n\n for i in input.into_iter() {\n\n options.add(*i);\n\n }\n\n options\n\n }\n\n}\n\n\n\nimpl Default for CellOptions {\n\n fn default() -> Self {\n\n Self(0b0000000000)\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct OptionPair(u8, u8);\n\n\n\nimpl OptionPair {\n\n pub fn common(&self, other: Self) -> Option<u8> {\n\n // Note we compare self.0 to other.1\n", "file_path": "solver/src/options.rs", "rank": 16, "score": 30891.9769173737 }, { "content": " for value in sudoku.col(cell.col) {\n\n options.remove(value);\n\n }\n\n for value in sudoku.sqr(cell.sqr()) {\n\n options.remove(value);\n\n }\n\n\n\n *options\n\n }\n\n\n\n pub fn cell(&self, cell: Cell) -> &CellOptions {\n\n &self.cells[9 * cell.row + cell.col]\n\n }\n\n\n\n pub fn cells(&self) -> &[CellOptions] {\n\n &self.cells\n\n }\n\n}\n\n\n\nimpl Default for Options {\n", "file_path": "solver/src/options.rs", "rank": 17, "score": 30890.20305475481 }, { "content": " if self.0 == other.0 || self.0 == other.1 {\n\n Some(self.0)\n\n } else if self.1 == other.0 || self.1 == other.1 {\n\n Some(self.1)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl PartialEq for OptionPair {\n\n fn eq(&self, other: &Self) -> bool {\n\n (self.0 == other.0 || self.0 == other.1) && (self.1 == other.0 || self.1 == other.1)\n\n }\n\n}\n\n\n\npub struct OptionsIter<'a> {\n\n options: &'a CellOptions,\n\n i: u8,\n\n}\n", "file_path": "solver/src/options.rs", "rank": 18, "score": 30889.20716757644 }, { "content": " fn default() -> Self {\n\n Self {\n\n cells: [CellOptions::all(); 81],\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for Options {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n for row in 0..9 {\n\n if row % 3 == 0 {\n\n writeln!(f)?;\n\n }\n\n for col in 0..9 {\n\n if col % 3 == 0 {\n\n write!(f, \"|\")?;\n\n }\n\n write!(f, \"{}|\", self.cell(Cell { 
row, col }).len())?;\n\n }\n\n writeln!(f)?;\n", "file_path": "solver/src/options.rs", "rank": 19, "score": 30888.702580410267 }, { "content": "\n\nimpl<'a> Iterator for OptionsIter<'a> {\n\n type Item = u8;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n while self.i < 9 {\n\n self.i += 1;\n\n if self.options.has(self.i) {\n\n return Some(self.i);\n\n } else {\n\n continue;\n\n }\n\n }\n\n None\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Deserialize, Serialize)]\n\npub struct Options {\n\n #[serde(with = \"a81\")]\n", "file_path": "solver/src/options.rs", "rank": 20, "score": 30888.095887258918 }, { "content": " if let Some(first) = first {\n\n if let Some(second) = second {\n\n return Some(OptionPair(first, second));\n\n }\n\n }\n\n None\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for CellOptions {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.debug_list().entries(self.iter()).finish()\n\n }\n\n}\n\n\n\nimpl<'a, T> From<T> for CellOptions\n\nwhere\n\n T: IntoIterator<Item = &'a u8>,\n\n{\n\n fn from(input: T) -> Self {\n", "file_path": "solver/src/options.rs", "rank": 21, "score": 30887.541756130337 }, { "content": " options.remove(5);\n\n options.remove(7);\n\n options.remove(9);\n\n assert_eq!(options.iter().collect::<Vec<u8>>(), vec![1, 3, 4, 6, 8]);\n\n }\n\n\n\n static SAMPLE: &str =\n\n \"___________98____7_8__6__5__5__4__3___79____2___________27____9_4__5__6_3____62__\";\n\n\n\n #[test]\n\n fn cache_string() {\n\n let cache = Options::default();\n\n cache.to_string();\n\n }\n\n\n\n #[test]\n\n fn cache_options() {\n\n let sudoku = Sudoku::from(SAMPLE);\n\n let mut cache = Options::default();\n\n let options = CellOptions::from(&[1, 2, 4, 5, 6, 7]);\n\n assert_eq!(cache.options(Cell::new(0, 0), &sudoku), options);\n\n }\n\n}\n", "file_path": "solver/src/options.rs", "rank": 22, "score": 30884.43488262858 }, { "content": " }\n\n None\n\n }\n\n\n\n #[inline]\n\n pub fn iter(&self) -> impl Iterator<Item = u8> + '_ {\n\n 
OptionsIter {\n\n options: self,\n\n i: 0,\n\n }\n\n }\n\n\n\n #[inline]\n\n pub fn len(&self) -> usize {\n\n let mut c = 0;\n\n for i in 1..=9 {\n\n if self.has(i) {\n\n c += 1;\n\n }\n\n }\n", "file_path": "solver/src/options.rs", "rank": 23, "score": 30883.934815265697 }, { "content": " pub fn combine(&mut self, other: &Self) {\n\n for i in 1..=9 {\n\n if other.has(i) {\n\n self.add(i);\n\n }\n\n }\n\n }\n\n\n\n pub fn as_pair(&self) -> Option<OptionPair> {\n\n let mut first = None;\n\n let mut second = None;\n\n for o in self.iter() {\n\n if first.is_none() {\n\n first = Some(o)\n\n } else if second.is_none() {\n\n second = Some(o)\n\n } else {\n\n return None;\n\n }\n\n }\n", "file_path": "solver/src/options.rs", "rank": 24, "score": 30883.257635981412 }, { "content": " #[test]\n\n fn options_none() {\n\n let options = CellOptions::default();\n\n assert_eq!(options.iter().collect::<Vec<u8>>(), vec![]);\n\n }\n\n\n\n #[test]\n\n fn options_add() {\n\n let mut options = CellOptions::default();\n\n options.add(2);\n\n options.add(5);\n\n options.add(7);\n\n options.add(9);\n\n assert_eq!(options.iter().collect::<Vec<u8>>(), vec![2, 5, 7, 9]);\n\n }\n\n\n\n #[test]\n\n fn options_remove() {\n\n let mut options = CellOptions::all();\n\n options.remove(2);\n", "file_path": "solver/src/options.rs", "rank": 25, "score": 30881.817253272646 }, { "content": " pub fn is_empty(&self) -> bool {\n\n for i in 1..=9 {\n\n if self.has(i) {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n\n\n #[inline]\n\n pub fn is_set(&self, other: &Self) -> bool {\n\n for i in 1..=9 {\n\n if other.has(i) && !self.has(i) {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n\n\n #[inline]\n", "file_path": "solver/src/options.rs", "rank": 26, "score": 30880.76085434831 }, { "content": " #[inline(always)]\n\n pub fn remove(&mut self, i: u8) -> bool {\n\n assert!(i <= 9);\n\n let old = (self.0 & (0x1 << i)) >> i;\n\n self.0 &= !(0x1 << i);\n\n old != 0\n\n }\n\n\n\n #[inline(always)]\n\n pub fn 
has(&self, i: u8) -> bool {\n\n (self.0 & (0x1 << i)) >> i != 0\n\n }\n\n\n\n #[inline]\n\n pub fn take(&mut self) -> Option<u8> {\n\n for i in 1..=9 {\n\n let old = self.remove(i);\n\n if old {\n\n return Some(i);\n\n }\n", "file_path": "solver/src/options.rs", "rank": 27, "score": 30880.16827266699 }, { "content": " c\n\n }\n\n\n\n #[inline]\n\n pub fn found(&self) -> Option<u8> {\n\n let mut found = None;\n\n for i in 1..=9 {\n\n if self.has(i) {\n\n match found {\n\n Some(_) => return None,\n\n None => {\n\n found = Some(i as u8);\n\n }\n\n }\n\n }\n\n }\n\n found\n\n }\n\n\n\n #[inline]\n", "file_path": "solver/src/options.rs", "rank": 28, "score": 30879.84142246314 }, { "content": "pub trait EntrySolver: SolverExt + std::fmt::Debug {\n\n fn advance(&mut self, state: &mut State) -> bool;\n\n fn verified(&self) -> bool {\n\n true\n\n }\n\n fn terminate(&self) -> bool {\n\n false\n\n }\n\n}\n", "file_path": "solver/src/lib.rs", "rank": 29, "score": 29991.630639740586 }, { "content": "//!\n\n\n\nuse std::rc::Rc;\n\n\n\nuse crate::{\n\n output::{ser_array::a81, Solve},\n\n util::Domain,\n\n Cell, Config, Entry, Info, Options, Solver,\n\n};\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// Data structure that holds sudoku data.\n\n#[derive(Debug, Copy, Clone, Deserialize, Serialize)]\n\npub struct Sudoku {\n\n #[serde(with = \"a81\")]\n\n inner: [u8; 81],\n\n}\n\n\n\nimpl Sudoku {\n", "file_path": "solver/src/sudoku.rs", "rank": 30, "score": 29652.326546656903 }, { "content": " None\n\n } else {\n\n Some(*self.sudoku.cell(self.iter.cell(self.i)))\n\n };\n\n\n\n self.i += 1;\n\n out\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Buffer {\n\n buffer: Vec<Entry>,\n\n}\n\n\n\nimpl Buffer {\n\n pub fn new(sudoku: Sudoku, config: Rc<Config>) -> Self {\n\n let mut buffer = Vec::with_capacity(32);\n\n let state = Entry::new(sudoku, Options::default(), Solver::Init, config);\n\n buffer.push(state);\n", "file_path": "solver/src/sudoku.rs", "rank": 31, "score": 
29650.91328666767 }, { "content": " *output = value;\n\n }\n\n }\n\n }\n\n Sudoku { inner: sudoku }\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct SudokuIter<'a> {\n\n sudoku: &'a Sudoku,\n\n iter: Domain,\n\n i: usize,\n\n}\n\n\n\nimpl<'a> Iterator for SudokuIter<'a> {\n\n type Item = u8;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n let out = if self.i == 9 {\n", "file_path": "solver/src/sudoku.rs", "rank": 32, "score": 29649.351720210172 }, { "content": " fn default() -> Self {\n\n Self { inner: [0; 81] }\n\n }\n\n}\n\n\n\nimpl PartialEq for Sudoku {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.inner == other.inner\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for Sudoku {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {\n\n for row in 0..9 {\n\n if row % 3 == 0 {\n\n writeln!(f)?;\n\n }\n\n for (col, cell) in self.row(row).enumerate() {\n\n if col % 3 == 0 {\n\n write!(f, \"|\")?;\n", "file_path": "solver/src/sudoku.rs", "rank": 33, "score": 29647.78304805438 }, { "content": " }\n\n write!(f, \"{}|\", cell)?;\n\n }\n\n writeln!(f)?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<T> From<T> for Sudoku\n\nwhere\n\n T: AsRef<str>,\n\n{\n\n fn from(input: T) -> Self {\n\n assert!(input.as_ref().len() == 81);\n\n let mut sudoku = [0; 81];\n\n for (cell, output) in input.as_ref().chars().zip(sudoku.iter_mut()) {\n\n let value = cell.to_string().parse::<u8>();\n\n if let Ok(value) = value {\n\n if value > 0 && value <= 9 {\n", "file_path": "solver/src/sudoku.rs", "rank": 34, "score": 29647.36959790992 }, { "content": "\n\n#[derive(Debug, Copy, Clone)]\n\npub enum Solution {\n\n Complete(Sudoku),\n\n Incomplete(Sudoku),\n\n Invalid,\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::Sudoku;\n\n\n\n #[test]\n\n fn sudoku_solve_all() {\n\n let sudoku = Sudoku::from(\n\n \"....27....1...4.....9..57...8....3..5..9..1......32...6.1....4...8....9.....4.6.5\",\n\n );\n\n let solutions = 
sudoku.solve_all();\n\n assert_eq!(solutions.len(), 235);\n\n }\n\n}\n", "file_path": "solver/src/sudoku.rs", "rank": 35, "score": 29647.21272472321 }, { "content": " }\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub fn cell(&self, cell: Cell) -> &u8 {\n\n &self.inner[cell.index()]\n\n }\n\n\n\n pub fn cell_mut(&mut self, cell: Cell) -> &mut u8 {\n\n &mut self.inner[cell.index()]\n\n }\n\n\n\n pub fn set_cell(&mut self, cell: Cell, value: u8) {\n\n *self.cell_mut(cell) = value\n\n }\n\n\n\n pub fn row(&self, row: usize) -> SudokuIter {\n\n SudokuIter {\n", "file_path": "solver/src/sudoku.rs", "rank": 36, "score": 29647.028286681794 }, { "content": " }\n\n\n\n pub fn as_string(self) -> String {\n\n let mut output = String::new();\n\n for cell in self.inner.iter() {\n\n if *cell > 0 {\n\n output.push_str(&format!(\"{}\", cell));\n\n } else {\n\n output.push('.');\n\n }\n\n }\n\n output\n\n }\n\n\n\n pub fn inner(&self) -> &[u8] {\n\n &self.inner\n\n }\n\n}\n\n\n\nimpl Default for Sudoku {\n", "file_path": "solver/src/sudoku.rs", "rank": 37, "score": 29644.815180786605 }, { "content": " Self { buffer }\n\n }\n\n\n\n pub fn get(&mut self) -> Option<&mut Entry> {\n\n self.buffer.last_mut()\n\n }\n\n\n\n pub fn push(&mut self, state: Entry) -> Option<&mut Entry> {\n\n self.buffer.push(state);\n\n self.get()\n\n }\n\n\n\n pub fn pop(&mut self) -> Option<Entry> {\n\n self.buffer.pop()\n\n }\n\n\n\n pub fn into_inner(self) -> Vec<Entry> {\n\n self.buffer\n\n }\n\n}\n", "file_path": "solver/src/sudoku.rs", "rank": 38, "score": 29642.656409539544 }, { "content": " sudoku: self,\n\n iter: Domain::Row(row),\n\n i: 0,\n\n }\n\n }\n\n\n\n pub fn col(&self, col: usize) -> SudokuIter {\n\n SudokuIter {\n\n sudoku: self,\n\n iter: Domain::Col(col),\n\n i: 0,\n\n }\n\n }\n\n\n\n pub fn sqr(&self, sqr: usize) -> SudokuIter {\n\n SudokuIter {\n\n sudoku: self,\n\n iter: Domain::Sqr(sqr),\n\n i: 0,\n\n }\n", "file_path": "solver/src/sudoku.rs", "rank": 39, "score": 29640.63121482776 }, { "content": 
" if !entry.verified() {\n\n break;\n\n };\n\n } else if let Some(last) = last_known {\n\n return Solve::from(last);\n\n } else {\n\n return Solve::invalid(*self);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub fn solve_all(&self) -> Vec<Sudoku> {\n\n let mut solutions = Vec::new();\n\n let config = Config::default();\n\n\n\n let mut buffer = Buffer::new(*self, Rc::new(config));\n\n loop {\n\n if solutions.len() >= 1000 {\n", "file_path": "solver/src/sudoku.rs", "rank": 40, "score": 29638.094974945256 }, { "content": " if last_known.is_none() && old.info.correct {\n\n last_known = Some(old.sudoku);\n\n }\n\n if let Some(entry) = buffer.get() {\n\n entry.merge_info(&old);\n\n if !entry.verified() {\n\n break;\n\n };\n\n } else if let Some(last) = last_known {\n\n return Solution::Incomplete(last);\n\n } else {\n\n return Solution::Invalid;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub fn solve_steps(&self) -> Solve {\n\n let config = Config::default();\n", "file_path": "solver/src/sudoku.rs", "rank": 41, "score": 29637.895328510094 }, { "content": " pub fn solve(&self) -> Solution {\n\n let config = Config::default();\n\n\n\n let mut buffer = Buffer::new(*self, Rc::new(config));\n\n loop {\n\n let entry = buffer.get().unwrap();\n\n if entry.advance() {\n\n let next = entry.make_next();\n\n let entry = buffer.push(next).unwrap();\n\n if entry.terminate() {\n\n return match entry.info {\n\n Info { valid: false, .. } => Solution::Invalid,\n\n Info { solved: true, .. } => Solution::Complete(entry.sudoku),\n\n Info { solved: false, .. 
} => Solution::Incomplete(entry.sudoku),\n\n };\n\n }\n\n } else {\n\n let mut last_known = None;\n\n loop {\n\n let old = buffer.pop().unwrap();\n", "file_path": "solver/src/sudoku.rs", "rank": 42, "score": 29637.735568004326 }, { "content": " return solutions;\n\n }\n\n\n\n let entry = buffer.get().unwrap();\n\n if entry.advance() {\n\n let next = entry.make_next();\n\n let entry = buffer.push(next).unwrap();\n\n if entry.terminate() && entry.info.valid && entry.info.solved {\n\n solutions.push(entry.sudoku);\n\n }\n\n } else {\n\n loop {\n\n let old = buffer.pop().unwrap();\n\n if let Some(entry) = buffer.get() {\n\n entry.merge_info(&old);\n\n if !entry.verified() {\n\n break;\n\n };\n\n } else {\n\n return solutions;\n", "file_path": "solver/src/sudoku.rs", "rank": 43, "score": 29635.75202030518 }, { "content": "\n\n let mut buffer = Buffer::new(*self, Rc::new(config));\n\n loop {\n\n let entry = buffer.get().unwrap();\n\n if entry.advance() {\n\n let next = entry.make_next();\n\n let entry = buffer.push(next).unwrap();\n\n if entry.terminate() {\n\n return Solve::from(buffer);\n\n }\n\n } else {\n\n let mut last_known = None;\n\n\n\n loop {\n\n let old = buffer.pop().unwrap();\n\n if last_known.is_none() && old.info.correct {\n\n last_known = Some(buffer.clone());\n\n }\n\n if let Some(entry) = buffer.get() {\n\n entry.merge_info(&old);\n", "file_path": "solver/src/sudoku.rs", "rank": 44, "score": 29635.620856011767 }, { "content": "use webelements::Result;\n\n\n\nuse crate::{ui::view::app::AppElement, util::InitCell};\n\n\n\nuse super::{editor::EditorController, info::InfoController, sudoku::SudokuController};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct AppController {\n\n element: AppElement,\n\n pub editor: InitCell<EditorController>,\n\n pub info: InitCell<InfoController>,\n\n pub sudoku: InitCell<SudokuController>,\n\n}\n\n\n\nimpl AppController {\n\n pub fn build(element: &AppElement) -> Result<InitCell<Self>> {\n\n let app = 
InitCell::with(AppController {\n\n element: element.clone(),\n\n editor: InitCell::new(),\n\n info: InitCell::new(),\n", "file_path": "websolver/src/ui/controller/app.rs", "rank": 45, "score": 28433.410332336985 }, { "content": " <Info we_field=\"info\" we_element />\n\n </div>\n\n <div class=\"app-options\"></div>\n\n </div>\n\n)]\n\n#[derive(Debug, Clone, WebElement)]\n\npub struct AppElement {}\n\n\n\nimpl AppElement {\n\n pub fn controller(&self) -> Result<InitCell<AppController>> {\n\n let app = AppController::build(self)?;\n\n Ok(app)\n\n }\n\n\n\n pub fn update(&self, app: &AppController) -> Result<()> {\n\n self.sudoku.update(&app.sudoku)?;\n\n self.editor.update(&app.editor)?;\n\n self.info.update(&app.info)?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "websolver/src/ui/view/app.rs", "rank": 46, "score": 28432.751731801036 }, { "content": "use webelements::{we_builder, Result, WebElement};\n\n\n\nuse crate::{\n\n ui::{\n\n controller::app::AppController,\n\n view::{editor::Editor, info::Info, sudoku::Sudoku},\n\n },\n\n util::InitCell,\n\n};\n\n\n\n#[we_builder(\n\n <div class=\"app\">\n\n <div class=\"app-main\" we_field=\"main\">\n\n <div class=\"sdk-box\">\n\n <div class=\"sdk\" we_field=\"sdk\">\n\n <div class=\"sdk-dummy\" />\n\n <Sudoku we_field=\"sudoku\" we_element />\n\n </div>\n\n </div>\n\n <Editor we_field=\"editor\" we_element />\n", "file_path": "websolver/src/ui/view/app.rs", "rank": 47, "score": 28428.34824655402 }, { "content": " sudoku: InitCell::new(),\n\n });\n\n\n\n InitCell::init(\n\n &app.sudoku,\n\n app.element.sudoku.controller(InitCell::clone(&app))?,\n\n );\n\n InitCell::init(\n\n &app.info,\n\n app.element.info.controller(InitCell::clone(&app))?,\n\n );\n\n InitCell::init(\n\n &app.editor,\n\n app.element.editor.controller(InitCell::clone(&app))?,\n\n );\n\n\n\n Ok(app)\n\n }\n\n\n\n pub fn update(&self) -> Result<()> {\n\n self.element.update(self)?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "websolver/src/ui/controller/app.rs", 
"rank": 48, "score": 28427.319619835434 }, { "content": "use std::{cell::RefCell, rc::Rc};\n\n\n\nuse solver::Solve;\n\nuse wasm_bindgen::JsValue;\n\n\n\nuse crate::{\n\n ui::sudoku::{Sudoku, SudokuModel, SudokuStateModel},\n\n util::InitCell,\n\n};\n\n\n\nuse webelements::Result;\n\n\n\nuse super::app::AppController;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct SudokuController {\n\n element: Sudoku,\n\n pub app: InitCell<AppController>,\n\n pub solver: RefCell<Option<js_sys::Function>>,\n\n pub state: Rc<RefCell<SudokuStateModel>>,\n", "file_path": "websolver/src/ui/controller/sudoku.rs", "rank": 50, "score": 27082.306322658435 }, { "content": "}\n\n\n\nimpl SudokuController {\n\n pub fn update(&self) -> Result<()> {\n\n self.element.update(self)?;\n\n Ok(())\n\n }\n\n\n\n pub fn build(app: InitCell<AppController>, element: &Sudoku) -> Result<Self> {\n\n let sudoku = InitCell::clone(&app.sudoku);\n\n webelements::document()?\n\n .on_key(move |event| {\n\n {\n\n let mut model = sudoku.state.borrow_mut();\n\n let selected = model.selected();\n\n if let Some(mut selected) = selected {\n\n match &*event.key() {\n\n \"ArrowLeft\" => {\n\n if selected.col > 0 {\n\n selected.col -= 1\n", "file_path": "websolver/src/ui/controller/sudoku.rs", "rank": 52, "score": 27080.652225142774 }, { "content": " }\n\n}\n\n\n\nimpl From<Sudoku> for SudokuModel {\n\n fn from(sudoku: Sudoku) -> Self {\n\n Self { sudoku }\n\n }\n\n}\n\n\n\nimpl SudokuModel {\n\n pub fn set(&mut self, sudoku: Sudoku) {\n\n self.sudoku = sudoku;\n\n }\n\n\n\n pub fn get(&self) -> &Sudoku {\n\n &self.sudoku\n\n }\n\n\n\n pub fn cell(&self, cell: Cell) -> u8 {\n\n *self.sudoku.cell(cell)\n", "file_path": "websolver/src/ui/model/sudoku.rs", "rank": 57, "score": 27074.108990636756 }, { "content": " let mut model = sudoku.state.borrow_mut();\n\n model.set_selected(clicked);\n\n }\n\n sudoku.update().unwrap();\n\n }))?;\n\n }\n\n Ok(Self {\n\n app: InitCell::clone(&app),\n\n element: element.clone(),\n\n solver: 
RefCell::new(None),\n\n state: Rc::new(RefCell::new(SudokuStateModel::default())),\n\n })\n\n }\n\n\n\n pub fn solve(&self) {\n\n let model = self.state.borrow();\n\n let start = model.start();\n\n if let Some(solver) = self.solver.borrow().as_ref() {\n\n let this = JsValue::null();\n\n solver\n", "file_path": "websolver/src/ui/controller/sudoku.rs", "rank": 58, "score": 27073.428275247283 }, { "content": "use solver::{Cell, Sudoku};\n\n\n\n#[derive(Debug)]\n\npub struct SudokuStateModel {\n\n pub start: SudokuModel,\n\n pub state: Option<SudokuModel>,\n\n selected: Option<Cell>,\n\n}\n\n\n\nimpl SudokuStateModel {\n\n pub fn start(&self) -> &SudokuModel {\n\n &self.start\n\n }\n\n\n\n pub fn start_mut(&mut self) -> &mut SudokuModel {\n\n &mut self.start\n\n }\n\n\n\n pub fn set_start(&mut self, start: Sudoku) {\n\n self.start.set(start)\n", "file_path": "websolver/src/ui/model/sudoku.rs", "rank": 59, "score": 27073.203104445514 }, { "content": "\n\nimpl From<Sudoku> for SudokuStateModel {\n\n fn from(start: Sudoku) -> Self {\n\n Self {\n\n start: SudokuModel::from(start),\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct SudokuModel {\n\n sudoku: Sudoku,\n\n}\n\n\n\nimpl Default for SudokuModel {\n\n fn default() -> Self {\n\n Self {\n\n sudoku: Sudoku::default(),\n\n }\n", "file_path": "websolver/src/ui/model/sudoku.rs", "rank": 60, "score": 27072.019850563687 }, { "content": " }\n\n\n\n pub fn set_selected(&mut self, cell: Cell) {\n\n self.selected.replace(cell);\n\n }\n\n\n\n pub fn deselect(&mut self) {\n\n self.selected.take();\n\n }\n\n}\n\n\n\nimpl Default for SudokuStateModel {\n\n fn default() -> Self {\n\n Self {\n\n start: Default::default(),\n\n state: None,\n\n selected: None,\n\n }\n\n }\n\n}\n", "file_path": "websolver/src/ui/model/sudoku.rs", "rank": 62, "score": 27070.739243205062 }, { "content": " str => {\n\n if let Ok(value) = str.parse::<u8>() {\n\n if value <= 9 {\n\n model.start_mut().set_cell(selected, 
value);\n\n }\n\n }\n\n }\n\n }\n\n model.set_selected(selected);\n\n }\n\n }\n\n sudoku.update().unwrap()\n\n })\n\n .unwrap();\n\n\n\n for cell in element.cells() {\n\n let clicked = cell.cell();\n\n let sudoku = InitCell::clone(&app.sudoku);\n\n cell.on_click(Box::new(move |_event| {\n\n {\n", "file_path": "websolver/src/ui/controller/sudoku.rs", "rank": 63, "score": 27070.33915986592 }, { "content": " .call1(&this, &JsValue::from_serde(start.get()).unwrap())\n\n .unwrap();\n\n }\n\n }\n\n\n\n pub fn on_solve(&self, solve: Solve) -> Result<()> {\n\n {\n\n let mut model = self.state.borrow_mut();\n\n let mut info = self.app.info.info.borrow_mut();\n\n\n\n let step = solve.iter().last().unwrap();\n\n model.set_state(SudokuModel::from(step.sudoku));\n\n info.set_solve(solve)?;\n\n let max = info.max();\n\n info.set_step(max)?;\n\n }\n\n self.app.update()?;\n\n Ok(())\n\n }\n\n\n\n pub fn set_solver(&self, solver: &js_sys::Function) {\n\n self.solver.borrow_mut().replace(solver.clone());\n\n }\n\n}\n", "file_path": "websolver/src/ui/controller/sudoku.rs", "rank": 64, "score": 27069.13387005773 }, { "content": " }\n\n\n\n pub fn clear_start(&mut self) {\n\n self.start.clear()\n\n }\n\n\n\n pub fn state(&self) -> Option<&SudokuModel> {\n\n self.state.as_ref()\n\n }\n\n\n\n pub fn set_state(&mut self, sudoku: SudokuModel) {\n\n self.state.replace(sudoku);\n\n }\n\n\n\n pub fn clear_state(&mut self) {\n\n self.state.take();\n\n }\n\n\n\n pub fn selected(&self) -> Option<Cell> {\n\n self.selected\n", "file_path": "websolver/src/ui/model/sudoku.rs", "rank": 65, "score": 27069.028471135593 }, { "content": " }\n\n\n\n pub fn set_cell(&mut self, cell: Cell, value: u8) {\n\n self.sudoku.set_cell(cell, value);\n\n }\n\n\n\n pub fn clear(&mut self) {\n\n self.sudoku = Sudoku::default()\n\n }\n\n}\n", "file_path": "websolver/src/ui/model/sudoku.rs", "rank": 66, "score": 27067.526802827702 }, { "content": " }\n\n }\n\n \"ArrowUp\" => {\n\n if selected.row > 0 {\n\n selected.row 
-= 1\n\n }\n\n }\n\n \"ArrowRight\" => {\n\n if selected.col < 8 {\n\n selected.col += 1\n\n }\n\n }\n\n \"ArrowDown\" => {\n\n if selected.row < 8 {\n\n selected.row += 1\n\n }\n\n }\n\n \"Delete\" => {\n\n model.start_mut().set_cell(selected, 0);\n\n }\n", "file_path": "websolver/src/ui/controller/sudoku.rs", "rank": 70, "score": 27059.237685531334 }, { "content": "const common = require('./webpack.common.js');\n", "file_path": "webpack.dev.js", "rank": 71, "score": 23934.867275537603 }, { "content": "const path = require(\"path\");\n", "file_path": "webpack.common.js", "rank": 72, "score": 23934.867275537603 }, { "content": "const path = require(\"path\");\n", "file_path": "webpack.dev.js", "rank": 73, "score": 23934.867275537603 }, { "content": "const dist = path.resolve(__dirname, \"dist\");\n", "file_path": "webpack.common.js", "rank": 74, "score": 23934.867275537603 }, { "content": "const common = require('./webpack.common.js');\n", "file_path": "webpack.prod.js", "rank": 75, "score": 23934.867275537603 }, { "content": "const dist = path.resolve(__dirname, 'dist');\n", "file_path": "webpack.dev.js", "rank": 76, "score": 23934.867275537603 }, { "content": "const WasmPackPlugin = require(\"@wasm-tool/wasm-pack-plugin\");\n", "file_path": "webpack.common.js", "rank": 77, "score": 22225.201437525702 }, { "content": "const HtmlWebpackPlugin = require('html-webpack-plugin')\n", "file_path": "webpack.common.js", "rank": 78, "score": 22225.201437525702 }, { "content": "const MiniCssExtractPlugin = require('mini-css-extract-plugin');\n", "file_path": "webpack.common.js", "rank": 79, "score": 21458.801131300253 }, { "content": "[![MIT License][license-shield]][license-url]\n\n\n\n<br />\n\n<p align=\"center\">\n\n <h3 align=\"center\">Sudoku Web Solver & Analyzer</h3>\n\n <p align=\"center\">\n\n\tA sudoku solver and analyzer, written in Rust that runs in your browser using webassembly.\t\t \n\n </p>\n\n</p>\n\n\n\n<details open=\"open\">\n\n <ol>\n\n <li>\n\n <a 
href=\"#about-the-project\">About The Project</a>\n\n </li>\n\n </ol>\n\n</details>\n\n\n\n## About The Project\n\n\n\n[![Product Name Screen Shot][product-screenshot]](https://pepijnd.github.io/sudoku_web_solver/)\n\n\n\n[license-shield]: https://shields.io/badge/license-MIT-blue.svg\n\n[license-url]: https://github.com/pepijnd/sudoku_web_solver/LICENSE\n\n[product-screenshot]: application.png\n", "file_path": "README.md", "rank": 80, "score": 19451.56081684694 }, { "content": "const { merge } = require('webpack-merge');\n\nconst common = require('./webpack.common.js');\n\nconst path = require(\"path\");\n\n\n\nconst dist = path.resolve(__dirname, 'dist');\n\n\n\nmodule.exports = merge(common, {\n\n mode: 'development',\n\n devtool: 'inline-source-map',\n\n devServer: {\n\n contentBase: dist\n\n }\n", "file_path": "webpack.dev.js", "rank": 81, "score": 18857.692423478642 }, { "content": "onmessage = function (e) {\n\n if (e.data[0] == \"init\") {\n\n import(\"../pkg_solver\").then((lib) => {\n\n onmessage = function (e) {\n\n performance.mark(\"perf_start\");\n\n let solve = lib.solve(e.data[0]);\n\n performance.mark(\"perf_stop\");\n\n performance.measure(\"perf_measure\", \"perf_start\", \"perf_stop\");\n\n let entries = performance.getEntriesByName(\"perf_measure\");\n\n let measure = entries[entries.length - 1];\n\n postMessage([\n\n \"solved\",\n\n solve,\n\n {\n\n name: measure.name,\n\n startTime: measure.startTime,\n\n duration: measure.duration,\n\n },\n\n ]);\n\n };\n\n postMessage([\"init\"]);\n\n });\n\n }\n\n};\n", "file_path": "js/solver.js", "rank": 82, "score": 18857.692423478642 }, { "content": "const { merge } = require('webpack-merge');\n\nconst common = require('./webpack.common.js');\n\n\n\nmodule.exports = merge(common, {\n\n mode: 'production',\n", "file_path": "webpack.prod.js", "rank": 83, "score": 18857.692423478642 }, { "content": "const path = require(\"path\");\n\nconst WasmPackPlugin = 
require(\"@wasm-tool/wasm-pack-plugin\");\n\nconst MiniCssExtractPlugin = require('mini-css-extract-plugin');\n\nconst HtmlWebpackPlugin = require('html-webpack-plugin')\n\nconst { CleanWebpackPlugin } = require('clean-webpack-plugin');\n\n\n\nconst dist = path.resolve(__dirname, \"dist\");\n\n\n\nmodule.exports = {\n\n entry: {\n\n index: \"./js/index.js\",\n\n },\n\n output: {\n\n path: dist,\n\n filename: \"[contenthash].js\"\n\n },\n\n module: {\n\n rules: [\n\n {\n\n test: /\\.(sa|sc|c)ss$/,\n\n use: [\n\n MiniCssExtractPlugin.loader,\n\n {\n\n loader: 'css-loader'\n\n },\n\n {\n\n loader: 'postcss-loader',\n\n options: {\n\n postcssOptions: {\n\n plugins: [\n\n \"autoprefixer\",\n\n ]\n\n }\n\n }\n\n },\n\n {\n\n loader: \"sass-loader\"\n\n }\n\n ]\n\n }\n\n ]\n\n },\n\n plugins: [\n\n new CleanWebpackPlugin(),\n\n new HtmlWebpackPlugin({\n\n title: \"Sudoku Solver and Analyzer\",\n\n meta: {\n\n 'viewport': 'width=device-width, initial-scale=1, shrink-to-fit=no',\n\n 'color-scheme': 'dark light'\n\n }\n\n }),\n\n new MiniCssExtractPlugin({\n\n filename: '[contenthash].css',\n\n chunkFilename: '[chunkhash].css',\n\n }),\n\n new WasmPackPlugin({\n\n crateDirectory: path.resolve(__dirname, \"websolver\"),\n\n outDir: path.resolve(__dirname, \"pkg\"),\n\n extraArgs: \"--no-typescript\",\n\n watchDirectories: [\n\n path.resolve(__dirname, \"websolver/ui\"),\n\n path.resolve(__dirname, \"solver\"),\n\n ],\n\n }),\n\n new WasmPackPlugin({\n\n crateDirectory: path.resolve(__dirname, \"websolver\"),\n\n outDir: path.resolve(__dirname, \"pkg_solver\"),\n\n outName: \"worker\",\n\n extraArgs: \"--no-typescript -- --no-default-features --features=worker\",\n\n watchDirectories: [\n\n path.resolve(__dirname, \"solver\"),\n\n ],\n\n }),\n\n ],\n\n experiments: {\n\n asyncWebAssembly: true\n\n }\n", "file_path": "webpack.common.js", "rank": 84, "score": 18857.692423478642 }, { "content": "use std::{cell::RefCell, rc::Rc};\n\n\n\nuse webelements::Result;\n\n\n\nuse 
crate::{\n\n ui::{view::info::Info, SudokuInfo},\n\n util::InitCell,\n\n};\n\n\n\nuse super::app::AppController;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct InfoController {\n\n element: Info,\n\n app: InitCell<AppController>,\n\n pub info: Rc<RefCell<SudokuInfo>>,\n\n}\n\n\n\nimpl InfoController {\n\n pub fn update(&self) -> Result<()> {\n", "file_path": "websolver/src/ui/controller/info.rs", "rank": 85, "score": 26.254106003529618 }, { "content": "use std::convert::TryInto;\n\n\n\nuse crate::{\n\n ui::editor::{Editor, EditorAction},\n\n util::InitCell,\n\n};\n\n\n\nuse webelements::Result;\n\n\n\nuse crate::ui::app::AppController;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct EditorController {\n\n element: Editor,\n\n pub app: InitCell<AppController>,\n\n}\n\n\n\nimpl EditorController {\n\n pub fn update(&self) -> Result<()> {\n\n self.element.update(self)?;\n", "file_path": "websolver/src/ui/controller/editor.rs", "rank": 86, "score": 24.803600077971048 }, { "content": "use solver::{output::SolveStep, solvers::Solver, Options, Solve, StateMod, Sudoku};\n\n\n\nuse webelements::Result;\n\n\n\nuse crate::util::Measure;\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum Stat {\n\n Tech,\n\n Steps,\n\n Guesses,\n\n GSteps,\n\n GTotal,\n\n None,\n\n}\n\n\n\nimpl Default for Stat {\n\n fn default() -> Self {\n\n Self::None\n\n }\n\n}\n\n\n", "file_path": "websolver/src/ui/model/info.rs", "rank": 87, "score": 23.000279679073255 }, { "content": " self.element.update(self)?;\n\n Ok(())\n\n }\n\n pub fn build(app: InitCell<AppController>, info: &Info) -> Result<Self> {\n\n Ok(Self {\n\n app,\n\n element: info.clone(),\n\n info: Rc::new(RefCell::new(SudokuInfo::default())),\n\n })\n\n }\n\n}\n", "file_path": "websolver/src/ui/controller/info.rs", "rank": 88, "score": 22.418301581827407 }, { "content": "use webelements::{we_builder, Result, WebElement, WebElementBuilder};\n\n\n\nuse crate::{\n\n ui::{\n\n controller::app::AppController,\n\n editor::{EditorAction, 
EditorController},\n\n ButtonElement,\n\n },\n\n util::InitCell,\n\n};\n\n\n\n#[we_builder(\n\n <div>\n\n <NumberBar we_field=\"numbers\" we_element />\n\n <OptionBar we_field=\"options\" we_element />\n\n <StepInput we_field=\"steps\" we_element />\n\n </div>\n\n)]\n\n#[derive(Debug, Clone, WebElement)]\n\npub struct Editor {}\n", "file_path": "websolver/src/ui/view/editor.rs", "rank": 89, "score": 22.26091971146115 }, { "content": "\n\nimpl Editor {\n\n pub fn connect(&self, editor: InitCell<EditorController>) -> Result<()> {\n\n self.numbers.connect(InitCell::clone(&editor))?;\n\n self.options.connect(InitCell::clone(&editor))?;\n\n self.steps.connect(InitCell::clone(&editor))?;\n\n Ok(())\n\n }\n\n\n\n pub fn controller(&self, app: InitCell<AppController>) -> Result<EditorController> {\n\n EditorController::build(app, self)\n\n }\n\n\n\n pub fn update(&self, editor: &EditorController) -> Result<()> {\n\n self.numbers.update(editor);\n\n self.options.update(editor);\n\n self.steps.update(editor)?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "websolver/src/ui/view/editor.rs", "rank": 90, "score": 22.066004768947295 }, { "content": "#![cfg(feature = \"webui\")]\n\nuse wasm_bindgen::prelude::*;\n\n\n\nuse crate::util::{InitCell, Measure};\n\n\n\nuse super::{controller::app::AppController, view::app::AppElement};\n\nuse solver::{Solve, Sudoku};\n\nuse webelements::{document, WebElementBuilder};\n\n\n\n#[wasm_bindgen]\n\n#[derive(Debug)]\n\npub struct App {\n\n controller: InitCell<AppController>,\n\n element: AppElement,\n\n}\n\n\n\n#[wasm_bindgen]\n\nimpl App {\n\n #[wasm_bindgen(constructor)]\n\n pub fn new() -> Result<App, JsValue> {\n", "file_path": "websolver/src/ui/build.rs", "rank": 91, "score": 21.234913257251538 }, { "content": "use crate::{Cell, CellMod, CellOptions, EntrySolver, State, StateMod};\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct StateInit;\n\n\n\nimpl EntrySolver for StateInit {\n\n fn advance(&mut self, state: &mut State) -> bool {\n\n 
state.info.push_state();\n\n true\n\n }\n\n}\n\n\n\nimpl Default for StateInit {\n\n fn default() -> Self {\n\n Self\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct StateSolved;\n", "file_path": "solver/src/solvers/base.rs", "rank": 92, "score": 19.23460896586218 }, { "content": "#[derive(Debug, Clone)]\n\npub struct OptionBar {}\n\n\n\nimpl WebElement for OptionBar {\n\n fn init(&mut self) -> Result<()> {\n\n self.solve.action = EditorAction::Solve;\n\n self.erase.action = EditorAction::Erase;\n\n self.clear.action = EditorAction::Clear;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl OptionBar {\n\n pub fn connect(&self, editor: InitCell<EditorController>) -> Result<()> {\n\n self.solve.connect(InitCell::clone(&editor))?;\n\n self.erase.connect(InitCell::clone(&editor))?;\n\n self.clear.connect(InitCell::clone(&editor))?;\n\n Ok(())\n\n }\n\n\n", "file_path": "websolver/src/ui/view/editor.rs", "rank": 93, "score": 19.21395336457945 }, { "content": "use crate::{\n\n options::OptionPair, util::SetDomain, Cell, CellMod, CellOptions, EntrySolver, State, StateMod,\n\n};\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub struct XYWingSolver;\n\n\n\nimpl EntrySolver for XYWingSolver {\n\n fn advance(&mut self, state: &mut State) -> bool {\n\n for row in 0..9 {\n\n for col in 0..9 {\n\n let cell = Cell::new(row, col);\n\n let c_opts = state.options.options(cell, &state.sudoku);\n\n if *state.sudoku.cell(cell) != 0 {\n\n continue;\n\n }\n\n if let Some(c_pair) = c_opts.as_pair() {\n\n Self::test_cell(cell, c_opts, c_pair, state);\n\n }\n\n }\n", "file_path": "solver/src/solvers/xywing.rs", "rank": 94, "score": 18.976329015212908 }, { "content": "#[derive(Debug, Clone)]\n\npub struct State {\n\n pub sudoku: Sudoku,\n\n pub options: Options,\n\n pub info: Info,\n\n pub config: Rc<Config>,\n\n}\n\n\n\nimpl State {\n\n pub fn update(&mut self, cell: Cell, value: u8) {\n\n self.sudoku.set_cell(cell, value);\n\n }\n\n\n\n pub fn remove(&mut self, cell: Cell, value: u8) -> bool {\n\n 
self.options.remove(cell, value)\n\n }\n\n\n\n pub fn merge_info(&mut self, other: &Self) {\n\n self.info.merge(&other.info);\n\n }\n", "file_path": "solver/src/lib.rs", "rank": 95, "score": 18.709329404612074 }, { "content": " Self {\n\n cell: None,\n\n options: CellOptions::all(),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::{Cell, CellOptions, EntrySolver, State, Sudoku};\n\n\n\n use super::Backtrace;\n\n\n\n static SAMPLE: &str =\n\n \"...6..8....35.4...65..217...6..............5..7138..2...7.1.6.4.1.......9....3..7\";\n\n\n\n #[test]\n\n fn backtrace_test() {\n\n let sudoku = Sudoku::from(SAMPLE);\n\n let mut state = State {\n", "file_path": "solver/src/solvers/base.rs", "rank": 96, "score": 17.996488754831297 }, { "content": "use crate::{\n\n ui::{\n\n controller::{app::AppController, info::InfoController},\n\n model::info::Stat,\n\n },\n\n util::InitCell,\n\n};\n\n\n\nuse webelements::{we_builder, Result, WebElement};\n\n\n\n#[we_builder(\n\n <div class=\"solve-info\">\n\n <InfoStat we_field=\"tech\" we_element />\n\n <InfoStat we_field=\"steps\" we_element />\n\n <InfoStat we_field=\"guesses\" we_element />\n\n <InfoStat we_field=\"g_steps\" we_element />\n\n <InfoStat we_field=\"g_total\" we_element />\n\n </div>\n\n)]\n\n#[derive(Debug, Clone)]\n", "file_path": "websolver/src/ui/view/info.rs", "rank": 97, "score": 17.395031025780554 }, { "content": "\n\n#[we_builder(\n\n <div class=\"btn-panel sudoku-numbers\">\n\n <EditorButton we_field=\"buttons\" we_repeat=\"10\" we_element />\n\n </div>\n\n)]\n\n#[derive(Debug, Clone)]\n\npub struct NumberBar {}\n\n\n\nimpl WebElement for NumberBar {\n\n fn init(&mut self) -> Result<()> {\n\n for (n, btn) in self.buttons.iter_mut().enumerate() {\n\n btn.action = EditorAction::SetValue(n as u8);\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl NumberBar {\n\n fn connect(&self, editor: InitCell<EditorController>) -> Result<()> {\n", "file_path": "websolver/src/ui/view/editor.rs", "rank": 98, "score": 
17.364261645231892 }, { "content": " Ok(())\n\n }\n\n\n\n pub fn build(app: InitCell<AppController>, element: &Editor) -> Result<Self> {\n\n element.connect(InitCell::clone(&app.editor))?;\n\n\n\n element.steps.slider.slider.on_input(Box::new({\n\n let app = InitCell::clone(&app);\n\n let input = element.steps.slider.slider.clone();\n\n move |_event| {\n\n {\n\n let mut info = app.info.info.borrow_mut();\n\n if let Ok(value) = input.get_value::<i32>() {\n\n info.set_step(value.try_into().unwrap()).unwrap();\n\n }\n\n }\n\n app.update().unwrap();\n\n }\n\n }))?;\n\n\n", "file_path": "websolver/src/ui/controller/editor.rs", "rank": 99, "score": 17.316901463800253 } ]
Rust
firm-construction/src/runtime.rs
comprakt/comprakt
2315e85972e63ea327c4d115ffe623253b520440
use libfirm_rs::{types::*, Entity}; use strum_macros::EnumDiscriminants; use crate::type_checking::type_system; #[strum_discriminants(derive(Display))] #[derive(EnumDiscriminants)] pub enum RuntimeFunction { SystemOutPrintln, SystemOutWrite, SystemOutFlush, SystemInRead, New, Dumpstack, NullUsage, ArrayOutOfBounds, DivByZero, } pub trait RTLib { fn ld_name(&self, builtin: RuntimeFunction) -> &'static str; fn mj_main_name(&self) -> &'static str; } impl From<type_system::BuiltinMethodBody> for RuntimeFunction { fn from(mb: type_system::BuiltinMethodBody) -> Self { use self::type_system::BuiltinMethodBody; match mb { BuiltinMethodBody::SystemOutPrintln => RuntimeFunction::SystemOutPrintln, BuiltinMethodBody::SystemOutWrite => RuntimeFunction::SystemOutWrite, BuiltinMethodBody::SystemOutFlush => RuntimeFunction::SystemOutFlush, BuiltinMethodBody::SystemInRead => RuntimeFunction::SystemInRead, } } } pub struct Mjrt; impl RTLib for Mjrt { fn ld_name(&self, rtf: RuntimeFunction) -> &'static str { match rtf { RuntimeFunction::SystemOutPrintln => "mjrt_system_out_println", RuntimeFunction::SystemOutWrite => "mjrt_system_out_write", RuntimeFunction::SystemOutFlush => "mjrt_system_out_flush", RuntimeFunction::SystemInRead => "mjrt_system_in_read", RuntimeFunction::Dumpstack => "mjrt_dumpstack", RuntimeFunction::DivByZero => "mjrt_div_by_zero", RuntimeFunction::NullUsage => "mjrt_null_usage", RuntimeFunction::ArrayOutOfBounds => "mjrt_array_out_of_bounds", RuntimeFunction::New => "mjrt_new", } } fn mj_main_name(&self) -> &'static str { "mj_main" } } pub struct Runtime { pub lib: Box<dyn RTLib>, pub system_out_println: Entity, pub system_out_write: Entity, pub system_out_flush: Entity, pub system_in_read: Entity, pub new: Entity, pub dumpstack: Entity, pub null_usage: Entity, pub array_out_of_bounds: Entity, pub div_by_zero: Entity, } impl Runtime { pub fn new(lib: Box<dyn RTLib>) -> Self { let dumpstack = { let t = MethodTyBuilder::new().build_no_this_call(); 
Entity::new_global(lib.ld_name(RuntimeFunction::Dumpstack), t.into()) }; let system_out_println = { let it = PrimitiveTy::i32(); let mut t = MethodTyBuilder::new(); t.add_param(it.into()); let t = t.build_no_this_call(); Entity::new_global(lib.ld_name(RuntimeFunction::SystemOutPrintln), t.into()) }; let system_out_write = { let it = PrimitiveTy::i32(); let mut t = MethodTyBuilder::new(); t.add_param(it.into()); let t = t.build_no_this_call(); Entity::new_global(lib.ld_name(RuntimeFunction::SystemOutWrite), t.into()) }; let system_out_flush = { let t = MethodTyBuilder::new().build_no_this_call(); Entity::new_global(lib.ld_name(RuntimeFunction::SystemOutFlush), t.into()) }; let system_in_read = { let it = PrimitiveTy::i32(); let mut t = MethodTyBuilder::new(); t.set_res(it.into()); let t = t.build_no_this_call(); Entity::new_global(lib.ld_name(RuntimeFunction::SystemInRead), t.into()) }; let new = { let loc = PrimitiveTy::ptr(); let size = PrimitiveTy::i64(); let mut t = MethodTyBuilder::new(); t.add_param(size.into()); t.set_res(loc.into()); let t = t.build_no_this_call(); Entity::new_global(lib.ld_name(RuntimeFunction::New), t.into()) }; let div_by_zero = { let t = MethodTyBuilder::new().build_no_this_call(); Entity::new_global(lib.ld_name(RuntimeFunction::DivByZero), t.into()) }; let null_usage = { let t = MethodTyBuilder::new().build_no_this_call(); Entity::new_global(lib.ld_name(RuntimeFunction::NullUsage), t.into()) }; let array_out_of_bounds = { let t = MethodTyBuilder::new().build_no_this_call(); Entity::new_global(lib.ld_name(RuntimeFunction::ArrayOutOfBounds), t.into()) }; Self { lib, system_out_println, system_out_write, system_out_flush, system_in_read, new, dumpstack, div_by_zero, null_usage, array_out_of_bounds, } } }
use libfirm_rs::{types::*, Entity}; use strum_macros::EnumDiscriminants; use crate::type_checking::type_system; #[strum_discriminants(derive(Display))] #[derive(EnumDiscriminants)] pub enum RuntimeFunction { SystemOutPrintln, SystemOutWrite, SystemOutFlush, SystemInRead, New, Dumpstack, NullUsage, ArrayOutOfBounds, DivByZero, } pub trait RTLib { fn ld_name(&self, builtin: RuntimeFunction) -> &'static str; fn mj_main_name(&self) -> &'static str; } impl From<type_system::BuiltinMethodBody> for RuntimeFunction { fn from(mb: type_system::BuiltinMethodBody) -> Self { use self::type_system::BuiltinMethodBody; match mb { BuiltinMethodBody::SystemOutPrintln => RuntimeFunction::SystemOutPrintln, BuiltinMethodBody::SystemOutWrite => RuntimeFunction::SystemOutWrite, BuiltinMethodBody::SystemOutFlush => RuntimeFunction::SystemOutFlush, BuiltinMethodBody::SystemInRead => RuntimeFunction::SystemInRead, } } } pub struct Mjrt; impl RTLib for Mjrt {
fn mj_main_name(&self) -> &'static str { "mj_main" } } pub struct Runtime { pub lib: Box<dyn RTLib>, pub system_out_println: Entity, pub system_out_write: Entity, pub system_out_flush: Entity, pub system_in_read: Entity, pub new: Entity, pub dumpstack: Entity, pub null_usage: Entity, pub array_out_of_bounds: Entity, pub div_by_zero: Entity, } impl Runtime { pub fn new(lib: Box<dyn RTLib>) -> Self { let dumpstack = { let t = MethodTyBuilder::new().build_no_this_call(); Entity::new_global(lib.ld_name(RuntimeFunction::Dumpstack), t.into()) }; let system_out_println = { let it = PrimitiveTy::i32(); let mut t = MethodTyBuilder::new(); t.add_param(it.into()); let t = t.build_no_this_call(); Entity::new_global(lib.ld_name(RuntimeFunction::SystemOutPrintln), t.into()) }; let system_out_write = { let it = PrimitiveTy::i32(); let mut t = MethodTyBuilder::new(); t.add_param(it.into()); let t = t.build_no_this_call(); Entity::new_global(lib.ld_name(RuntimeFunction::SystemOutWrite), t.into()) }; let system_out_flush = { let t = MethodTyBuilder::new().build_no_this_call(); Entity::new_global(lib.ld_name(RuntimeFunction::SystemOutFlush), t.into()) }; let system_in_read = { let it = PrimitiveTy::i32(); let mut t = MethodTyBuilder::new(); t.set_res(it.into()); let t = t.build_no_this_call(); Entity::new_global(lib.ld_name(RuntimeFunction::SystemInRead), t.into()) }; let new = { let loc = PrimitiveTy::ptr(); let size = PrimitiveTy::i64(); let mut t = MethodTyBuilder::new(); t.add_param(size.into()); t.set_res(loc.into()); let t = t.build_no_this_call(); Entity::new_global(lib.ld_name(RuntimeFunction::New), t.into()) }; let div_by_zero = { let t = MethodTyBuilder::new().build_no_this_call(); Entity::new_global(lib.ld_name(RuntimeFunction::DivByZero), t.into()) }; let null_usage = { let t = MethodTyBuilder::new().build_no_this_call(); Entity::new_global(lib.ld_name(RuntimeFunction::NullUsage), t.into()) }; let array_out_of_bounds = { let t = 
MethodTyBuilder::new().build_no_this_call(); Entity::new_global(lib.ld_name(RuntimeFunction::ArrayOutOfBounds), t.into()) }; Self { lib, system_out_println, system_out_write, system_out_flush, system_in_read, new, dumpstack, div_by_zero, null_usage, array_out_of_bounds, } } }
fn ld_name(&self, rtf: RuntimeFunction) -> &'static str { match rtf { RuntimeFunction::SystemOutPrintln => "mjrt_system_out_println", RuntimeFunction::SystemOutWrite => "mjrt_system_out_write", RuntimeFunction::SystemOutFlush => "mjrt_system_out_flush", RuntimeFunction::SystemInRead => "mjrt_system_in_read", RuntimeFunction::Dumpstack => "mjrt_dumpstack", RuntimeFunction::DivByZero => "mjrt_div_by_zero", RuntimeFunction::NullUsage => "mjrt_null_usage", RuntimeFunction::ArrayOutOfBounds => "mjrt_array_out_of_bounds", RuntimeFunction::New => "mjrt_new", } }
function_block-full_function
[ { "content": "pub fn dot_string(string: &str) -> String {\n\n format!(\"\\\"{}\\\"\", string.replace(\"\\\"\", \"\\\\\\\"\").replace(\"\\n\", \"\\\\n\"))\n\n}\n", "file_path": "debugging/src/dot/mod.rs", "rank": 0, "score": 206150.33104397444 }, { "content": "// dummy_writer returns a WriteColor meant for use in tests.\n\npub fn dummy_writer() -> impl termcolor::WriteColor {\n\n use termcolor::Buffer;\n\n // FIXME: actually have something that discards the output\n\n Buffer::no_color()\n\n}\n", "file_path": "compiler-shared/src/context.rs", "rank": 1, "score": 203442.89791523974 }, { "content": "// TODO: deduplicate after 172 is merged\n\npub fn escape_record_content(text: &str) -> String {\n\n text.replace(\"|\", \"\\\\|\")\n\n .replace(\"{\", \"\\\\{\")\n\n .replace(\"}\", \"\\\\}\")\n\n .replace(\"<\", \"\\\\<\")\n\n .replace(\">\", \"\\\\>\")\n\n}\n", "file_path": "debugging/src/dot/implementations.rs", "rank": 2, "score": 203205.33674117515 }, { "content": "// TODO: deduplicate after 172 is merged\n\npub fn escape_record_content(text: &str) -> String {\n\n text.replace(\"|\", \"\\\\|\")\n\n .replace(\"{\", \"\\\\{\")\n\n .replace(\"}\", \"\\\\}\")\n\n .replace(\"<\", \"\\\\<\")\n\n .replace(\">\", \"\\\\>\")\n\n}\n\n\n", "file_path": "optimization/src/code_placement.rs", "rank": 3, "score": 203205.33674117515 }, { "content": "pub fn pad_left(s: &str, pad: usize) -> String {\n\n pad_left_with_char(s, pad, ' ')\n\n}\n\n\n", "file_path": "diagnostics/src/diagnostics.rs", "rank": 4, "score": 196986.66376846365 }, { "content": "/// Build and get a binary project of a workspace crate\n\n///\n\n/// Passing `None` will return the main binary of the method invoking\n\n/// crate. 
**Not** the main binary of the virtual workspace.\n\nfn project_binary(subproject: Option<&'static str>) -> PathBuf {\n\n let mut cache = BIN_PATH_CACHE.lock().unwrap();\n\n\n\n if let Some(path) = cache.get(&subproject) {\n\n return path.clone();\n\n }\n\n\n\n let mut cmd = Command::new(env::var(\"CARGO\").unwrap());\n\n\n\n cmd.arg(\"build\").arg(\"--message-format=json\");\n\n\n\n if !cfg!(debug_assertions) {\n\n cmd.arg(\"--release\");\n\n }\n\n\n\n if let Some(workspace_crate) = subproject {\n\n cmd.arg(\"-p\");\n\n cmd.arg(workspace_crate);\n\n }\n\n\n", "file_path": "runner-integration-tests/src/lib.rs", "rank": 5, "score": 194209.51841462762 }, { "content": "pub fn lir_box(block: &BasicBlock, body: &str) -> Label {\n\n Label::from_text(format!(\n\n r#\"{{|<header> Block {block_id}|<code>{code}|}}\"#,\n\n block_id = block.firm.node_id(),\n\n code = escape_record_content(body),\n\n ))\n\n .shape(Shape::Record)\n\n .styles(vec![Style::Rounded, Style::Filled])\n\n}\n\n\n\nimpl GraphData<lir::BasicBlock> for lir::LIR {\n\n fn graph_data<T>(&self, label_maker: &T) -> HashMap<String, GraphState>\n\n where\n\n Self: Sized,\n\n T: LabelMaker<lir::BasicBlock>,\n\n {\n\n let mut dot_files = HashMap::new();\n\n\n\n for function in &self.functions {\n\n let name = function.name.to_string();\n", "file_path": "lowering/src/lir/debugging.rs", "rank": 7, "score": 188550.92447349796 }, { "content": "/// append another extension to a filename\n\npub fn add_extension(path: &PathBuf, extension: &str) -> PathBuf {\n\n let mut filepath = path.clone();\n\n\n\n let original_extension = filepath\n\n .extension()\n\n .unwrap_or_else(|| OsStr::new(\"\"))\n\n .to_os_string();\n\n\n\n filepath.set_extension({\n\n let mut ext = original_extension.clone();\n\n ext.push(OsStr::new(\".\"));\n\n ext.push(OsStr::new(extension));\n\n ext\n\n });\n\n\n\n filepath\n\n}\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 8, "score": 183534.96637678504 }, { "content": "pub fn 
pad_left_with_char(s: &str, pad: usize, chr: char) -> String {\n\n format!(\n\n \"{padding}{string}\",\n\n padding = chr\n\n .to_string()\n\n .repeat(pad.checked_sub(s.len()).unwrap_or(0)),\n\n string = s\n\n )\n\n}\n\n\n", "file_path": "diagnostics/src/diagnostics.rs", "rank": 9, "score": 181235.60309862188 }, { "content": "pub fn write(file: &Option<PathBuf>, contents: &str) -> Result<(), Error> {\n\n if let Some(path) = file {\n\n let mut file = File::create(path)?;\n\n file.write_all(contents.as_bytes())?;\n\n } else {\n\n stdout().write_all(contents.as_bytes())?;\n\n };\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 10, "score": 176219.645001909 }, { "content": "/// A trait to abstract from Node enum and various *-Node structs.\n\n/// Inspired by <https://github.com/libfirm/jFirm/blob/master/src/firm/nodes/Node.java>.\n\npub trait NodeTrait {\n\n fn internal_ir_node(&self) -> *mut bindings::ir_node;\n\n\n\n fn as_node(&self) -> Node {\n\n Node::wrap(self.internal_ir_node())\n\n }\n\n\n\n // TODO move to graph\n\n fn keep_alive(&self) {\n\n unsafe { bindings::keep_alive(self.internal_ir_node()) }\n\n }\n\n\n\n fn mode(&self) -> Mode {\n\n Mode::from_libfirm(unsafe { bindings::get_irn_mode(self.internal_ir_node()) })\n\n }\n\n\n\n fn block(&self) -> Block {\n\n if Node::is_block(self.as_node()) {\n\n return Block::new(self.internal_ir_node());\n\n }\n", "file_path": "libfirm-rs/src/nodes/nodes_ext.rs", "rank": 11, "score": 165686.12512277527 }, { "content": "/// An optimization that only works on a single graph and therefore does not\n\n/// optimize across function call boundaries.\n\npub trait Local {\n\n fn optimize_function(graph: Graph) -> Outcome;\n\n}\n\n\n\nimpl<T> Interprocedural for T\n\nwhere\n\n T: Local,\n\n{\n\n fn optimize(program: &mut FirmProgram<'_, '_>) -> Outcome {\n\n let mut collector = OutcomeCollector::new();\n\n for method in program.methods.values() {\n\n if let Some(graph) = 
method.borrow().graph {\n\n collector.push(Self::optimize_function(graph));\n\n }\n\n }\n\n collector.result()\n\n }\n\n}\n\n\n\n/// All available optimizations\n", "file_path": "optimization/src/lib.rs", "rank": 12, "score": 163000.05644899668 }, { "content": "pub trait Relational {\n\n fn relation(&self, other: &Self) -> Relation;\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Eq, PartialOrd, Ord)]\n\npub struct Symbol<'f>(&'f str);\n\n\n\nimpl Symbol<'_> {\n\n fn as_raw(&self) -> *const str {\n\n self.0 as *const str\n\n }\n\n\n\n pub fn as_str(&self) -> &str {\n\n self.0\n\n }\n\n}\n\n\n\nimpl<'f> Relational for Symbol<'f> {\n\n fn relation(&self, other: &Symbol<'f>) -> Relation {\n\n let distance = levenshtein::levenshtein(self.0, other.0);\n", "file_path": "strtab/src/strtab.rs", "rank": 13, "score": 163000.05644899668 }, { "content": "/// An optimization that optimizes the whole program by examining all function\n\n/// graphs at once.\n\npub trait Interprocedural {\n\n fn optimize(program: &mut FirmProgram<'_, '_>) -> Outcome;\n\n}\n\n\n", "file_path": "optimization/src/lib.rs", "rank": 14, "score": 163000.05644899668 }, { "content": "pub trait TyTrait: Sized {\n\n fn ir_type(self) -> *mut bindings::ir_type;\n\n\n\n fn pointer(self) -> PointerTy {\n\n PointerTy::from(Ty::from_ir_type(unsafe {\n\n bindings::new_type_pointer(self.ir_type())\n\n }))\n\n .expect(\"must return pointer type\")\n\n }\n\n\n\n fn array(self) -> ArrayTy {\n\n ArrayTy::from(Ty::from_ir_type(unsafe {\n\n bindings::new_type_array(self.ir_type(), 0)\n\n }))\n\n .expect(\"must return array type\")\n\n }\n\n\n\n fn size(self) -> u32 {\n\n unsafe { bindings::get_type_size(self.ir_type()) }\n\n }\n", "file_path": "libfirm-rs/src/types.rs", "rank": 15, "score": 162695.44722675334 }, { "content": "pub trait VisitResult {\n\n fn stop_visit(&self) -> bool;\n\n}\n\n\n\nimpl VisitResult for () {\n\n fn stop_visit(&self) -> bool {\n\n false\n\n }\n\n}\n\n\n\nimpl<X, E> VisitResult for Result<X, E> {\n\n 
fn stop_visit(&self) -> bool {\n\n self.is_err()\n\n }\n\n}\n\n\n\nimpl<'a, 't> NodeKind<'a, 't> {\n\n /// Visit the children of `self`, invoking `cb` on each.\n\n /// If `self` has no children, this method returns `None, otherwise\n\n /// `Some(res)` where `res` is the return value of `cb`.\n", "file_path": "parser/src/visitor.rs", "rank": 16, "score": 160461.12764344996 }, { "content": "pub trait Named {\n\n fn name(&self) -> String;\n\n}\n\n\n\npub struct NamedDot<'dot, TDot, TNode>\n\nwhere\n\n TDot: Dot<TNode>,\n\n{\n\n dot: &'dot TDot,\n\n name: String,\n\n node_marker: std::marker::PhantomData<TNode>,\n\n}\n\n\n\nimpl<'dot, TDot, TNode> Named for NamedDot<'dot, TDot, TNode>\n\nwhere\n\n TDot: Dot<TNode>,\n\n{\n\n fn name(&self) -> String {\n\n self.name.clone()\n\n }\n", "file_path": "debugging/src/dot/mod.rs", "rank": 17, "score": 160461.12764344996 }, { "content": "pub trait AsmBackend {\n\n fn emit_asm(&mut self, out: &mut dyn AsmOut) -> std::io::Result<()>;\n\n}\n\n\n\npub mod amd64 {\n\n\n\n use crate::firm_context::FirmContext;\n\n use lowering;\n\n\n\n pub struct Backend<'src, 'ast> {\n\n // member lir holds raw pointers to data stored in firm_ctx\n\n pub firm_ctx: FirmContext<'src, 'ast>,\n\n pub no_peep: bool,\n\n }\n\n\n\n use super::{AsmBackend, AsmOut};\n\n\n\n impl AsmBackend for Backend<'_, '_> {\n\n fn emit_asm(&mut self, out: &mut dyn AsmOut) -> std::io::Result<()> {\n\n compiler_shared::timed_scope!(\"backend\");\n\n let firm_program = self.firm_ctx.use_external_backend();\n\n lowering::run_backend(firm_program, &mut box out, self.no_peep)\n\n }\n\n }\n\n\n\n}\n", "file_path": "compiler-lib/src/backend.rs", "rank": 18, "score": 158054.92342281085 }, { "content": "#[allow(clippy::similar_names)]\n\npub fn run_backend(\n\n firm_program: &FirmProgram<'_, '_>,\n\n out: &mut impl std::io::Write,\n\n no_peep: bool,\n\n) -> std::io::Result<()> {\n\n let mut lir = LIR::from(firm_program);\n\n debugging::breakpoint!(\"LIR stage 1\", lir, &|block: 
&lir::BasicBlock| {\n\n lir::debugging::default_lir_label(block)\n\n });\n\n\n\n writeln!(out, \"\\t.text\")?;\n\n\n\n // TODO predictable order\n\n for f in &mut lir.functions {\n\n basic_block_scheduling::basic_block_scheduling(f);\n\n let lva_result = live_variable_analysis::live_variable_analysis(\n\n &f.graph.blocks_scheduled.as_ref().unwrap(),\n\n &lir.allocator,\n\n );\n\n\n", "file_path": "lowering/src/lib.rs", "rank": 19, "score": 157628.71074485005 }, { "content": "pub fn init() {\n\n INIT.call_once(|| unsafe {\n\n bindings::ir_init_library();\n\n });\n\n}\n", "file_path": "libfirm-rs/src/lib.rs", "rank": 20, "score": 157628.71074485005 }, { "content": "pub fn print() {\n\n if std::env::var(\"MEASURE_STDERR\").is_ok() {\n\n eprintln!(\"Performance Analysis\");\n\n eprintln!(\"====================\\n\");\n\n\n\n if cfg!(feature = \"debugger_gui\") {\n\n eprintln!(\"Measurements not available with enabled breakpoints\");\n\n } else {\n\n eprintln!(\"{}\", TIMINGS.lock().unwrap());\n\n }\n\n }\n\n\n\n if let Ok(path) = std::env::var(\"MEASURE_JSON\") {\n\n let file = File::create(path).unwrap();\n\n serde_json::to_writer(\n\n file,\n\n &CompilerMeasurements::from(TIMINGS.lock().unwrap().clone()),\n\n )\n\n .unwrap();\n\n }\n", "file_path": "compiler-shared/src/timing.rs", "rank": 21, "score": 157628.71074485005 }, { "content": "pub trait ValueNode: NodeTrait {\n\n fn value_nodes(&self) -> Vec<Box<dyn ValueNode>>;\n\n fn compute(&self, values: Vec<Tarval>) -> Tarval;\n\n}\n\n\n\nimpl From<Box<dyn ValueNode>> for Node {\n\n fn from(n: Box<dyn ValueNode>) -> Self {\n\n Self::wrap(n.internal_ir_node())\n\n }\n\n}\n\n\n\nimpl From<&Box<dyn ValueNode>> for Node {\n\n fn from(n: &Box<dyn ValueNode>) -> Self {\n\n Self::wrap(n.internal_ir_node())\n\n }\n\n}\n\n\n\nimpl From<&dyn ValueNode> for Node {\n\n fn from(n: &dyn ValueNode) -> Self {\n\n Self::wrap(n.internal_ir_node())\n", "file_path": "libfirm-rs/src/nodes/value_nodes.rs", "rank": 22, "score": 
157022.90867685195 }, { "content": "pub trait LintContext {\n\n fn struct_lint(&self, lint: &'static Lint, span: Span<'_>, msg: &str);\n\n}\n\n\n\npub struct EarlyContext<'a, 'f> {\n\n diagnostics: &'a Diagnostics,\n\n builder: LintLevelBuilder,\n\n locator: Locator<'f>,\n\n}\n\n\n\nimpl<'a> LintContext for EarlyContext<'a, '_> {\n\n fn struct_lint(&self, lint: &'static Lint, span: Span<'_>, msg: &str) {\n\n let lvl = self\n\n .builder\n\n .lint_level(LintId::of(lint))\n\n .unwrap_or(lint.level);\n\n if lvl == MessageLevel::Allow {\n\n return;\n\n }\n\n self.diagnostics\n", "file_path": "compiler-lib/src/linter/mod.rs", "rank": 23, "score": 155771.30145665852 }, { "content": "pub trait LintPass {\n\n fn get_lints(&self) -> LintArray;\n\n}\n\n\n", "file_path": "compiler-lib/src/linter/mod.rs", "rank": 24, "score": 155771.30145665852 }, { "content": "pub trait IntoReferenceData {\n\n fn into_reference_data(self, base: &PathBuf) -> ReferenceData;\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 25, "score": 155771.30145665852 }, { "content": "/// This abstraction allows us to call the diagnostics API with pretty\n\n/// much everything.\n\n///\n\n/// The following examples are all equivalent and will print a warning\n\n/// without a source code snippet below the message:\n\n///\n\n/// ```rust,ignore\n\n/// context.diagnostics.warning(&\"Something went wrong\");\n\n/// context\n\n/// .diagnostics\n\n/// .warning(&WithoutSpan(\"Something went wrong\"));\n\n/// ```\n\n///\n\n/// The following examples will print a message with a source code\n\n/// snippet. 
Note that all errors generated by the compiler are\n\n/// a `Spanned<_, Fail>` and can therefore be directly passed to\n\n/// the diagnostics API.\n\n///\n\n/// ```rust,ignore\n\n/// // `lexer_error` is the `Err` returned by `Lexer::next`\n\n/// context.diagnostics.error(&lexer_error);\n\n/// // `span` is some `asciifile::Span`\n\n/// context.diagnostics.error({\n\n/// span: span,\n\n/// data: \"something went wrong\"\n\n/// });\n\n/// ```\n\npub trait Printable<'a, 'b> {\n\n fn as_maybe_spanned(&'b self) -> MaybeSpanned<'a, &'b dyn Display>;\n\n}\n\n\n\n// TODO: implementing on `str` (which is what you would like to do, to\n\n// support calls with warning(\"aa\") instead of warning(&\"aa\").\n\nimpl<'a, 'b> Printable<'a, 'b> for &'b str {\n\n fn as_maybe_spanned(&'b self) -> MaybeSpanned<'a, &'b dyn Display> {\n\n MaybeSpanned::WithoutSpan(self)\n\n }\n\n}\n\n\n\nimpl<'a, 'b, T: Display + 'b> Printable<'a, 'b> for Spanned<'a, T> {\n\n fn as_maybe_spanned(&'b self) -> MaybeSpanned<'a, &'b dyn Display> {\n\n MaybeSpanned::WithSpan(Spanned {\n\n span: self.span,\n\n data: &self.data,\n\n })\n\n }\n\n}\n", "file_path": "diagnostics/src/diagnostics.rs", "rank": 26, "score": 153775.7553330957 }, { "content": "pub trait NodeDebug {\n\n fn fmt(&self, f: &mut fmt::Formatter, options: NodeDebugOpts) -> fmt::Result;\n\n\n\n fn debug_fmt(self) -> NodeDebugFmt<Self>\n\n where\n\n Self: Sized + Copy,\n\n {\n\n NodeDebugFmt(self, NodeDebugOpts::default())\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct NodeDebugOpts {\n\n pub short: bool,\n\n pub new_print_class: bool,\n\n pub print_id: bool,\n\n}\n\n\n\nimpl NodeDebugOpts {\n\n pub fn default() -> Self {\n", "file_path": "libfirm-rs/src/nodes/nodes_ext.rs", "rank": 27, "score": 153601.12713256426 }, { "content": "pub trait UnaryOp {\n\n fn operand(&self) -> Box<dyn ValueNode>;\n\n fn compute(&self, val: Tarval) -> Tarval;\n\n}\n\n\n\nmacro_rules! 
unaryop_impl {\n\n ($node_ty: ident, $compute: expr) => {\n\n impl UnaryOp for $node_ty {\n\n fn operand(&self) -> Box<dyn ValueNode> {\n\n try_as_value_node(self.op()).unwrap()\n\n }\n\n fn compute(&self, val: Tarval) -> Tarval {\n\n $compute(self, val)\n\n }\n\n }\n\n\n\n impl ValueNode for $node_ty {\n\n fn value_nodes(&self) -> Vec<Box<dyn ValueNode>> {\n\n vec![self.operand()]\n\n }\n", "file_path": "libfirm-rs/src/nodes/value_nodes.rs", "rank": 28, "score": 153601.12713256426 }, { "content": "pub trait BinOp {\n\n fn left(&self) -> Box<dyn ValueNode>;\n\n fn right(&self) -> Box<dyn ValueNode>;\n\n fn compute(&self, left: Tarval, right: Tarval) -> Tarval;\n\n}\n\n\n\nmacro_rules! binop_impl {\n\n ($node_ty: ident, $compute: expr) => {\n\n impl BinOp for $node_ty {\n\n fn left(&self) -> Box<dyn ValueNode> {\n\n try_as_value_node($node_ty::left(*self)).unwrap()\n\n }\n\n fn right(&self) -> Box<dyn ValueNode> {\n\n try_as_value_node($node_ty::right(*self)).unwrap()\n\n }\n\n fn compute(&self, left: Tarval, right: Tarval) -> Tarval {\n\n $compute(self, left, right)\n\n }\n\n }\n\n\n", "file_path": "libfirm-rs/src/nodes/value_nodes.rs", "rank": 29, "score": 153601.12713256426 }, { "content": "pub fn label_for_late_placement(\n\n node: &Node,\n\n current_node: Node,\n\n earliest_allowed: nodes::Block,\n\n latest_allowed: Option<nodes::Block>,\n\n) -> Label {\n\n let mut label = dom_info_box(node);\n\n\n\n if let Node::Block(rendered_block) = node {\n\n // is within the chain of possibilities\n\n if let Some(latest_block) = latest_allowed {\n\n if earliest_allowed.dominates(*rendered_block) && rendered_block.dominates(latest_block)\n\n {\n\n label = label\n\n .add_style(Style::Filled)\n\n .fillcolor(X11Color::Pink)\n\n .fontcolor(X11Color::White);\n\n }\n\n }\n\n\n", "file_path": "optimization/src/code_placement.rs", "rank": 30, "score": 152958.08291146473 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Display)]\n\n#[display(fmt = 
\"identifier '{}'\", _0)]\n\nstruct ExactlyIdentifier<'s>(&'s str);\n", "file_path": "parser/src/parser.rs", "rank": 31, "score": 152161.9039287637 }, { "content": "#[cfg(feature = \"debugger_gui\")]\n\nfn gui_thread() -> &'static GUI {\n\n if (*GUI.lock().unwrap()).is_none() {\n\n spawn_gui_thread();\n\n }\n\n\n\n &GUI\n\n}\n\n\n", "file_path": "debugging/src/lib.rs", "rank": 32, "score": 152092.89368983154 }, { "content": "pub fn compare_class_member(\n\n a: &ast::ClassMember<'_>,\n\n b: &ast::ClassMember<'_>,\n\n) -> std::cmp::Ordering {\n\n use crate::ast::ClassMemberKind::*;\n\n match (&a.kind, &b.kind) {\n\n (Field(..), Field(..))\n\n | (Method(..), Method(..))\n\n | (MainMethod(..), MainMethod(..))\n\n | (Method(..), MainMethod(..))\n\n | (MainMethod(..), Method(..)) => a.name.cmp(&b.name),\n\n (Method(..), Field(..)) | (MainMethod(..), Field(..)) => std::cmp::Ordering::Less,\n\n (Field(..), Method(..)) | (Field(..), MainMethod(..)) => std::cmp::Ordering::Greater,\n\n }\n\n}\n\n\n\n// clippy::ptr-arg wants args to be a slice of Expr,\n\n// but that doesn't improve expressiveness here\n", "file_path": "compiler-lib/src/print/pretty.rs", "rank": 33, "score": 150796.7924504322 }, { "content": "#[allow(dead_code)]\n\npub fn assert_compiler_phase<\n\n TestMetadata: IntoReferenceData + FromReferencesPath<TestMetadata> + DeserializeOwned + Clone,\n\n>(\n\n phase: CompilerCall,\n\n spec: &TestSpec,\n\n) -> TestData<TestMetadata> {\n\n let (input_without_yaml_path, test_data) = load_test_data::<TestMetadata>(spec);\n\n let mut call = compiler_call(phase, &input_without_yaml_path);\n\n println!(\"Executing: {:?}\", call);\n\n let output = call.output().expect(\"failed to call compiler under test\");\n\n\n\n assert_output(\n\n &output,\n\n test_data\n\n .reference\n\n .clone()\n\n .into_reference_data(&spec.references),\n\n &spec,\n\n );\n\n\n\n test_data\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lib.rs", "rank": 34, "score": 150796.7924504322 }, { 
"content": "/// `check` returns an `Err` iff at least one errors was emitted through\n\n/// `context`.\n\npub fn check<'a, 'f>(\n\n strtab: &mut strtab::StringTable<'f>,\n\n ast: &'a ast::AST<'f>,\n\n context: &Context<'f>,\n\n) -> Result<(TypeSystem<'f, 'a>, TypeAnalysis<'f, 'a>), ()> {\n\n let mut first_pass_visitor = ClassesAndMembersVisitor::new(context);\n\n first_pass_visitor.do_visit(&NodeKind::from(ast));\n\n\n\n // Check if a static method was found. If multiple static methods were found or\n\n // the static method is not called `main` the error is already emitted in\n\n // the visitor\n\n if first_pass_visitor.static_method_found == 0 {\n\n context\n\n .diagnostics\n\n .error(&MaybeSpanned::WithoutSpan(SemanticError::NoMainMethod));\n\n }\n\n\n\n if context.diagnostics.errored() {\n\n return Err(());\n\n }\n\n let res = super::check(strtab, &ast, &context);\n\n if context.diagnostics.errored() {\n\n return Err(());\n\n }\n\n Ok(res)\n\n}\n\n\n", "file_path": "type_checking/src/semantics.rs", "rank": 35, "score": 148735.54788179218 }, { "content": "/// Abstraction over anything that can be transformed into labels for\n\n/// each node in a graph\n\npub trait LabelMaker<TNode> {\n\n fn label_for_node(&self, node: &TNode) -> Label;\n\n}\n\n\n\n#[derive(Default, Debug, Clone)]\n\npub struct Label {\n\n text: LabelText,\n\n id: String,\n\n style: Option<Vec<Style>>,\n\n fillcolor: Option<Color>,\n\n fontcolor: Option<Color>,\n\n shape: Option<Shape>,\n\n sides: Option<u32>,\n\n peripheries: Option<u32>,\n\n distortion: Option<f32>,\n\n skew: Option<f32>,\n\n}\n\n\n", "file_path": "debugging/src/dot/mod.rs", "rank": 36, "score": 148655.5134389377 }, { "content": "pub trait GraphData<TNode> {\n\n /// Transform the object into a map of unique function names\n\n /// to graph information in dot format\n\n fn graph_data<T>(&self, label_maker: &T) -> HashMap<String, GraphState>\n\n where\n\n Self: Sized,\n\n T: LabelMaker<TNode>;\n\n}\n\n\n\n#[derive(Debug, Clone, 
Serialize)]\n\npub struct GraphState {\n\n pub name: String,\n\n pub dot_content: String,\n\n}\n\n\n", "file_path": "debugging/src/dot/mod.rs", "rank": 37, "score": 148655.5134389377 }, { "content": "pub trait FromReferencesPath<T> {\n\n fn from_reference_path(base: &PathBuf) -> T;\n\n}\n\n\n\nimpl FromReferencesPath<OptionalReferenceData> for OptionalReferenceData {\n\n fn from_reference_path(base: &PathBuf) -> Self {\n\n Self::all_from_own_file(base)\n\n }\n\n}\n\n\n\nimpl FromReferencesPath<ReferenceData> for ReferenceData {\n\n fn from_reference_path(base: &PathBuf) -> Self {\n\n Self::all_from_own_file(base)\n\n }\n\n}\n\n\n\nimpl IntoReferenceData for ReferenceData {\n\n fn into_reference_data(self, _base: &PathBuf) -> ReferenceData {\n\n self\n\n }\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 38, "score": 148655.5134389377 }, { "content": "pub trait Lattice: Eq + Clone {\n\n fn is_progression_of(&self, other: &Self) -> bool;\n\n fn join(&self, other: &Self, context: &mut JoinContext) -> Self;\n\n\n\n fn join_default(&self, other: &Self) -> Self {\n\n self.join(other, &mut JoinContext::None)\n\n }\n\n\n\n /*fn join_many<'t, I>(vals: I) -> Option<Self>\n\n where\n\n I: IntoIterator<Item = &'t Self>,\n\n Self: 't,\n\n {\n\n let mut cur: Option<Self> = None;\n\n for val in vals {\n\n cur = Some(if let Some(cur) = cur {\n\n cur.join(val)\n\n } else {\n\n val.clone()\n\n })\n", "file_path": "optimization/src/lattices/mod.rs", "rank": 39, "score": 146547.00034075754 }, { "content": "pub trait Dot<TNode>: Sized {\n\n fn into_dot_format<T>(&self, writer: &mut dyn Write, graph_name: &str, data: &T)\n\n where\n\n Self: Sized,\n\n T: LabelMaker<TNode>;\n\n\n\n fn dump_as_dot_file<T>(&self, filename: &PathBuf, graph_name: &str, data: &T)\n\n where\n\n Self: Sized,\n\n T: LabelMaker<TNode>,\n\n {\n\n let write_file = File::create(filename).unwrap();\n\n let mut writer = BufWriter::new(&write_file);\n\n self.into_dot_format(&mut writer, graph_name, 
data)\n\n }\n\n\n\n fn into_dot_format_string<T>(&self, graph_name: &str, data: &T) -> String\n\n where\n\n Self: Sized,\n\n T: LabelMaker<TNode>,\n", "file_path": "debugging/src/dot/mod.rs", "rank": 40, "score": 144376.82601666328 }, { "content": "pub fn check<'ast, 'src>(\n\n strtab: &'_ mut StringTable<'src>,\n\n ast: &'ast ast::AST<'src>,\n\n context: &Context<'src>,\n\n) -> (TypeSystem<'src, 'ast>, TypeAnalysis<'src, 'ast>) {\n\n let mut sem_context = SemanticContext::new(context);\n\n\n\n let mut type_system = TypeSystem::default();\n\n let mut type_analysis = TypeAnalysis::new();\n\n\n\n if let ast::AST::Program(program) = ast {\n\n let builtin_types = BuiltinTypes::add_to(&mut type_system, strtab, &mut sem_context);\n\n\n\n add_types_from_ast(\n\n strtab,\n\n &mut type_system,\n\n &mut type_analysis,\n\n &builtin_types,\n\n &sem_context,\n\n program,\n", "file_path": "type_checking/src/checker.rs", "rank": 41, "score": 144064.92004840687 }, { "content": "/// Print an error in a format intended for end users and terminate\n\n/// the program.\n\npub fn exit_with_error(err: &Error) -> ! 
{\n\n let mut stderr = io::stderr();\n\n print_error(&mut stderr, err).expect(\"unable to print error\");\n\n exit(1);\n\n}\n\n\n", "file_path": "compiler-cli/src/main.rs", "rank": 42, "score": 141903.6295873743 }, { "content": "fn normalize_stderr(stderr: &str) -> String {\n\n stderr.replace(&*ROOT_DIR, \"{ROOT}\")\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct TestSpec {\n\n pub input: PathBuf,\n\n pub references: PathBuf,\n\n pub generate_tentatives: bool,\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lib.rs", "rank": 43, "score": 141017.5215858228 }, { "content": "// TODO: not sure if this is actually necessary\n\nfn sort_functions(s: &str) -> String {\n\n let mut blocks = s\n\n .split(\"# -- Begin \")\n\n .map(|block| block.trim())\n\n .collect::<Vec<&str>>();\n\n blocks.sort();\n\n\n\n format!(\"# -- Begin {}\", blocks.join(\"\\n# -- Begin \"))\n\n}\n\n\n", "file_path": "runner-integration-tests/src/testkind/optimization.rs", "rank": 44, "score": 141017.5215858228 }, { "content": "fn remove_labels(s: &str) -> String {\n\n let regex = regex::RegexBuilder::new(r\"^\\.L[0-9]+:\\n\")\n\n .multi_line(true)\n\n .build()\n\n .unwrap();\n\n let s = regex.replace_all(s, \"\").to_string();\n\n let regex = regex::RegexBuilder::new(r\"^.*j(mp|lt|gt|e|ne|ge|le) \\.L[0-9]+\\n\")\n\n .multi_line(true)\n\n .build()\n\n .unwrap();\n\n regex.replace_all(&s, \"\").to_string()\n\n}\n\n\n", "file_path": "runner-integration-tests/src/testkind/optimization.rs", "rank": 45, "score": 141017.5215858228 }, { "content": "fn cargo_envpath(varname: &str) -> PathBuf {\n\n let r: String = env::var(varname).expect(&format!(\"env variable {} not set\", varname));\n\n PathBuf::from(r)\n\n}\n\n\n\nuse std::fs;\n\n\n", "file_path": "libfirm-rs-bindings/build.rs", "rank": 46, "score": 141017.5215858228 }, { "content": "fn strip_comments(s: &str) -> String {\n\n let regex = regex::Regex::new(\"/\\\\*.*?\\\\*/\").unwrap();\n\n regex.replace_all(s, \"\").to_string()\n\n}\n\n\n", 
"file_path": "runner-integration-tests/src/testkind/optimization.rs", "rank": 47, "score": 141017.5215858228 }, { "content": "pub fn checked_type_from_ty<'src, 'ast>(\n\n ty: &'ast ast::Type<'src>,\n\n context: &SemanticContext<'_, 'src>,\n\n type_system: &TypeSystem<'src, 'ast>,\n\n void_handling: VoidIs,\n\n) -> CheckedType<'src> {\n\n let void_handling = if ty.array_depth > 0 {\n\n VoidIs::Forbidden\n\n } else {\n\n void_handling\n\n };\n\n\n\n let mut checked_ty = checked_type_from_basic_ty(&ty.basic, context, type_system, void_handling);\n\n\n\n for _ in 0..ty.array_depth {\n\n checked_ty = CheckedType::Array(Box::new(checked_ty));\n\n }\n\n\n\n checked_ty\n\n}\n", "file_path": "type_checking/src/checker.rs", "rank": 48, "score": 139847.1071088979 }, { "content": "fn normalize_asm(asm: &str) -> String {\n\n [\n\n strip_comments,\n\n remove_trailing_whitespace,\n\n remove_labels,\n\n sort_functions,\n\n ]\n\n .iter()\n\n .fold(asm.to_owned(), |acc, transform| transform(&acc))\n\n}\n\n\n", "file_path": "runner-integration-tests/src/testkind/optimization.rs", "rank": 49, "score": 139054.34476257887 }, { "content": "// TODO: this could also be done in strip_comments\n\nfn remove_trailing_whitespace(s: &str) -> String {\n\n let mut lines: Vec<&str> = vec![];\n\n for line in s.lines() {\n\n let trimmed = line.trim_end();\n\n if !trimmed.is_empty() {\n\n lines.push(trimmed);\n\n }\n\n }\n\n\n\n lines.join(\"\\n\")\n\n}\n\n\n", "file_path": "runner-integration-tests/src/testkind/optimization.rs", "rank": 50, "score": 139054.34476257887 }, { "content": "pub fn is_jit_operand(node: Node) -> bool {\n\n match node {\n\n Node::Const(_) | Node::Address(_) | Node::Member(_) | Node::Sel(_) | Node::Size(_) => true,\n\n _ => false,\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub enum Operand {\n\n Var(Var),\n\n\n\n /// NOTE: Tarval contains a raw pointer, thus Imm(t) is only valid for the\n\n /// lifetime of that pointer (the FIRM graph).\n\n Imm(Tarval),\n\n}\n\n\n\nimpl 
fmt::Debug for Operand {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n use self::Operand::*;\n\n match self {\n", "file_path": "lowering/src/lir.rs", "rank": 51, "score": 138299.6789993479 }, { "content": "pub fn checked_type_from_basic_ty<'src, 'ast>(\n\n basic_ty: &'ast Spanned<'src, ast::BasicType<'src>>,\n\n context: &SemanticContext<'_, 'src>,\n\n type_system: &TypeSystem<'src, 'ast>,\n\n void_handling: VoidIs,\n\n) -> CheckedType<'src> {\n\n use self::ast::BasicType::*;\n\n match &basic_ty.data {\n\n Int => CheckedType::Int,\n\n Boolean => CheckedType::Boolean,\n\n Void => match void_handling {\n\n VoidIs::Allowed => CheckedType::Void,\n\n VoidIs::Forbidden => {\n\n context.report_error(&basic_ty.span, SemanticError::VoidNotAllowed);\n\n CheckedType::Void\n\n }\n\n },\n\n Custom(name) => match type_system.lookup_class(*name) {\n\n Some((_, class_id)) => CheckedType::TypeRef(class_id),\n\n None => {\n", "file_path": "type_checking/src/checker.rs", "rank": 52, "score": 137887.91498128965 }, { "content": "/// `None` indicates that the given type is not convertible, which\n\n/// is not necessarily an error (e.g. 
`void`)\n\npub fn ty_from_checked_type<'src, 'ast>(\n\n ct: &CheckedType<'src>,\n\n type_system: &'_ TypeSystem<'src, 'ast>,\n\n program: &'_ FirmProgram<'src, 'ast>,\n\n) -> Option<Ty> {\n\n let ty = match ct {\n\n CheckedType::Int => PrimitiveTy::i32().into(),\n\n CheckedType::Void => return None,\n\n CheckedType::TypeRef(class_def_id) => {\n\n let def = type_system.class(*class_def_id);\n\n let class = program.class(def).unwrap();\n\n let ty = class.borrow().entity.ty();\n\n ty.pointer().into()\n\n // If, for some unforeseen reason, the line above does not work,\n\n // return this instead: `PrimitiveTy::ptr().into()`.\n\n // However, this looses the class type we are pointing at.\n\n // We need this information in optimizations.\n\n }\n\n CheckedType::Array(checked_type) => {\n\n ty_from_checked_type(checked_type, type_system, program)\n", "file_path": "firm-construction/src/type_translation.rs", "rank": 53, "score": 137887.91498128965 }, { "content": "pub trait AstLintPass<'a, 'f>: LintPass {\n\n fn check_program(\n\n &mut self,\n\n _cx: &EarlyContext<'a, 'f>,\n\n _program: &Spanned<'f, ast::Program<'f>>,\n\n ) {\n\n }\n\n fn check_class_decl(\n\n &mut self,\n\n _cx: &EarlyContext<'a, 'f>,\n\n _class_decl: &Spanned<'f, ast::ClassDeclaration<'f>>,\n\n ) {\n\n }\n\n fn check_class_member(\n\n &mut self,\n\n _cx: &EarlyContext<'a, 'f>,\n\n _class_member: &Spanned<'f, ast::ClassMember<'f>>,\n\n ) {\n\n }\n\n fn check_parameter_list(\n", "file_path": "compiler-lib/src/linter/mod.rs", "rank": 54, "score": 136606.4608125858 }, { "content": "pub trait SemanticLintPass<'a, 'f>: LintPass {\n\n fn check_program(\n\n &mut self,\n\n _cx: &LateContext<'a, 'f>,\n\n _program: &Spanned<'f, ast::Program<'f>>,\n\n ) {\n\n }\n\n fn check_class_decl(\n\n &mut self,\n\n _cx: &LateContext<'a, 'f>,\n\n _class_decl: &Spanned<'f, ast::ClassDeclaration<'f>>,\n\n ) {\n\n }\n\n fn check_class_member(\n\n &mut self,\n\n _cx: &LateContext<'a, 'f>,\n\n _class_member: &Spanned<'f, 
ast::ClassMember<'f>>,\n\n ) {\n\n }\n\n fn check_parameter_list(\n", "file_path": "compiler-lib/src/linter/mod.rs", "rank": 55, "score": 136606.4608125858 }, { "content": "pub fn default_label(node: &Node) -> Label {\n\n let mut label = Label::from_text(format!(\"{:?}\", node));\n\n if Node::is_proj(*node) {\n\n label = label.shape(Shape::Note);\n\n }\n\n if let Some(span) = Spans::lookup_span(*node) {\n\n label = label.append(format!(\" [src:{}]\", span));\n\n }\n\n label\n\n}\n\n\n\nimpl<S: BuildHasher> LabelMaker<Node> for HashMap<Node, Label, S> {\n\n fn label_for_node(&self, node: &Node) -> Label {\n\n self.get(&node)\n\n .cloned()\n\n .unwrap_or_else(|| Label::from_text(\"\".to_string()))\n\n }\n\n}\n\n\n", "file_path": "debugging/src/dot/implementations.rs", "rank": 56, "score": 136243.15652087147 }, { "content": "pub fn u8_to_printable_representation(byte: u8) -> String {\n\n let bytes = escape_default(byte).collect::<Vec<u8>>();\n\n let rep = unsafe { std::str::from_utf8_unchecked(&bytes) };\n\n rep.to_owned()\n\n}\n\n\n", "file_path": "diagnostics/src/diagnostics.rs", "rank": 57, "score": 136243.15652087147 }, { "content": "fn generate_tentative_reference(setup: &TestSpec, actual: &str, label: &str) -> Option<PathBuf> {\n\n if !setup.generate_tentatives {\n\n return None;\n\n }\n\n\n\n let file_tentative = tentative_file_path(&add_extension(&setup.references, label));\n\n\n\n File::create(&file_tentative)\n\n .and_then(|mut file| file.write_all(actual.as_bytes()))\n\n .ok();\n\n\n\n Some(file_tentative)\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lib.rs", "rank": 58, "score": 136013.08148734382 }, { "content": "fn main() {\n\n let this_crate_dir = PathBuf::from(std::env::var(\"CARGO_MANIFEST_DIR\").unwrap());\n\n let impl_crate_dir = this_crate_dir.ancestors().nth(1).unwrap().join(\"mjrt-impl\");\n\n\n\n assert!(\n\n std::fs::read_to_string(impl_crate_dir.clone().join(\"Cargo.toml\"))\n\n .unwrap()\n\n .contains(\"mjrt-impl\")\n\n );\n\n\n\n 
let features: Vec<_> = std::env::vars()\n\n .filter_map(|(var, _)| {\n\n const CARGO_FEATURE: &'static str = \"CARGO_FEATURE_\";\n\n if var.starts_with(CARGO_FEATURE) {\n\n let feature = var.trim_start_matches(CARGO_FEATURE);\n\n Some((var.to_owned(), feature.to_owned()))\n\n } else {\n\n None\n\n }\n\n })\n", "file_path": "mjrt/build.rs", "rank": 59, "score": 134935.26593146558 }, { "content": "pub fn external_val(ty: Ty) -> NodeValue {\n\n debug_assert!(!ty.mode().is_pointer() || PointerTy::from(ty).is_some());\n\n NodeValue::non_const_val(ty.mode(), MemoryArea::external())\n\n}\n\n\n", "file_path": "optimization/src/lattices/heap.rs", "rank": 60, "score": 134283.96439326322 }, { "content": "pub fn dom_info_box(node: &Node) -> Label {\n\n if let Node::Block(block) = node {\n\n let dom_depth = unsafe { bindings::get_Block_dom_depth(node.internal_ir_node()) };\n\n Label::from_text(format!(\n\n r#\"{{{body}|{{Dom Depth|{dom_depth}}}|{{Loop Depth|{loop_depth}}}}}\"#,\n\n dom_depth = dom_depth,\n\n loop_depth = block.loop_depth(),\n\n body = escape_record_content(&format!(\"{:?}\", block)),\n\n ))\n\n .shape(Shape::Record)\n\n .styles(vec![Style::Rounded, Style::Filled])\n\n } else {\n\n default_label(node)\n\n }\n\n}\n\n\n", "file_path": "debugging/src/dot/implementations.rs", "rank": 61, "score": 134283.96439326322 }, { "content": "pub fn dom_info_box(node: &Node) -> Label {\n\n let label = if let Node::Block(block) = node {\n\n let dom_depth = unsafe { bindings::get_Block_dom_depth(node.internal_ir_node()) };\n\n Label::from_text(format!(\n\n r#\"{{{body}|{{Dom Depth|{dom_depth}}}|{{Loop Depth|{loop_depth}}}}}\"#,\n\n dom_depth = dom_depth,\n\n loop_depth = block.loop_depth(),\n\n body = escape_record_content(&format!(\"{:?}\", block)),\n\n ))\n\n .shape(Shape::Record)\n\n .styles(vec![Style::Rounded, Style::Filled])\n\n } else {\n\n default_label(node)\n\n };\n\n\n\n if !is_movable(*node) {\n\n label.add_style(Style::Bold)\n\n } else {\n\n 
label.add_style(Style::Dashed)\n\n }\n\n}\n\n\n", "file_path": "optimization/src/code_placement.rs", "rank": 62, "score": 134283.96439326322 }, { "content": "pub fn exec_ast_inspector_test(input: PathBuf) {\n\n let spec = TestSpec {\n\n references: input.clone(),\n\n input,\n\n generate_tentatives: true,\n\n };\n\n\n\n let (input_without_yaml_path, data) = load_test_data::<Data>(&spec);\n\n\n\n let callinfo = CompilerCall::AstInspector {\n\n content: data.reference.content.clone(),\n\n kind: data.reference.kind.clone(),\n\n typeinfo: data.reference.typeinfo.clone(),\n\n };\n\n\n\n let mut call = compiler_call(callinfo, &input_without_yaml_path);\n\n println!(\"Executing: {:?}\", call);\n\n\n\n let output = call.output().expect(\"failed to call ast-inspector\");\n\n\n\n assert_output(\n\n &output,\n\n data.reference.clone().into_reference_data(&spec.references),\n\n &spec,\n\n );\n\n}\n", "file_path": "runner-integration-tests/src/testkind/spans.rs", "rank": 63, "score": 132529.8045645579 }, { "content": "pub fn default_lir_label(block: &BasicBlock) -> Label {\n\n let mut s = Vec::new();\n\n\n\n write!(&mut s, \"\\\\lCOPY IN\\\\l\").unwrap();\n\n for instr in block.code.copy_in.iter() {\n\n write!(&mut s, \"{:?}\\\\l\", &**instr).unwrap();\n\n }\n\n\n\n write!(&mut s, \"\\\\lBODY\\\\l\").unwrap();\n\n for instr in block.code.body.iter() {\n\n write!(&mut s, \"{:?}\\\\l\", &**instr).unwrap();\n\n }\n\n\n\n write!(&mut s, \"\\\\lCOPY OUT\\\\l\").unwrap();\n\n for instr in block.code.copy_out.iter() {\n\n write!(&mut s, \"{:?}\\\\l\", &**instr).unwrap();\n\n }\n\n\n\n write!(&mut s, \"\\\\lLEAVE\\\\l\").unwrap();\n\n for instr in block.code.leave.iter() {\n\n write!(&mut s, \"{:?}\\\\l\", &**instr).unwrap();\n\n }\n\n\n\n lir_box(block, &format!(\"{}\\\\l\", String::from_utf8(s).unwrap()))\n\n}\n\n\n", "file_path": "lowering/src/lir/debugging.rs", "rank": 64, "score": 132415.35272143714 }, { "content": "fn exactly<'f>(thing: impl Into<Exactly<'f>>) -> Exactly<'f> {\n\n 
thing.into()\n\n}\n\n\n\nimpl<'f> ExpectedToken<'f> for Exactly<'f> {\n\n type Yields = ();\n\n fn matching(&self, token: &TokenKind<'f>) -> Option<Self::Yields> {\n\n if &self.0 == token {\n\n Some(())\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl<'f> ExpectedToken<'f> for BinaryOp {\n\n type Yields = (ast::BinaryOp, Precedence, Assoc);\n\n fn matching(&self, token: &TokenKind<'f>) -> Option<Self::Yields> {\n\n match token {\n\n TokenKind::Operator(op) => BINARY_OPERATORS\n", "file_path": "parser/src/parser.rs", "rank": 65, "score": 130782.3361586389 }, { "content": "trait OperandUsingRegs {\n\n // The list of registers used by the operand, excluding rsp and rbp.\n\n fn used_regs(self) -> Vec<Amd64Reg>;\n\n}\n\n\n\nimpl OperandUsingRegs for lir::AddressComputation<Amd64Reg> {\n\n fn used_regs(self) -> Vec<Amd64Reg> {\n\n self.operands() // FIXME move that here\n\n }\n\n}\n\n\n\npub(crate) trait OperandTrait {\n\n fn size(self) -> Size;\n\n fn into_size(self, size: Size) -> Self;\n\n fn eq_ignore_size(self, other: Self) -> bool;\n\n}\n\n\n\n#[derive(Debug, Copy, Clone)]\n\npub(crate) enum SrcOperand {\n\n Ar(Ar),\n", "file_path": "lowering/src/codegen.rs", "rank": 66, "score": 129959.4897314428 }, { "content": "#[derive(Debug, Clone)]\n\nstruct EnumRewriter {\n\n rules: Vec<ConstifiedEnumRewrite>,\n\n rewrites: HashMap<String, String>, // bindgen_enum_name => trim_start_matches prefix\n\n}\n\n\n\nimpl EnumRewriter {\n\n fn from_rewrites(rules: Vec<ConstifiedEnumRewrite>) -> Self {\n\n let mut rewrites = HashMap::new();\n\n for rule in &rules {\n\n let res = rewrites.insert(rule.bindgen_enum_name(), rule.trim_start_matches_prefix());\n\n debug_assert!(\n\n res.is_none(),\n\n \"duplicate in constified_enum_rewrites: {:?}\",\n\n rule\n\n );\n\n }\n\n return EnumRewriter { rewrites, rules };\n\n }\n\n fn builder_with_rewrites(&self, builder: bindgen::Builder) -> bindgen::Builder {\n\n let mut builder = builder;\n", "file_path": "libfirm-rs-bindings/build.rs", 
"rank": 67, "score": 129585.34373248352 }, { "content": "/// Execute the command specified by [`CliCommand`]. This takes over the\n\n/// process, including stdout and stderr, and might not return.\n\npub fn run_compiler(cmd: &CliCommand) -> Result<(), Error> {\n\n match cmd {\n\n CliCommand::Echo { path } => cmd_echo(path),\n\n CliCommand::LexerTest { path } => cmd_lextest(path),\n\n CliCommand::ParserTest { path } => cmd_parsetest(path),\n\n CliCommand::PrintAst { path } => cmd_printast(path, &print::pretty::print),\n\n CliCommand::DebugDumpAst { path } => cmd_printast(path, &print::structure::print),\n\n CliCommand::Check { path, lint } => cmd_check(path, *lint),\n\n CliCommand::CompileFirm(options) => cmd_compile_firm(options),\n\n CliCommand::Compile(options) => cmd_compile(options),\n\n }\n\n}\n\n\n", "file_path": "compiler-cli/src/main.rs", "rank": 68, "score": 129239.65197088319 }, { "content": "pub fn default_reference_stderr(base: &PathBuf) -> ExpectedData {\n\n ExpectedData::InFile(add_extension(base, \"stderr\"))\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 69, "score": 128925.85397653148 }, { "content": "pub fn default_reference_stdout(base: &PathBuf) -> ExpectedData {\n\n ExpectedData::InFile(add_extension(base, \"stdout\"))\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 70, "score": 128925.85397653148 }, { "content": "pub fn default_reference_exitcode(base: &PathBuf) -> ExpectedData {\n\n ExpectedData::InFile(add_extension(base, \"exitcode\"))\n\n}\n\n\n\nimpl OptionalReferenceData {\n\n pub fn all_from_own_file(base: &PathBuf) -> Self {\n\n Self {\n\n stderr: Some(default_reference_stderr(base)),\n\n stdout: Some(default_reference_stdout(base)),\n\n exitcode: Some(default_reference_exitcode(base)),\n\n }\n\n }\n\n}\n\n\n\nimpl ReferenceData {\n\n pub fn all_from_own_file(base: &PathBuf) -> Self {\n\n Self {\n\n stderr: default_reference_stderr(base),\n\n stdout: 
default_reference_stdout(base),\n\n exitcode: default_reference_exitcode(base),\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 71, "score": 128925.85397653148 }, { "content": "// QuotingWormhole is a gadget to encapsulate the use of serde_json\n\n// for transfer of test case definitions from this procedural macro\n\n// to the generated test cases.\n\n//\n\n// It must be public because the quote!() in impl quote::ToTokens uses it,\n\n// but it should never be used by external users of this crate.\n\npub trait QuotingWormhole<'a>: serde::Deserialize<'a> + serde::Serialize {\n\n fn to_json(&self) -> String {\n\n serde_json::to_string_pretty(self).expect(\"could not serialize syntax test case\")\n\n }\n\n fn from_json(json: &'a str) -> Self {\n\n serde_json::from_str(json).expect(\"could not deserialize syntax test case\")\n\n }\n\n}\n\n\n\nimpl<'a> QuotingWormhole<'a> for SyntaxTestCase {}\n\nimpl<'a> QuotingWormhole<'a> for SemanticTestCase {}\n\n\n\nmacro_rules! derive_to_tokens_for_wormwhole {\n\n ($wormwholetype:ty) => {\n\n impl quote::ToTokens for $wormwholetype {\n\n fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {\n\n let x = self.to_json();\n\n let code = quote!({\n\n use mjtest::QuotingWormhole;\n\n let y: $wormwholetype = QuotingWormhole::from_json(#x);\n", "file_path": "mjtest-rs/src/lib.rs", "rank": 72, "score": 127784.50500985449 }, { "content": "pub fn basic_block_scheduling(func: &mut lir::Function) {\n\n // reverse postorder\n\n let ordered = func\n\n .graph\n\n .postorder_blocks()\n\n .into_iter()\n\n .rev()\n\n .collect::<Vec<_>>();\n\n func.graph.blocks_scheduled = Some(ordered);\n\n}\n", "file_path": "lowering/src/basic_block_scheduling.rs", "rank": 73, "score": 127455.48090111563 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_optimization_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"optimization\", \"\", 
|test_name, mj_file| {\n\n let function_name = Ident::new(&test_name, Span::call_site());\n\n let path_str = mj_file.to_str().unwrap();\n\n\n\n quote! {\n\n #[test]\n\n fn #function_name() {\n\n let input = PathBuf::from(#path_str);\n\n exec_optimization_test(input.clone(), Backend::Own);\n\n exec_optimization_test(input.clone(), Backend::Libfirm);\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 74, "score": 127294.25648133842 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_assembly_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"assembly\", \"\", |test_name, mj_file| {\n\n default_test_generator(\n\n &quote! { CompilerCall::RawCompiler(CompilerPhase::Assembly) },\n\n test_name,\n\n mj_file,\n\n )\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 75, "score": 127294.25648133842 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_binary_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"binary\", \"\", |test_name, mj_file| {\n\n let function_name = Ident::new(&test_name, Span::call_site());\n\n let path_str = mj_file.to_str().unwrap();\n\n\n\n quote! 
{\n\n #[test]\n\n fn #function_name() {\n\n let input = PathBuf::from(#path_str);\n\n exec_binary_test(input.clone(), Level::None, Backend::Own);\n\n exec_binary_test(input.clone(), Level::None, Backend::Libfirm);\n\n exec_binary_test(input.clone(), Level::Aggressive, Backend::Own);\n\n exec_binary_test(input.clone(), Level::Aggressive, Backend::Libfirm);\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 76, "score": 127294.25648133842 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_timeout_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"timeout\", \"\", |test_name, mj_file| {\n\n let function_name = Ident::new(&test_name, Span::call_site());\n\n let path_str = mj_file.to_str().unwrap();\n\n\n\n quote! {\n\n #[test]\n\n fn #function_name() {\n\n let input = PathBuf::from(#path_str);\n\n exec_timeout_test(input.clone(), Level::None, Backend::Own);\n\n exec_timeout_test(input.clone(), Level::None, Backend::Libfirm);\n\n exec_timeout_test(input.clone(), Level::Aggressive, Backend::Own);\n\n exec_timeout_test(input.clone(), Level::Aggressive, Backend::Libfirm);\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 77, "score": 127294.25648133842 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_semantic_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"semantic\", \"\", |test_name, mj_file| {\n\n default_test_generator(\n\n &quote! 
{ CompilerCall::RawCompiler(CompilerPhase::Semantic) },\n\n test_name,\n\n mj_file,\n\n )\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 78, "score": 127294.25648133842 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_parser_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"parser\", \"\", |test_name, mj_file| {\n\n default_test_generator(\n\n &quote! { CompilerCall::RawCompiler(CompilerPhase::Parser) },\n\n test_name,\n\n mj_file,\n\n )\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 79, "score": 127294.25648133842 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_ast_inspector_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"spans\", \"\", |test_name, mj_file| {\n\n let function_name = Ident::new(&test_name, Span::call_site());\n\n let path_str = mj_file.to_str().unwrap();\n\n\n\n quote! {\n\n #[test]\n\n fn #function_name() {\n\n let input = PathBuf::from(#path_str);\n\n exec_ast_inspector_test(input);\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 80, "score": 127294.25648133842 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_lexer_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"lexer\", \"\", |test_name, mj_file| {\n\n default_test_generator(\n\n &quote! 
{ CompilerCall::RawCompiler(CompilerPhase::Lexer) },\n\n test_name,\n\n mj_file,\n\n )\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 81, "score": 127294.25648133842 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_lints_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"lints\", \"\", |test_name, mj_file| {\n\n default_test_generator(\n\n &quote! { CompilerCall::RawCompiler(CompilerPhase::Linter) },\n\n test_name,\n\n mj_file,\n\n )\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 82, "score": 127294.25648133842 }, { "content": "pub fn get_files<T: FromReferencesPath<T> + DeserializeOwned>(\n\n path_input: &PathBuf,\n\n path_references: &PathBuf,\n\n) -> Result<TestData<T>, Error> {\n\n let input: String = if let Ok(utf8_data) = read(&Some(path_input.clone())) {\n\n utf8_data\n\n } else {\n\n // we do not support yaml in front of non UTF-8 files.\n\n // Use default\n\n return Ok(TestData {\n\n input: InputData::NotLoaded(path_input.clone()),\n\n reference: T::from_reference_path(path_references),\n\n });\n\n };\n\n let input_file = yaml::FrontMatter::new(&input);\n\n\n\n // 1.) Try to find reference output in yaml front matter\n\n if let Some(yaml) = input_file.front_matter() {\n\n let reference: T = serde_yaml::from_str(yaml).context(DataError::InvalidFrontMatter {\n\n path: path_input.clone(),\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 83, "score": 125750.15322597753 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_ast_idempotence_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"ast\", \"_idempotence\", |test_name, mj_file| {\n\n let function_name = Ident::new(&test_name, Span::call_site());\n\n let path_str = mj_file.to_str().unwrap();\n\n\n\n quote! 
{\n\n #[test]\n\n fn #function_name() {\n\n let input = PathBuf::from(#path_str);\n\n\n\n assert_compiler_phase::<OptionalReferenceData>(\n\n CompilerCall::RawCompiler(CompilerPhase::Ast),\n\n &TestSpec {\n\n input: add_extension(&input, \"stdout\"),\n\n references: input.clone(),\n\n generate_tentatives: false\n\n }\n\n );\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 84, "score": 125731.70873731391 }, { "content": "#[allow(clippy::needless_pass_by_value)] // rust-clippy/issues/3067\n\n#[proc_macro]\n\npub fn gen_ast_reference_integration_tests(_args: TokenStream) -> TokenStream {\n\n gen_integration_tests(\"ast\", \"\", |test_name, mj_file| {\n\n default_test_generator(\n\n &quote! { CompilerCall::RawCompiler(CompilerPhase::Ast) },\n\n test_name,\n\n mj_file,\n\n )\n\n })\n\n}\n\n\n", "file_path": "codegen-integration-tests/src/lib.rs", "rank": 85, "score": 125731.70873731391 }, { "content": "fn parse_flag(s: &str) -> Result<optimization::Flag, ParseError> {\n\n match s.to_ascii_lowercase().as_str() {\n\n \"d\" | \"vcg\" => Ok(optimization::Flag::DumpVcg),\n\n \"g\" | \"gui\" => {\n\n if cfg!(feature = \"debugger_gui\") {\n\n Ok(optimization::Flag::Gui)\n\n } else {\n\n Err(ParseError::NoDebuggerSupport {\n\n flag: s.to_string(),\n\n })\n\n }\n\n }\n\n _ => Err(ParseError::UnknownFlag {\n\n name: s.to_string(),\n\n }),\n\n }\n\n}\n\n\n", "file_path": "compiler-cli/src/optimization_arg.rs", "rank": 86, "score": 125626.39268896576 }, { "content": "pub fn exec_optimization_test(input: PathBuf, backend: Backend) {\n\n // 1.) compile asm and binary for the unoptimized reference binary\n\n // this is either\n\n // - the file specified by 'expect: IsIdenticalTo: path\"\n\n // - or the same file as the optimized file, but without optimizations if\n\n // 'expect: Change/Unchanged'.\n\n // 2.) compile asm and binary of the input file with the given optimizations\n\n // 3.) 
Assert\n\n // - that stdout/stderr/exitcode are the same for both binaries\n\n // - if 'expect: Change', that the asm is different\n\n // - if 'expect: Unchanged', that the asm is the same\n\n // - if 'expect: IsIdenticalTo', that the asm is the same\n\n let path_binary_optimized =\n\n input.with_extension(&format!(\"{}.optimized.out\", backend.to_ascii_label()));\n\n let path_binary_reference =\n\n input.with_extension(&format!(\"{}.reference.out\", backend.to_ascii_label()));\n\n let path_asm_optimized =\n\n input.with_extension(&format!(\"{}.optimized.S\", backend.to_ascii_label()));\n\n let path_asm_reference =\n\n input.with_extension(&format!(\"{}.reference.S\", backend.to_ascii_label()));\n", "file_path": "runner-integration-tests/src/testkind/optimization.rs", "rank": 87, "score": 124118.55573078446 }, { "content": "fn parse_level(arg: &str) -> Result<optimization::Level, ParseError> {\n\n let mut parts = arg.split(':');\n\n let level = parts.next().unwrap();\n\n\n\n match (\n\n level.to_ascii_lowercase().as_str(),\n\n parts.next(),\n\n parts.next(),\n\n ) {\n\n (_, _, Some(_)) => Err(ParseError::TooManySeparators),\n\n (\"custom\", Some(sequence), _) => {\n\n parse_custom_sequence(sequence).map(optimization::Level::Custom)\n\n }\n\n (\"custom\", None, _) => Err(ParseError::CustomWithoutList),\n\n (\"none\", None, _) => Ok(optimization::Level::None),\n\n (\"moderate\", None, _) => Ok(optimization::Level::Moderate),\n\n (\"aggressive\", None, _) => Ok(optimization::Level::Aggressive),\n\n (_, _, _) => Err(ParseError::UnknownLevel {\n\n name: level.to_string(),\n\n }),\n\n }\n\n}\n\n\n", "file_path": "compiler-cli/src/optimization_arg.rs", "rank": 88, "score": 123917.5966394662 }, { "content": "pub fn label_with_cse_info(node: &Node, highlight: &Node) -> Label {\n\n let mut label = default_label(node);\n\n\n\n if node == highlight {\n\n label = label\n\n .add_style(Style::Filled)\n\n .fillcolor(X11Color::Blue)\n\n .fontcolor(X11Color::White);\n\n }\n\n\n\n if 
!CommonSubExpr::node_qualifies_for_elim(*node) {\n\n label.add_style(Style::Bold)\n\n } else {\n\n label.add_style(Style::Dashed)\n\n }\n\n}\n", "file_path": "optimization/src/common_subexpr_elim.rs", "rank": 89, "score": 122930.65112258939 }, { "content": "pub fn get_firm_mode(ty: &CheckedType<'_>) -> Option<Mode> {\n\n match ty {\n\n CheckedType::Int => Some(Mode::Is()),\n\n CheckedType::Boolean => Some(Mode::Bu()),\n\n CheckedType::TypeRef(_) | CheckedType::Array(_) | CheckedType::Null => Some(Mode::P()),\n\n CheckedType::Void | CheckedType::UnknownType(_) => None,\n\n }\n\n}\n", "file_path": "firm-construction/src/type_translation.rs", "rank": 90, "score": 122930.65112258939 }, { "content": "#[cfg(feature = \"debugger_gui\")]\n\n#[allow(clippy::implicit_hasher)]\n\n#[allow(clippy::print_stdout)]\n\npub fn pause(breakpoint: Breakpoint, program: HashMap<String, GraphState>) {\n\n if let Ok(fnname) = std::env::var(\"COMPRAKT_DEBUGGER_GUI_DUMP_LIR_DOT_GRAPH\") {\n\n if breakpoint.label.matches(\"LIR\").count() > 0 {\n\n println!(\"{}\", program[&fnname].dot_content);\n\n }\n\n }\n\n\n\n let mut filters = FILTERS.lock().unwrap();\n\n\n\n if filters.is_disabled(&breakpoint, &program) {\n\n log::info!(\n\n \"ignoring disabled breakpoint {} ({}:{})\",\n\n breakpoint.label,\n\n breakpoint.file,\n\n breakpoint.line,\n\n );\n\n\n\n return;\n\n }\n\n\n", "file_path": "debugging/src/lib.rs", "rank": 91, "score": 122115.9289015991 }, { "content": "#[cfg(not(feature = \"debugger_gui\"))]\n\n#[allow(clippy::implicit_hasher)]\n\npub fn pause(_breakpoint: Breakpoint, _program: HashMap<String, GraphState>) {}\n\n\n", "file_path": "debugging/src/lib.rs", "rank": 92, "score": 122115.9289015991 }, { "content": "#[derive(Debug, Clone, Copy, From)]\n\nstruct SortCopyPropEntity(DstOperand);\n\n\n\n// keep in sync with Hash\n\nimpl PartialEq for SortCopyPropEntity {\n\n fn eq(&self, other: &Self) -> bool {\n\n use self::DstOperand::*;\n\n match (self.0, other.0) {\n\n // size is not 
identifying here (rax and eax) **must** be the same for the purpose of\n\n // cycle removal\n\n (Reg(reg1), Reg(reg2)) => reg1.reg == reg2.reg,\n\n // same goes for Ar\n\n (Ar(ar1), Ar(ar2)) => ar1.pos == ar2.pos,\n\n (Reg(_), Ar(_)) | (Ar(_), Reg(_)) => false,\n\n }\n\n }\n\n}\n\n\n\n// keep in sync with PartialEq\n\nimpl Hash for SortCopyPropEntity {\n\n fn hash<H: Hasher>(&self, state: &mut H) {\n", "file_path": "lowering/src/codegen.rs", "rank": 93, "score": 119838.81609309094 }, { "content": "pub fn compiler_call(compiler_call: CompilerCall, filepath: &PathBuf) -> Command {\n\n match compiler_call {\n\n CompilerCall::RawCompiler(phase) => {\n\n let mut cmd = env::var(\"COMPILER_BINARY\")\n\n .map(|path| {\n\n log::debug!(\"Test run using alternate compiler binary at {}\", path);\n\n Command::new(path)\n\n })\n\n .unwrap_or_else(|_| {\n\n let binary = project_binary(Some(\"compiler-cli\"));\n\n log::debug!(\"Test run using the default compiler binary at {:?}\", binary);\n\n Command::new(binary)\n\n });\n\n\n\n cmd.env(\"TERM\", \"dumb\"); // disable color output\n\n cmd.env(compile_time_assertions::ENV_VAR_NAME, \"enabled\");\n\n match phase {\n\n CompilerPhase::Parser | CompilerPhase::Linter => {\n\n cmd.env(\"CHOCOLATE\", \"1\");\n\n }\n", "file_path": "runner-integration-tests/src/lib.rs", "rank": 94, "score": 119736.5058833718 }, { "content": "fn parse_custom_sequence(s: &str) -> Result<Vec<optimization::Optimization>, ParseError> {\n\n let mut list = Vec::new();\n\n for opt in s.split(',').filter(|s| !s.is_empty()) {\n\n let mut fields = opt.split('.');\n\n let kind = optimization::Kind::from_str(fields.next().unwrap()).map_err(|_| {\n\n ParseError::UnknownOptimization {\n\n name: opt.to_string(),\n\n }\n\n })?;\n\n let flags = fields\n\n .map(parse_flag)\n\n .collect::<Result<Vec<optimization::Flag>, _>>()?;\n\n\n\n list.push(optimization::Optimization { kind, flags });\n\n }\n\n\n\n Ok(list)\n\n}\n\n\n\n#[derive(Debug, Clone, Default)]\n", "file_path": 
"compiler-cli/src/optimization_arg.rs", "rank": 95, "score": 119718.3720440254 }, { "content": "pub fn label_with_dom_info(graph: Graph, node: &Node, highlight: &Node) -> Label {\n\n // TODO: no side effects in debug code\n\n graph.assure_loopinfo();\n\n\n\n let mut label = dom_info_box(node);\n\n\n\n let highlight_block = if let Node::Block(block) = highlight {\n\n *block\n\n } else {\n\n highlight.block()\n\n };\n\n\n\n if let Node::Block(node_block) = node {\n\n // NOTE: block also dominates itself!\n\n if highlight_block.dominates(*node_block) {\n\n label = label\n\n .add_style(Style::Filled)\n\n .fillcolor(X11Color::Pink)\n\n .fontcolor(X11Color::White);\n\n }\n", "file_path": "optimization/src/code_placement.rs", "rank": 96, "score": 116513.10557127642 }, { "content": "pub fn read(file: &Option<PathBuf>) -> Result<String, io::Error> {\n\n // TODO: stream instead of reading everything into string\n\n let mut contents = String::new();\n\n\n\n if let Some(path) = file {\n\n File::open(&path)?.read_to_string(&mut contents)?;\n\n } else {\n\n stdin().read_to_string(&mut contents)?;\n\n };\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "runner-integration-tests/src/lookup.rs", "rank": 97, "score": 116513.10557127642 }, { "content": "pub trait AsmOut: std::io::Write + std::os::unix::io::AsRawFd {}\n\n\n\nimpl AsmOut for std::fs::File {}\n\n\n", "file_path": "compiler-lib/src/backend.rs", "rank": 98, "score": 116280.8146055874 }, { "content": "#[proc_macro]\n\npub fn gen_syntax_tests(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let gen_args = GenArgs::must_from_token_stream(input);\n\n let cases = SyntaxTestCase::all().expect(\"could not load test cases\");\n\n // generate test cases\n\n let mut out = proc_macro2::TokenStream::new();\n\n cases\n\n .into_iter()\n\n .for_each(|tc| gen_args.gen_testcase::<SyntaxTestCase>(&tc, &mut out));\n\n out.into()\n\n}\n\n\n", "file_path": "mjtest-rs/mjtest_macros/src/lib.rs", "rank": 99, "score": 
115421.94448525319 } ]
Rust
src/chirps.rs
vi/desyncmeasure
ed9767c6d2b52e74133c9a8ba8ce6dbf504a48c2
use std::sync::Arc; pub fn get_buckets() -> [[usize; 4]; 7] { let mut ret = [[0; 4]; 7]; let mut freq = 830; for b in &mut ret { for x in b { *x = freq / 10; freq += 87; } } ret } fn encode_bucket(x: usize) -> [f32; 4] { assert!(x < 4); match x { 0 => [1.0, 0.0, 0.0, 0.0], 1 => [0.0, 1.0, 0.0, 0.0], 2 => [0.0, 0.0, 1.0, 0.0], 3 => [0.0, 0.0, 0.0, 1.0], _ => unreachable!(), } } pub fn number_indexes(mut x: usize) -> [usize; 7] { assert!(x < 4096); let mut idxs = [0; 7]; let mut parity = 0; for i in 0..=5 { idxs[i] = x & 0b11; parity ^= x & 0b11; x >>= 2; } idxs[6] = parity; idxs } pub fn encode_number(x: usize) -> [[f32; 4]; 7] { assert!(x < 4096); let idxs = number_indexes(x); let mut cfs = [[0.0f32; 4]; 7]; for i in 0..=6 { cfs[i] = encode_bucket(idxs[i]); } cfs } pub struct ChirpAnalyzer { fft: Arc<dyn rustfft::Fft<f32>>, buckets : [[usize; 4]; 7], saved_num: usize, save_num_ctr: usize, } impl ChirpAnalyzer { pub fn new() -> ChirpAnalyzer { ChirpAnalyzer { fft: rustfft::FftPlanner::new().plan_fft_inverse(800), buckets : get_buckets(), saved_num: usize::MAX, save_num_ctr: 0, } } pub fn analyze_block(&mut self, _ts: f64, mut block: Vec<num_complex::Complex32>) -> Option<usize> { assert_eq!(block.len(), 800); self.fft.process(&mut block); let mut signal_quality = 100.0; let mut indexes = Vec::with_capacity(7); for bucket in self.buckets { let mut magnitutes = Vec::with_capacity(4); for string in bucket { let mut x = 0.0; x += 0.3*block[string].norm(); x += 0.7*block[string].norm(); x += 1.0*block[string].norm(); x += 0.7*block[string].norm(); x += 0.3*block[string].norm(); magnitutes.push(x); } let mut sum : f32 = magnitutes.iter().sum(); let best_idx = magnitutes.iter().enumerate().max_by_key(|(_,v)|ordered_float::OrderedFloat(**v)).unwrap().0; let mut losers_magnitutes = 0.0; for (n,x) in magnitutes.iter().enumerate() { if n == best_idx { continue } losers_magnitutes += *x; } sum += 0.0001; let mut qual = (magnitutes[best_idx] - losers_magnitutes)/sum; if qual < 
0.05 { qual = 0.05; } signal_quality *= qual; indexes.push(best_idx); } let mut the_num = 0; let mut multiplier = 1; for i in 0..=5 { the_num += multiplier * indexes[i]; multiplier *= 4; } let canonical_indexes_for_this_number = number_indexes(the_num); if indexes[6] != canonical_indexes_for_this_number[6] { signal_quality = 0.0; } if signal_quality > 0.01 { if the_num == self.saved_num { self.save_num_ctr += 1; if self.save_num_ctr >= 3 { Some(the_num) } else { None } } else { self.saved_num = the_num; self.save_num_ctr = 1; None } } else { self.save_num_ctr = 0; None } } }
use std::sync::Arc; pub fn get_buckets() -> [[usize; 4]; 7] { let mut ret = [[0; 4]; 7]; let mut freq = 830; for b in &mut ret { for x in b { *x = freq / 10; freq += 87; } } ret } fn encode_bucket(x: usize) -> [f
pub fn number_indexes(mut x: usize) -> [usize; 7] { assert!(x < 4096); let mut idxs = [0; 7]; let mut parity = 0; for i in 0..=5 { idxs[i] = x & 0b11; parity ^= x & 0b11; x >>= 2; } idxs[6] = parity; idxs } pub fn encode_number(x: usize) -> [[f32; 4]; 7] { assert!(x < 4096); let idxs = number_indexes(x); let mut cfs = [[0.0f32; 4]; 7]; for i in 0..=6 { cfs[i] = encode_bucket(idxs[i]); } cfs } pub struct ChirpAnalyzer { fft: Arc<dyn rustfft::Fft<f32>>, buckets : [[usize; 4]; 7], saved_num: usize, save_num_ctr: usize, } impl ChirpAnalyzer { pub fn new() -> ChirpAnalyzer { ChirpAnalyzer { fft: rustfft::FftPlanner::new().plan_fft_inverse(800), buckets : get_buckets(), saved_num: usize::MAX, save_num_ctr: 0, } } pub fn analyze_block(&mut self, _ts: f64, mut block: Vec<num_complex::Complex32>) -> Option<usize> { assert_eq!(block.len(), 800); self.fft.process(&mut block); let mut signal_quality = 100.0; let mut indexes = Vec::with_capacity(7); for bucket in self.buckets { let mut magnitutes = Vec::with_capacity(4); for string in bucket { let mut x = 0.0; x += 0.3*block[string].norm(); x += 0.7*block[string].norm(); x += 1.0*block[string].norm(); x += 0.7*block[string].norm(); x += 0.3*block[string].norm(); magnitutes.push(x); } let mut sum : f32 = magnitutes.iter().sum(); let best_idx = magnitutes.iter().enumerate().max_by_key(|(_,v)|ordered_float::OrderedFloat(**v)).unwrap().0; let mut losers_magnitutes = 0.0; for (n,x) in magnitutes.iter().enumerate() { if n == best_idx { continue } losers_magnitutes += *x; } sum += 0.0001; let mut qual = (magnitutes[best_idx] - losers_magnitutes)/sum; if qual < 0.05 { qual = 0.05; } signal_quality *= qual; indexes.push(best_idx); } let mut the_num = 0; let mut multiplier = 1; for i in 0..=5 { the_num += multiplier * indexes[i]; multiplier *= 4; } let canonical_indexes_for_this_number = number_indexes(the_num); if indexes[6] != canonical_indexes_for_this_number[6] { signal_quality = 0.0; } if signal_quality > 0.01 { if the_num == 
self.saved_num { self.save_num_ctr += 1; if self.save_num_ctr >= 3 { Some(the_num) } else { None } } else { self.saved_num = the_num; self.save_num_ctr = 1; None } } else { self.save_num_ctr = 0; None } } }
32; 4] { assert!(x < 4); match x { 0 => [1.0, 0.0, 0.0, 0.0], 1 => [0.0, 1.0, 0.0, 0.0], 2 => [0.0, 0.0, 1.0, 0.0], 3 => [0.0, 0.0, 0.0, 1.0], _ => unreachable!(), } }
function_block-function_prefixed
[ { "content": "fn main() {\n\n let buckets = desyncmeasure::chirps::get_buckets();\n\n\n\n let zeroes = [0.0f32; 800];\n\n\n\n let mut audio_data = Vec::with_capacity(800 * 2 * 8192);\n\n\n\n let fft = rustfft::FftPlanner::<f32>::new().plan_fft_forward(800);\n\n\n\n\n\n for x in 0..4096 {\n\n let mut coefs = [num_complex::Complex32::new(0.0, 0.0); 800];\n\n let data = desyncmeasure::chirps::encode_number(x);\n\n for (bn, n) in IntoIterator::into_iter(data).enumerate() {\n\n for (sn, sv) in IntoIterator::into_iter(n).enumerate() {\n\n let f = buckets[bn][sn];\n\n //coefs[f].re = 1.0 * sv - (f as f32) / 4000.0;\n\n //coefs[f].im = (f as f32)*59.452;\n\n coefs[f] = num_complex::Complex32::from_polar(1.0 * sv - (f as f32) / 4000.0, (f as f32)*59.452);\n\n }\n", "file_path": "examples/genchirps.rs", "rank": 4, "score": 32264.862266868764 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let opts : Opts = gumdrop::Options::parse_args_default_or_exit();\n\n if opts.help {\n\n println!(\"Analyse video for audio-video desynchronisation.\");\n\n println!(\"This tool can only analyse one specific video (or its parts):\");\n\n println!(\" https://vi-server.org/pub/av_sync.mkv\");\n\n println!(\"Recommended command line:\");\n\n println!(\" ffmpeg -v warning -i input_video.mp4 -pix_fmt gray -c:v rawvideo -c:a pcm_f32le -ar 8000 -ac 1 -f matroska - | desyncmeasure\");\n\n println!(\"Legend:\");\n\n println!(\" a{{V,A}} - receive timestamp against send {{video,audio}} timestamp. \");\n\n println!(\" d{{V,A}} - Relative {{video,audio}} delay. 
\");\n\n println!(\" De - A/V desyncronisation again send timestamp\");\n\n println!(\"{}\", <Opts as gumdrop::Options>::usage());\n\n return Ok(());\n\n }\n\n env_logger::init();\n\n let si = std::io::stdin();\n\n let mut si = si.lock();\n\n //let si = std::io::BufReader::with_capacity(80_000, si);\n\n\n", "file_path": "src/main.rs", "rank": 5, "score": 25127.51018911272 }, { "content": "fn video_decoders(num_threads: usize, data_collector: flume::Sender<MessageToDataCollector>) -> flume::Sender<MessageToVideoDecoder> {\n\n let (tx,rx) = flume::bounded(num_threads);\n\n\n\n for _ in 0..num_threads {\n\n let rx = rx.clone();\n\n let data_collector = data_collector.clone();\n\n std::thread::spawn(move || {\n\n #[cfg(feature = \"zbar-rust\")]\n\n let mut decoder = zbar_rust::ZBarImageScanner::new();\n\n let decoded = std::cell::Cell::new(false);\n\n\n\n data_collector.send(MessageToDataCollector::VideoThreadStarted).unwrap();\n\n 'msgloop: for msg in rx {\n\n let msg : MessageToVideoDecoder = msg;\n\n if msg.buf.len() == 0 { break }\n\n\n\n let pts = msg.pts;\n\n\n\n let decoded_code_handler = |qr:&[u8]| {\n\n if qr.len() != 4+4+1 { return; }\n", "file_path": "src/main.rs", "rank": 6, "score": 20809.99528323483 }, { "content": " }\n\n }\n\n }\n\n\n\n fn segment_tracks(&mut self, e: &std::rc::Rc<mkv::elements::Element>) {\n\n\n\n use mkv::elements::database::Class;\n\n use mkv::elements::ElementContent;\n\n\n\n match &e.content {\n\n mkv::elements::ElementContent::Master(tracks) => {\n\n for track in tracks {\n\n if track.class != Class::TrackEntry {\n\n continue;\n\n }\n\n\n\n let mut tn = None;\n\n let mut ci = None;\n\n let mut tt = None;\n\n\n", "file_path": "src/main.rs", "rank": 8, "score": 4.87766224503836 }, { "content": " if ! qr[0..4].iter().all(|x| *x >= b'0' && *x <= b'9') { return; }\n\n if qr[4] != b' ' { return; }\n\n if ! 
qr[5..9].iter().all(|x| *x >= b'0' && *x <= b'9') { return; }\n\n\n\n let ots : u32 = String::from_utf8_lossy(&qr[0..4]).parse().unwrap();\n\n let ots2 : u32 = String::from_utf8_lossy(&qr[5..9]).parse().unwrap();\n\n\n\n if ots+ots2 != 8192 { return; }\n\n data_collector.send(MessageToDataCollector::VideoTs{pts: pts, ots}).unwrap();\n\n decoded.set(true);\n\n };\n\n\n\n #[cfg(feature = \"rqrr\")] {\n\n let mut pi = rqrr::PreparedImage::prepare_from_greyscale(msg.width as usize, msg.height as usize, |x,y| {\n\n msg.buf[y*(msg.width as usize) + x]\n\n });\n\n for grid in pi.detect_grids() {\n\n if let Ok((_, qr)) = grid.decode() {\n\n let qr = qr.as_bytes();\n\n decoded_code_handler(qr);\n", "file_path": "src/main.rs", "rank": 9, "score": 3.3174961381239565 }, { "content": " };\n\n assert_eq!(scaled.as_flat_samples().strides_cwh(), (1,1,scaled.width() as usize));\n\n for qr in decoder.scan_y800(scaled.as_flat_samples().as_slice(), scaled.width(), scaled.height()).unwrap() {\n\n decoded_code_handler(&qr.data);\n\n if decoded.get() { continue 'msgloop; }\n\n }\n\n }\n\n }\n\n }\n\n data_collector.send(MessageToDataCollector::VideoThreadFinished).unwrap();\n\n });\n\n }\n\n\n\n tx\n\n}\n\n\n\nimpl Handler {\n\n fn process_video(&mut self, f: mkv::events::MatroskaFrame) {\n\n let v = self.video.as_mut().unwrap();\n\n if f.buffers.len() != 1 {\n", "file_path": "src/main.rs", "rank": 10, "score": 3.2769271194689473 }, { "content": "pub mod chirps;\n", "file_path": "src/lib.rs", "rank": 11, "score": 2.8152437837414244 }, { "content": " let mut width = None;\n\n let mut height = None;\n\n let mut colsp = None;\n\n\n\n let mut channels = None;\n\n let mut samplerate = None;\n\n let mut samplebits = None;\n\n\n\n match track.content {\n\n ElementContent::Master(ref v) => {\n\n\n\n for x in v {\n\n match x.class {\n\n Class::TrackNumber => match x.content {\n\n ElementContent::Unsigned(c) => tn = Some(c),\n\n _ => log::error!(\"Internal error 2\"),\n\n }\n\n Class::CodecID => 
match &x.content {\n\n ElementContent::Text(c) => ci = Some(c),\n\n _ => log::error!(\"Internal error 2\"),\n", "file_path": "src/main.rs", "rank": 12, "score": 2.797611464765134 }, { "content": "use mkv::elements::parser::Parser as _;\n\nuse std::{collections::VecDeque, convert::TryInto, io::Read};\n\n\n\n#[derive(gumdrop::Options)]\n", "file_path": "src/main.rs", "rank": 13, "score": 2.786308575172029 }, { "content": " //for qr in v.decoder.decode(&img) {\n\n }\n\n\n\n fn process_audio(&mut self, f : mkv::events::MatroskaFrame) {\n\n let a = self.audio.as_mut().unwrap();\n\n\n\n if f.buffers.len() != 1 {\n\n log::error!(\"Unexpected number of laced frames. Should be sole unlaced buffer.\");\n\n return;\n\n }\n\n let buf = f.buffers.into_iter().next().unwrap();\n\n let mut bufview = &buf[..];\n\n\n\n let mut tc_s = f.timecode_nanoseconds as f64 / 1000_000_000.0;\n\n //println!(\"{} len={} debt={}\", tc_s, buf.len(), a.debt.len());\n\n\n\n loop {\n\n if bufview.len() + a.debt.len() >= AUDIO_MINIBLOCK_SIZE {\n\n let mut v = Vec::with_capacity(AUDIO_MINIBLOCK_SIZE);\n\n v.extend_from_slice(&a.debt);\n", "file_path": "src/main.rs", "rank": 15, "score": 2.5740922296376274 }, { "content": " let dc = DataCollector::new(opts.threads);\n\n let (dctx, dch) = dc.start_agent();\n\n let dctx2 = dctx.clone();\n\n\n\n {\n\n let h = Handler::new(dctx, opts.threads);\n\n let hl_p = mkv::events::MatroskaDemuxer::new(h);\n\n let mut ml_p = mkv::elements::midlevel::MidlevelParser::new(hl_p);\n\n let mut ll_p = mkv::elements::parser::new();\n\n \n\n let mut buf = vec![0; 79872];\n\n loop {\n\n let len = si.read(&mut buf[..])?;\n\n if len == 0 { break }\n\n let buf = &buf[0..len];\n\n \n\n ll_p.feed_bytes(buf, &mut ml_p);\n\n }\n\n }\n\n dctx2.send(MessageToDataCollector::AudioFinished).unwrap();\n\n \n\n dch.join().unwrap();\n\n\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 16, "score": 2.4912569499185526 }, { "content": " Some(_) => {\n\n log::error!(\"Only colour space 
Y800 (grayscale) is supported\");\n\n continue;\n\n }\n\n }\n\n match (width, height) {\n\n (Some(w), Some(h)) => {\n\n log::info!(\"Video track {}x{} at track number {}\", w,h,tn);\n\n if w > 1000 || h > 1000 {\n\n log::warn!(\"Too large video. Consider downscaling it\");\n\n }\n\n if self.video.is_some() {\n\n log::error!(\"Multiple suitable video tracks found\");\n\n continue;\n\n }\n\n self.video = Some(VideoData {\n\n track: tn as usize,\n\n width: w as usize,\n\n heigth: h as usize,\n\n //decoder : zbar_rust::ZBarImageScanner::new(), //bardecoder::default_decoder(),\n", "file_path": "src/main.rs", "rank": 18, "score": 2.4424404995120206 }, { "content": "# desyncmeasure\n\nA tool to measure audio/video synchronosation errors using special source video\n", "file_path": "README.md", "rank": 19, "score": 2.4094454219238846 }, { "content": " }\n\n fft.process(&mut coefs);\n\n for i in 0..800 {\n\n let mut v = coefs[i].re * 0.04;\n\n if i < 10 { v *= ((i+1) as f32)/10.0}\n\n if i >= 790 { v *= ((800-i) as f32)/10.0}\n\n audio_data.push(v);\n\n }\n\n\n\n audio_data.extend_from_slice(&zeroes);\n\n }\n\n\n\n let mut audio_file = std::fs::File::create(\"chirps.wav\").unwrap();\n\n wav::write(wav::Header {\n\n audio_format: wav::header::WAV_FORMAT_IEEE_FLOAT,\n\n channel_count: 1,\n\n sampling_rate: 8000,\n\n bytes_per_second: 4*8000,\n\n bytes_per_sample: 4,\n\n bits_per_sample: 32,\n\n }, &wav::BitDepth::ThirtyTwoFloat(audio_data), &mut audio_file).unwrap();\n\n}\n", "file_path": "examples/genchirps.rs", "rank": 20, "score": 2.3974849186575735 }, { "content": " epoch: 0,\n\n ots_rachet_spike_charge: 0.0,\n\n first_ots_ever: true,\n\n audio_timestamps_received: 0,\n\n }\n\n }\n\n\n\n fn process_ots_wraparound(&mut self, ots: u32) -> u32 {\n\n if self.first_ots_ever && ots > 4096 {\n\n self.ots_rachet = ots - 2048;\n\n }\n\n self.first_ots_ever = false;\n\n\n\n let mut ots = ots as i32;\n\n if ots < self.ots_rachet as i32 {\n\n ots += 8192;\n\n }\n\n\n\n if ots > 
self.ots_rachet as i32 + 4096 {\n\n ots -= 8192\n", "file_path": "src/main.rs", "rank": 22, "score": 2.0779175436204413 }, { "content": "\n\n //println!(\"{} {}\",tc_s, buf.len());\n\n }\n\n}\n\n\n\nimpl mkv::events::MatroskaEventHandler for Handler {\n\n fn frame_encountered(&mut self, f: mkv::events::MatroskaFrame) {\n\n if self.video.is_none() && self.audio.is_none() {\n\n log::error!(\"No suitable audio or video tracks found\");\n\n }\n\n if let Some(ref mut v) = self.video {\n\n if v.track == f.track_number {\n\n self.process_video(f);\n\n return;\n\n }\n\n }\n\n if let Some(ref a) = self.audio {\n\n if a.track == f.track_number {\n\n self.process_audio(f);\n\n return;\n", "file_path": "src/main.rs", "rank": 23, "score": 2.0779175436204413 }, { "content": " ots += self.epoch as i32 * 8192;\n\n\n\n if self.ots_rachet >= 8192 {\n\n self.ots_rachet = 0;\n\n self.epoch += 1;\n\n }\n\n\n\n\n\n ots as u32\n\n }\n\n\n\n fn start_agent(mut self) -> (flume::Sender<MessageToDataCollector>, std::thread::JoinHandle<()>) {\n\n let (tx,rx) = flume::unbounded();\n\n let jh = std::thread::spawn(move || {\n\n let max_video_msg = if self.threads_for_video == 1 {\n\n 1\n\n } else {\n\n 2 * self.threads_for_video\n\n };\n\n let mut video_msg_sorter = std::collections::BinaryHeap::with_capacity(max_video_msg);\n", "file_path": "src/main.rs", "rank": 24, "score": 1.9889417559967286 }, { "content": " let mut block = Vec::with_capacity(800);\n\n for (_,miniblock) in a.miniblocks.iter().take(AUDIO_MINIBLOCK_COUNT) {\n\n for sample in itertools::Itertools::chunks(miniblock.iter(), 4).into_iter() {\n\n let mut sample_bytes = Vec::with_capacity(4);\n\n sample_bytes.extend(sample);\n\n let sample = f32::from_le_bytes(sample_bytes.try_into().unwrap());\n\n block.push(num_complex::Complex32::new(sample, 0.0));\n\n }\n\n }\n\n assert_eq!(block.len(), 800);\n\n\n\n if let Some(the_num) = a.analyzer.analyze_block(ts, block) {\n\n let ots = the_num as u32 * 2;\n\n\n\n 
self.data_collector.send(MessageToDataCollector::AudioTs{pts: ts, ots}).unwrap();\n\n }\n\n\n\n a.miniblocks.pop_front();\n\n }\n\n\n", "file_path": "src/main.rs", "rank": 25, "score": 1.7514523658680954 }, { "content": " match samplebits {\n\n None => log::error!(\"No BitDepth info in audio track\"),\n\n Some(32) => (),\n\n Some(_) => {\n\n log::error!(\"Audio sample size shuld be 32 bits\");\n\n continue;\n\n }\n\n }\n\n\n\n if self.audio.is_some() {\n\n log::error!(\"Multiple audio video tracks found\");\n\n continue;\n\n }\n\n\n\n self.audio = Some(AudioData {\n\n track: tn as usize,\n\n debt: Vec::with_capacity(800),\n\n miniblocks: VecDeque::with_capacity(AUDIO_MINIBLOCK_COUNT),\n\n analyzer: desyncmeasure::chirps::ChirpAnalyzer::new(),\n\n });\n", "file_path": "src/main.rs", "rank": 26, "score": 1.4932952533263202 }, { "content": " (pts - self.video_baseline_pts) - ((ots - self.video_minimal_ots) as f64)/10.0,\n\n );\n\n \n\n enc_tss.maybe_print_delta(ots);\n\n }\n\n }\n\n\n\n if exiting {\n\n break;\n\n }\n\n }\n\n });\n\n (tx, jh)\n\n }\n\n}\n\n\n\n\n\nimpl EncountreedStampData {\n\n fn maybe_print_delta(&mut self, ots: u32) {\n\n if ! 
self.delta_reported && self.video_ts.is_some() && self.audio_ts.is_some() {\n\n self.delta_reported = true;\n\n println!(\"{} De {:.3}\", ots as f32 / 10.0, self.audio_ts.unwrap() - self.video_ts.unwrap());\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 27, "score": 1.4084234835678155 }, { "content": " for msg in rx {\n\n use MessageToDataCollector::*;\n\n match msg {\n\n VideoThreadStarted => self.video_threads_active += 1,\n\n VideoThreadFinished => self.video_threads_active -= 1,\n\n AudioFinished => self.audio_finished = true,\n\n VideoTs { pts, ots } => {\n\n video_msg_sorter.push((std::cmp::Reverse(ordered_float::OrderedFloat(pts)),ots));\n\n }\n\n AudioTs { pts, ots } => {\n\n //eprintln!(\"pre-wraparound audio ots: {}\", ots);\n\n let ots = self.process_ots_wraparound(ots);\n\n let enc_tss = self.encountered_stamps.entry(ots).or_insert_with(Default::default);\n\n if enc_tss.audio_ts.is_none() {\n\n println!(\"{} aA {:.3}\", (ots as f32) / 10.0, pts);\n\n enc_tss.audio_ts = Some(pts);\n\n self.audio_timestamps_received += 1;\n\n \n\n\n\n if ots < self.audio_minimal_ots && self.audio_timestamps_received < 10 {\n", "file_path": "src/main.rs", "rank": 28, "score": 1.0588860202054722 } ]
Rust
mindup_server/src/lib.rs
noogen-projects/mindup
74bcb9288337ea4daba473c55f30fa222c8cb8bc
use borsh::BorshSerialize; pub use dapla_wasm::{alloc, dealloc}; use dapla_wasm::{ database::{execute, query, Value}, WasmSlice, }; use mindup_common::{Response, Task}; use sql_builder::{quote, SqlBuilder, SqlBuilderError}; use thiserror::Error; const TASKS_TABLE_NAME: &str = "Tasks"; #[no_mangle] pub unsafe extern "C" fn init() -> WasmSlice { let result = execute(format!( r"CREATE TABLE IF NOT EXISTS {table}( description TEXT NOT NULL, completed INTEGER NOT NULL DEFAULT 0 CHECK(completed IN (0,1)) );", table = TASKS_TABLE_NAME )); let data = result .map(drop) .try_to_vec() .expect("Init result should be serializable"); WasmSlice::from(data) } #[no_mangle] pub unsafe extern "C" fn get(uri: WasmSlice) -> WasmSlice { WasmSlice::from(do_get(uri.into_string_in_wasm())) } fn do_get(uri: String) -> String { let response = TodoRequest::parse(&uri, None) .map(|request| request.process()) .unwrap_or_else(Response::Error); serde_json::to_string(&response).unwrap_or_else(Response::json_error_from) } #[no_mangle] pub unsafe extern "C" fn post(uri: WasmSlice, body: WasmSlice) -> WasmSlice { WasmSlice::from(do_post(uri.into_string_in_wasm(), body.into_string_in_wasm())) } fn do_post(uri: String, body: String) -> String { let response = TodoRequest::parse(&uri, Some(&body)) .map(|request| request.process()) .unwrap_or_else(Response::Error); serde_json::to_string(&response).unwrap_or_else(Response::json_error_from) } #[derive(Debug, Error)] enum TaskError { #[error("Invalid SQL query: {0}")] Sql(#[from] SqlBuilderError), #[error("Error: {0}")] AnyhowError(#[from] anyhow::Error), #[error("Error message: {0}")] ErrorMessage(String), } impl From<String> for TaskError { fn from(message: String) -> Self { Self::ErrorMessage(message) } } impl From<TaskError> for Response { fn from(err: TaskError) -> Self { Response::Error(format!("{}", err)) } } enum TodoRequest { List, Add(Task), Update(u32, Task), Delete(u32), ClearCompleted, } impl TodoRequest { fn parse(uri: &str, body: 
Option<&str>) -> Result<Self, String> { let chunks: Vec<_> = uri.split(|c| c == '/').collect(); match &chunks[..] { [.., "list"] => Ok(Self::List), [.., "add"] => { let body = body.ok_or_else(|| "Task not specified".to_string())?; parse_task(body).map(Self::Add) } [.., "update", idx] => { let idx = parse_idx(idx)?; let body = body.ok_or_else(|| "Task not specified".to_string())?; parse_task(body).map(|task| Self::Update(idx, task)) } [.., "delete", idx] => parse_idx(idx).map(Self::Delete), [.., "clear_completed"] => Ok(Self::ClearCompleted), _ => Err(format!("Cannot parse uri {}, {:?}", uri, chunks)), } } fn process(self) -> Response { match self { Self::List => process_list().map(Response::List), Self::Add(task) => process_add(task).map(Response::List), Self::Update(idx, task) => process_update(idx, task).map(|_| Response::Empty), Self::Delete(idx) => process_delete(idx).map(Response::List), Self::ClearCompleted => process_clear_completed().map(Response::List), } .unwrap_or_else(Response::from) } } fn parse_idx(source: &str) -> Result<u32, String> { source .parse() .map_err(|err| format!("Parse task index error: {:?}", err)) } fn parse_task(source: &str) -> Result<Task, String> { serde_json::from_str(source).map_err(|err| format!("Parse task error: {:?}", err)) } fn process_list() -> Result<Vec<Task>, TaskError> { let sql = SqlBuilder::select_from(TASKS_TABLE_NAME).sql()?; let rows = query(sql)?; let mut tasks = Vec::with_capacity(rows.len()); for row in rows { tasks.push(task_from(row.into_values())?); } Ok(tasks) } fn process_add(task: Task) -> Result<Vec<Task>, TaskError> { let sql = SqlBuilder::insert_into(TASKS_TABLE_NAME) .fields(&["description", "completed"]) .values(&[quote(task.description), if task.completed { 1 } else { 0 }.to_string()]) .sql()?; execute(sql)?; process_list() } fn process_update(idx: u32, update: Task) -> Result<(), TaskError> { let sql = SqlBuilder::update_table(TASKS_TABLE_NAME) .set("description", quote(update.description)) 
.set("completed", update.completed) .and_where_eq("rowid", idx) .sql()?; execute(sql)?; execute("VACUUM")?; Ok(()) } fn process_delete(idx: u32) -> Result<Vec<Task>, TaskError> { let sql = SqlBuilder::delete_from(TASKS_TABLE_NAME) .and_where_eq("rowid", idx) .sql()?; execute(sql)?; execute("VACUUM")?; process_list() } fn process_clear_completed() -> Result<Vec<Task>, TaskError> { let sql = SqlBuilder::delete_from(TASKS_TABLE_NAME) .and_where_ne("completed", 0) .sql()?; execute(sql)?; execute("VACUUM")?; process_list() } fn task_from(values: Vec<Value>) -> Result<Task, String> { let mut task = Task::default(); let mut iter = values.into_iter(); match iter.next() { Some(Value::Text(description)) => task.description = description, Some(value) => Err(format!("Incorrect task description value: {:?}", value))?, None => Err("Task description value does not exist".to_string())?, } match iter.next() { Some(Value::Integer(completed)) => task.completed = completed != 0, Some(value) => Err(format!("Incorrect task completed value: {:?}", value))?, None => Err("Task completed value does not exist".to_string())?, } Ok(task) }
use borsh::BorshSerialize; pub use dapla_wasm::{alloc, dealloc}; use dapla_wasm::{ database::{execute, query, Value}, WasmSlice, }; use mindup_common::{Response, Task}; use sql_builder::{quote, SqlBuilder, SqlBuilderError}; use thiserror::Error; const TASKS_TABLE_NAME: &str = "Tasks"; #[no_mangle] pub unsafe extern "C" fn init() -> WasmSlice { let result = execute(format!( r"CREATE TABLE IF NOT EXISTS {table}( description TEXT NOT NULL,
.map_err(|err| format!("Parse task index error: {:?}", err)) } fn parse_task(source: &str) -> Result<Task, String> { serde_json::from_str(source).map_err(|err| format!("Parse task error: {:?}", err)) } fn process_list() -> Result<Vec<Task>, TaskError> { let sql = SqlBuilder::select_from(TASKS_TABLE_NAME).sql()?; let rows = query(sql)?; let mut tasks = Vec::with_capacity(rows.len()); for row in rows { tasks.push(task_from(row.into_values())?); } Ok(tasks) } fn process_add(task: Task) -> Result<Vec<Task>, TaskError> { let sql = SqlBuilder::insert_into(TASKS_TABLE_NAME) .fields(&["description", "completed"]) .values(&[quote(task.description), if task.completed { 1 } else { 0 }.to_string()]) .sql()?; execute(sql)?; process_list() } fn process_update(idx: u32, update: Task) -> Result<(), TaskError> { let sql = SqlBuilder::update_table(TASKS_TABLE_NAME) .set("description", quote(update.description)) .set("completed", update.completed) .and_where_eq("rowid", idx) .sql()?; execute(sql)?; execute("VACUUM")?; Ok(()) } fn process_delete(idx: u32) -> Result<Vec<Task>, TaskError> { let sql = SqlBuilder::delete_from(TASKS_TABLE_NAME) .and_where_eq("rowid", idx) .sql()?; execute(sql)?; execute("VACUUM")?; process_list() } fn process_clear_completed() -> Result<Vec<Task>, TaskError> { let sql = SqlBuilder::delete_from(TASKS_TABLE_NAME) .and_where_ne("completed", 0) .sql()?; execute(sql)?; execute("VACUUM")?; process_list() } fn task_from(values: Vec<Value>) -> Result<Task, String> { let mut task = Task::default(); let mut iter = values.into_iter(); match iter.next() { Some(Value::Text(description)) => task.description = description, Some(value) => Err(format!("Incorrect task description value: {:?}", value))?, None => Err("Task description value does not exist".to_string())?, } match iter.next() { Some(Value::Integer(completed)) => task.completed = completed != 0, Some(value) => Err(format!("Incorrect task completed value: {:?}", value))?, None => Err("Task completed value does 
not exist".to_string())?, } Ok(task) }
completed INTEGER NOT NULL DEFAULT 0 CHECK(completed IN (0,1)) );", table = TASKS_TABLE_NAME )); let data = result .map(drop) .try_to_vec() .expect("Init result should be serializable"); WasmSlice::from(data) } #[no_mangle] pub unsafe extern "C" fn get(uri: WasmSlice) -> WasmSlice { WasmSlice::from(do_get(uri.into_string_in_wasm())) } fn do_get(uri: String) -> String { let response = TodoRequest::parse(&uri, None) .map(|request| request.process()) .unwrap_or_else(Response::Error); serde_json::to_string(&response).unwrap_or_else(Response::json_error_from) } #[no_mangle] pub unsafe extern "C" fn post(uri: WasmSlice, body: WasmSlice) -> WasmSlice { WasmSlice::from(do_post(uri.into_string_in_wasm(), body.into_string_in_wasm())) } fn do_post(uri: String, body: String) -> String { let response = TodoRequest::parse(&uri, Some(&body)) .map(|request| request.process()) .unwrap_or_else(Response::Error); serde_json::to_string(&response).unwrap_or_else(Response::json_error_from) } #[derive(Debug, Error)] enum TaskError { #[error("Invalid SQL query: {0}")] Sql(#[from] SqlBuilderError), #[error("Error: {0}")] AnyhowError(#[from] anyhow::Error), #[error("Error message: {0}")] ErrorMessage(String), } impl From<String> for TaskError { fn from(message: String) -> Self { Self::ErrorMessage(message) } } impl From<TaskError> for Response { fn from(err: TaskError) -> Self { Response::Error(format!("{}", err)) } } enum TodoRequest { List, Add(Task), Update(u32, Task), Delete(u32), ClearCompleted, } impl TodoRequest { fn parse(uri: &str, body: Option<&str>) -> Result<Self, String> { let chunks: Vec<_> = uri.split(|c| c == '/').collect(); match &chunks[..] 
{ [.., "list"] => Ok(Self::List), [.., "add"] => { let body = body.ok_or_else(|| "Task not specified".to_string())?; parse_task(body).map(Self::Add) } [.., "update", idx] => { let idx = parse_idx(idx)?; let body = body.ok_or_else(|| "Task not specified".to_string())?; parse_task(body).map(|task| Self::Update(idx, task)) } [.., "delete", idx] => parse_idx(idx).map(Self::Delete), [.., "clear_completed"] => Ok(Self::ClearCompleted), _ => Err(format!("Cannot parse uri {}, {:?}", uri, chunks)), } } fn process(self) -> Response { match self { Self::List => process_list().map(Response::List), Self::Add(task) => process_add(task).map(Response::List), Self::Update(idx, task) => process_update(idx, task).map(|_| Response::Empty), Self::Delete(idx) => process_delete(idx).map(Response::List), Self::ClearCompleted => process_clear_completed().map(Response::List), } .unwrap_or_else(Response::from) } } fn parse_idx(source: &str) -> Result<u32, String> { source .parse()
random
[ { "content": "fn main() {\n\n yew::start_app::<Root>();\n\n}\n", "file_path": "mindup_client/src/main.rs", "rank": 8, "score": 31229.389855600333 }, { "content": "use std::fmt;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Default, Clone, Deserialize, Serialize)]\n\npub struct Task {\n\n pub description: String,\n\n pub completed: bool,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub enum Response {\n\n List(Vec<Task>),\n\n Task(Task),\n\n Empty,\n\n Error(String),\n\n}\n\n\n\nimpl Response {\n\n pub fn json_error_from<E: fmt::Debug>(err: E) -> String {\n\n format!(r#\"{{\"Error\":\"{:?}\"}}\"#, err)\n\n }\n\n}\n", "file_path": "mindup_common/src/lib.rs", "rank": 14, "score": 6.657972028733939 }, { "content": " self.state.value.clear();\n\n false\n\n }\n\n Msg::Edit => {\n\n if let Some(edit) = self.state.edit.take() {\n\n let idx = self.state.filtered_task_idx(edit.task_idx);\n\n let description = edit.value.trim();\n\n\n\n let msg = if !description.is_empty() {\n\n self.state.list[idx].description = description.to_string();\n\n Msg::Save(idx)\n\n } else {\n\n Msg::Remove(idx)\n\n };\n\n self.link.send_message(msg);\n\n }\n\n false\n\n }\n\n Msg::TypeNew(value) => {\n\n self.state.value = value;\n", "file_path": "mindup_client/src/main.rs", "rank": 15, "score": 5.505572741618248 }, { "content": " false\n\n }\n\n Msg::TypeEdit(value) => {\n\n if let Some(edit) = &mut self.state.edit {\n\n edit.value = value;\n\n }\n\n false\n\n }\n\n Msg::Save(idx) => {\n\n let task = &self.state.list[idx];\n\n self.fetcher\n\n .send_post(\n\n format!(\"/mindup/update/{}\", idx + 1),\n\n format!(\n\n r#\"{{\"description\":\"{}\",\"completed\":{}}}\"#,\n\n task.description, task.completed\n\n ),\n\n JsonFetcher::callback(&self.link, Msg::Fetch, Msg::Error),\n\n )\n\n .context(\"Save task error\")\n", "file_path": "mindup_client/src/main.rs", "rank": 16, "score": 5.163785476652361 }, { "content": " fn view_task_edit_input(&self, idx: usize) -> Html 
{\n\n if let Some(Edit { value, task_idx }) = &self.state.edit {\n\n if *task_idx == idx {\n\n return html! {\n\n <input class = \"edit\" type = \"text\" ref = self.focus_ref.clone() value = value\n\n onmouseover = self.link.callback(|_| Msg::Focus)\n\n oninput = self.link.callback(|event: InputData| Msg::TypeEdit(event.value))\n\n onblur = self.link.callback(move |_| Msg::Edit)\n\n onkeypress = self.link.callback(move |event: KeyboardEvent| {\n\n if event.key() == \"Enter\" { Msg::Edit } else { Msg::Nope }\n\n }) />\n\n };\n\n }\n\n }\n\n html! { <input type = \"hidden\" /> }\n\n }\n\n}\n\n\n", "file_path": "mindup_client/src/main.rs", "rank": 17, "score": 4.216656773673522 }, { "content": " fetcher,\n\n state: Default::default(),\n\n focus_ref: Default::default(),\n\n }\n\n }\n\n\n\n fn update(&mut self, msg: Self::Message) -> bool {\n\n match msg {\n\n Msg::Add => {\n\n let description = self.state.value.trim();\n\n if !description.is_empty() {\n\n self.fetcher\n\n .send_post(\n\n \"/mindup/add\",\n\n format!(r#\"{{\"description\":\"{}\",\"completed\":false}}\"#, description),\n\n JsonFetcher::callback(&self.link, Msg::Fetch, Msg::Error),\n\n )\n\n .context(\"Add task error\")\n\n .msg_error(&self.link);\n\n }\n", "file_path": "mindup_client/src/main.rs", "rank": 18, "score": 3.9821088232413997 }, { "content": "use std::{io, path::Path};\n\n\n\nuse dapla_server::{actix_web, settings::Settings};\n\n\n\n#[actix_web::main]\n\nasync fn main() -> io::Result<()> {\n\n let settings = Settings::new(Path::new(\"app_runner\").join(\"settings.toml\")).expect(\"Settings should be configured\");\n\n env_logger::init_from_env(env_logger::Env::new().default_filter_or(settings.log.level.to_string()));\n\n\n\n dapla_server::run(settings).await\n\n}\n", "file_path": "app_runner/src/main.rs", "rank": 19, "score": 3.8520626029084566 }, { "content": " self.state.edit = Some(Edit {\n\n value: self.state.list[idx].description.clone(),\n\n task_idx: idx,\n\n });\n\n true\n\n }\n\n 
Msg::ToggleAll => {\n\n let status = !self.state.is_all_completed();\n\n for (idx, task) in self.state.list.iter_mut().enumerate() {\n\n if self.state.filter.fit(task) && task.completed != status {\n\n task.completed = status;\n\n self.link.send_message(Msg::Save(idx));\n\n }\n\n }\n\n false\n\n }\n\n Msg::Toggle(idx) => {\n\n let idx = self.state.toggle(idx);\n\n self.link.send_message(Msg::Save(idx));\n\n false\n", "file_path": "mindup_client/src/main.rs", "rank": 20, "score": 3.6800250117581266 }, { "content": " }\n\n\n\n fn view_input(&self) -> Html {\n\n html! {\n\n <input class = \"new-todo\" placeholder = \"What needs to be done?\"\n\n value = &self.state.value\n\n oninput = self.link.callback(|event: InputData| Msg::TypeNew(event.value))\n\n onkeypress = self.link.callback(|event: KeyboardEvent| {\n\n if event.key() == \"Enter\" { Msg::Add } else { Msg::Nope }\n\n }) />\n\n }\n\n }\n\n\n\n fn view_task(&self, (idx, task): (usize, &Task)) -> Html {\n\n let mut classes = vec![\"todo\"];\n\n if self\n\n .state\n\n .edit\n\n .as_ref()\n\n .map(|edit| edit.task_idx == idx)\n", "file_path": "mindup_client/src/main.rs", "rank": 21, "score": 3.3221298866768185 }, { "content": "#![recursion_limit = \"512\"]\n\n\n\nuse anyhow::{anyhow, Context, Error};\n\nuse dapla_yew::{JsonFetcher, MsgError};\n\nuse mindup_common::{Response, Task};\n\nuse strum::{EnumIter, IntoEnumIterator, ToString};\n\nuse web_sys::HtmlInputElement;\n\nuse yew::{html, services::console::ConsoleService, Component, ComponentLink, Html, InputData, KeyboardEvent, NodeRef};\n\n\n\n#[derive(EnumIter, ToString, Clone, Copy, PartialEq)]\n", "file_path": "mindup_client/src/main.rs", "rank": 22, "score": 3.0772140132197396 }, { "content": " .unwrap_or(false)\n\n {\n\n classes.push(\"editing\");\n\n }\n\n if task.completed {\n\n classes.push(\"completed\");\n\n }\n\n html! 
{\n\n <li class = classes>\n\n <div class = \"view\">\n\n <input type = \"checkbox\" class = \"toggle\" checked = task.completed\n\n onclick = self.link.callback(move |_| Msg::Toggle(idx)) />\n\n <label ondblclick = self.link.callback(move |_| Msg::ToggleEdit(idx))>{ &task.description }</label>\n\n <button class = \"destroy\" onclick = self.link.callback(move |_| Msg::Remove(idx)) />\n\n </div>\n\n { self.view_task_edit_input(idx) }\n\n </li>\n\n }\n\n }\n\n\n", "file_path": "mindup_client/src/main.rs", "rank": 23, "score": 2.5407649764126026 }, { "content": " self.list.len()\n\n }\n\n\n\n fn total_completed(&self) -> usize {\n\n self.list.iter().filter(|task| Filter::Completed.fit(task)).count()\n\n }\n\n\n\n fn is_all_completed(&self) -> bool {\n\n let mut filtered_iter = self.list.iter().filter(|task| self.filter.fit(task)).peekable();\n\n\n\n if filtered_iter.peek().is_none() {\n\n return false;\n\n }\n\n\n\n filtered_iter.all(|task| task.completed)\n\n }\n\n\n\n fn toggle(&mut self, idx: usize) -> usize {\n\n let idx = self.filtered_task_idx(idx);\n\n let task = &mut self.list[idx];\n", "file_path": "mindup_client/src/main.rs", "rank": 24, "score": 1.8855699660482634 }, { "content": " task.completed = !task.completed;\n\n idx\n\n }\n\n\n\n fn remove(&mut self, idx: usize) -> usize {\n\n let idx = self.filtered_task_idx(idx);\n\n self.list.remove(idx);\n\n idx\n\n }\n\n}\n\n\n", "file_path": "mindup_client/src/main.rs", "rank": 25, "score": 1.785123186644432 }, { "content": " checked = self.state.is_all_completed()\n\n onclick = self.link.callback(|_| Msg::ToggleAll) />\n\n <label for = \"toggle-all\" />\n\n <ul class = \"todo-list\">\n\n { for self.state.list.iter().filter(|task| self.state.filter.fit(task)).enumerate().map(|task| self.view_task(task)) }\n\n </ul>\n\n </section>\n\n <footer class = (\"footer\", hidden_class)>\n\n <span class = \"todo-count\">\n\n <strong>{ self.state.total() }</strong>\n\n { \" item(s) left\" }\n\n </span>\n\n <ul class = 
\"filters\">\n\n { for Filter::iter().map(|filter| self.view_filter(filter)) }\n\n </ul>\n\n <button class = \"clear-completed\" onclick = self.link.callback(|_| Msg::ClearCompleted)>\n\n { format!(\"Clear completed ({})\", self.state.total_completed()) }\n\n </button>\n\n </footer>\n\n </section>\n", "file_path": "mindup_client/src/main.rs", "rank": 27, "score": 1.547998765849273 }, { "content": " }\n\n Msg::Fetch(Response::List(list)) => {\n\n self.state.list = list;\n\n true\n\n }\n\n Msg::Fetch(Response::Task(task)) => {\n\n self.state.list.push(task);\n\n true\n\n }\n\n Msg::Fetch(Response::Empty) => true,\n\n Msg::Fetch(Response::Error(err)) => {\n\n self.link.send_message(Msg::Error(anyhow!(\"{}\", err)));\n\n false\n\n }\n\n Msg::Error(err) => {\n\n ConsoleService::error(&format!(\"{}\", err));\n\n true\n\n }\n\n Msg::Nope => false,\n\n }\n", "file_path": "mindup_client/src/main.rs", "rank": 28, "score": 1.4465054615513733 }, { "content": "# Mind Up\n\n\n\nThe [dapla](https://github.com/noogen-projects/dapla)-application that upgrade the mind.\n\n\n\n## Build\n\n\n\nBuilding mindup requires the latest `stable` Rust toolchain, the `wasm32` target and `cargo-make` and\n\n`wasm-bindgen` build tools.\n\n\n\nTo install Rust and its toolchain and target via [rustup](https://rustup.rs/), if it is not already installed, run:\n\n\n\n```shell\n\ncurl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh\n\nsource $HOME/.cargo/env\n\n\n\nrustup toolchain install stable\n\nrustup target add wasm32-unknown-unknown\n\n```\n\n\n\nTo install [cargo-make](https://github.com/sagiegurari/cargo-make) and\n\n[wasm-bindgen](https://github.com/rustwasm/wasm-bindgen), run:\n\n\n\n```shell\n\ncargo install --force cargo-make wasm-bindgen-cli\n\n```\n\n\n\nTo build mindup and its example, run:\n\n\n\n```shell\n\ncargo make all\n\n```\n\n\n\nOr for a debug build, run:\n\n\n\n```shell\n\ncargo make all -p debug\n\n```\n\n\n\n## Run example\n\n\n\nRun the dapla server with mindup 
application:\n\n\n\n```shell\n\ncargo make run\n\n```\n\n\n\nOr for a debug build:\n\n\n\n```shell\n\ncargo make run -p debug\n\n```\n\n\n\nThen visit [http://localhost:8080/mindup](http://localhost:8080/mindup). You can change the default port and other settings\n\nby editing `app_runner/settings.toml` file.\n\n\n\n## Development notes\n\n\n\nTo check the project, use the following command:\n\n\n\n```shell script\n\ncargo check --workspace --all-features --all-targets\n\n```\n\n\n\nTo run all tests, use the following command:\n\n\n\n```shell script\n\ncargo test --all-features --all-targets\n\n```\n\n\n\nTo check and perform formatting, use the following commands:\n\n\n\n```shell script\n\ncargo +nightly fmt -- --check\n\ncargo +nightly fmt\n\n```\n\n\n\nTo enable autoformatting for IntelliJ IDEA with the Rust plugin:\n\n\n\n`File -> Settings -> Languages & Frameworks -> Rust -> Rustfmt, check \"Run rustfmt on Save\"`\n\n\n\nTo run clippy, use the following command:\n\n\n\n```shell script\n\ncargo clippy --all-targets --all-features -- -D warnings\n\n```\n\n\n\nTo setup git hook, use the following command:\n\n\n\n```shell script\n\ncp .git-pre-push.sh .git/hooks/pre-push\n\n```\n", "file_path": "README.md", "rank": 29, "score": 1.2692365566785588 }, { "content": " .msg_error(&self.link);\n\n false\n\n }\n\n Msg::Remove(idx) => {\n\n let idx = self.state.remove(idx);\n\n self.fetcher\n\n .send_post(\n\n format!(\"/mindup/delete/{}\", idx + 1),\n\n \"\",\n\n JsonFetcher::callback(&self.link, Msg::Fetch, Msg::Error),\n\n )\n\n .context(\"Remove task error\")\n\n .msg_error(&self.link);\n\n false\n\n }\n\n Msg::SetFilter(filter) => {\n\n self.state.filter = filter;\n\n true\n\n }\n\n Msg::ToggleEdit(idx) => {\n", "file_path": "mindup_client/src/main.rs", "rank": 30, "score": 0.8568179858685172 }, { "content": " }\n\n Msg::ClearCompleted => {\n\n self.fetcher\n\n .send_post(\n\n \"/mindup/clear_completed\",\n\n \"\",\n\n JsonFetcher::callback(&self.link, Msg::Fetch, 
Msg::Error),\n\n )\n\n .context(\"Clear completed tasks error\")\n\n .msg_error(&self.link);\n\n false\n\n }\n\n Msg::Focus => {\n\n if let Some(input) = self.focus_ref.cast::<HtmlInputElement>() {\n\n input\n\n .focus()\n\n .map_err(|err| anyhow!(\"Input focus error: {:?}\", err))\n\n .msg_error(&self.link);\n\n }\n\n false\n", "file_path": "mindup_client/src/main.rs", "rank": 31, "score": 0.8288782689380223 } ]
Rust
apis/clyde-3g-eps-api/examples/eps.rs
wau/kubos
074b61738a49ef87fbb08814285fa3173f4f32df
/* * Copyright (C) 2018 Kubos Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use clyde_3g_eps_api::*; use rust_i2c::*; use std::thread; use std::time::Duration; macro_rules! print_result { ($p:expr, $r:expr) => { match $r { Ok(v) => println!("{} - {:#?}", $p, v), Err(e) => println!("{} Err - {:#?}", $p, e), } }; } macro_rules! dump_mother_telem { ( $eps:expr, $($type:ident,)+ ) => { $( print_result!( format!("{:?}", MotherboardTelemetry::Type::$type), $eps.get_motherboard_telemetry(MotherboardTelemetry::Type::$type) ); thread::sleep(Duration::from_millis(100)); )+ }; } macro_rules! dump_daughter_telem { ( $eps:expr, $($type:ident,)+ ) => { $( print_result!( format!("{:?}", DaughterboardTelemetry::Type::$type), $eps.get_daughterboard_telemetry(DaughterboardTelemetry::Type::$type) ); thread::sleep(Duration::from_millis(100)); )+ }; } macro_rules! 
dump_reset_telem { ( $eps:expr, $($type:ident,)+ ) => { $( print_result!( format!("{:?}", ResetTelemetry::Type::$type), $eps.get_reset_telemetry(ResetTelemetry::Type::$type) ); thread::sleep(Duration::from_millis(100)); )+ }; } pub fn main() { let eps = Eps::new(Connection::from_path("/dev/i2c-1", 0x2B)); print_result!("Version Info", eps.get_version_info()); thread::sleep(Duration::from_millis(100)); print_result!("Board Status", eps.get_board_status()); thread::sleep(Duration::from_millis(100)); print_result!("Checksum", eps.get_checksum()); thread::sleep(Duration::from_millis(100)); print_result!("Last Error", eps.get_last_error()); thread::sleep(Duration::from_millis(100)); print_result!("Watchdog Period", eps.get_comms_watchdog_period()); thread::sleep(Duration::from_millis(100)); dump_mother_telem!( eps, VoltageFeedingBcr1, CurrentBcr1Sa1a, CurrentBcr1Sa1b, ArrayTempSa1a, ArrayTempSa1b, SunDetectorSa1a, SunDetectorSa1b, VoltageFeedingBcr2, CurrentBcr2Sa2a, CurrentBcr2Sa2b, ArrayTempSa2a, ArrayTempSa2b, SunDetectorSa2a, SunDetectorSa2b, VoltageFeedingBcr3, CurrentBcr3Sa3a, CurrentBcr3Sa3b, ArrayTempSa3a, ArrayTempSa3b, SunDetectorSa3a, SunDetectorSa3b, BcrOutputCurrent, BcrOutputVoltage, CurrentDraw3V3, CurrentDraw5V, OutputCurrent12V, OutputVoltage12V, OutputCurrentBattery, OutputVoltageBattery, OutputCurrent5v, OutputVoltage5v, OutputCurrent33v, OutputVoltage33v, OutputVoltageSwitch1, OutputCurrentSwitch1, OutputVoltageSwitch2, OutputCurrentSwitch2, OutputVoltageSwitch3, OutputCurrentSwitch3, OutputVoltageSwitch4, OutputCurrentSwitch4, OutputVoltageSwitch5, OutputCurrentSwitch5, OutputVoltageSwitch6, OutputCurrentSwitch6, OutputVoltageSwitch7, OutputCurrentSwitch7, OutputVoltageSwitch8, OutputCurrentSwitch8, OutputVoltageSwitch9, OutputCurrentSwitch9, OutputVoltageSwitch10, OutputCurrentSwitch10, BoardTemperature, ); dump_daughter_telem!( eps, VoltageFeedingBcr4, CurrentBcr4Sa4a, CurrentBcr4Sa4b, ArrayTempSa4a, ArrayTempSa4b, SunDetectorSa4a, 
SunDetectorSa4b, VoltageFeedingBcr5, CurrentBcr5Sa5a, CurrentBcr5Sa5b, ArrayTempSa5a, ArrayTempSa5b, SunDetectorSa5a, SunDetectorSa5b, VoltageFeedingBcr6, CurrentBcr6Sa6a, CurrentBcr6Sa6b, ArrayTempSa6a, ArrayTempSa6b, SunDetectorSa6a, SunDetectorSa6b, VoltageFeedingBcr7, CurrentBcr7Sa7a, CurrentBcr7Sa7b, ArrayTempSa7a, ArrayTempSa7b, SunDetectorSa7a, SunDetectorSa7b, VoltageFeedingBcr8, CurrentBcr8Sa8a, CurrentBcr8Sa8b, ArrayTempSa8a, ArrayTempSa8b, SunDetectorSa8a, SunDetectorSa8b, VoltageFeedingBcr9, CurrentBcr9Sa9a, CurrentBcr9Sa9b, ArrayTempSa9a, ArrayTempSa9b, SunDetectorSa9a, SunDetectorSa9b, BoardTemperature, ); dump_reset_telem!(eps, BrownOut, AutomaticSoftware, Manual, Watchdog,); }
/* * Copyright (C) 2018 Kubos Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance w
, ArrayTempSa1a, ArrayTempSa1b, SunDetectorSa1a, SunDetectorSa1b, VoltageFeedingBcr2, CurrentBcr2Sa2a, CurrentBcr2Sa2b, ArrayTempSa2a, ArrayTempSa2b, SunDetectorSa2a, SunDetectorSa2b, VoltageFeedingBcr3, CurrentBcr3Sa3a, CurrentBcr3Sa3b, ArrayTempSa3a, ArrayTempSa3b, SunDetectorSa3a, SunDetectorSa3b, BcrOutputCurrent, BcrOutputVoltage, CurrentDraw3V3, CurrentDraw5V, OutputCurrent12V, OutputVoltage12V, OutputCurrentBattery, OutputVoltageBattery, OutputCurrent5v, OutputVoltage5v, OutputCurrent33v, OutputVoltage33v, OutputVoltageSwitch1, OutputCurrentSwitch1, OutputVoltageSwitch2, OutputCurrentSwitch2, OutputVoltageSwitch3, OutputCurrentSwitch3, OutputVoltageSwitch4, OutputCurrentSwitch4, OutputVoltageSwitch5, OutputCurrentSwitch5, OutputVoltageSwitch6, OutputCurrentSwitch6, OutputVoltageSwitch7, OutputCurrentSwitch7, OutputVoltageSwitch8, OutputCurrentSwitch8, OutputVoltageSwitch9, OutputCurrentSwitch9, OutputVoltageSwitch10, OutputCurrentSwitch10, BoardTemperature, ); dump_daughter_telem!( eps, VoltageFeedingBcr4, CurrentBcr4Sa4a, CurrentBcr4Sa4b, ArrayTempSa4a, ArrayTempSa4b, SunDetectorSa4a, SunDetectorSa4b, VoltageFeedingBcr5, CurrentBcr5Sa5a, CurrentBcr5Sa5b, ArrayTempSa5a, ArrayTempSa5b, SunDetectorSa5a, SunDetectorSa5b, VoltageFeedingBcr6, CurrentBcr6Sa6a, CurrentBcr6Sa6b, ArrayTempSa6a, ArrayTempSa6b, SunDetectorSa6a, SunDetectorSa6b, VoltageFeedingBcr7, CurrentBcr7Sa7a, CurrentBcr7Sa7b, ArrayTempSa7a, ArrayTempSa7b, SunDetectorSa7a, SunDetectorSa7b, VoltageFeedingBcr8, CurrentBcr8Sa8a, CurrentBcr8Sa8b, ArrayTempSa8a, ArrayTempSa8b, SunDetectorSa8a, SunDetectorSa8b, VoltageFeedingBcr9, CurrentBcr9Sa9a, CurrentBcr9Sa9b, ArrayTempSa9a, ArrayTempSa9b, SunDetectorSa9a, SunDetectorSa9b, BoardTemperature, ); dump_reset_telem!(eps, BrownOut, AutomaticSoftware, Manual, Watchdog,); }
ith the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use clyde_3g_eps_api::*; use rust_i2c::*; use std::thread; use std::time::Duration; macro_rules! print_result { ($p:expr, $r:expr) => { match $r { Ok(v) => println!("{} - {:#?}", $p, v), Err(e) => println!("{} Err - {:#?}", $p, e), } }; } macro_rules! dump_mother_telem { ( $eps:expr, $($type:ident,)+ ) => { $( print_result!( format!("{:?}", MotherboardTelemetry::Type::$type), $eps.get_motherboard_telemetry(MotherboardTelemetry::Type::$type) ); thread::sleep(Duration::from_millis(100)); )+ }; } macro_rules! dump_daughter_telem { ( $eps:expr, $($type:ident,)+ ) => { $( print_result!( format!("{:?}", DaughterboardTelemetry::Type::$type), $eps.get_daughterboard_telemetry(DaughterboardTelemetry::Type::$type) ); thread::sleep(Duration::from_millis(100)); )+ }; } macro_rules! 
dump_reset_telem { ( $eps:expr, $($type:ident,)+ ) => { $( print_result!( format!("{:?}", ResetTelemetry::Type::$type), $eps.get_reset_telemetry(ResetTelemetry::Type::$type) ); thread::sleep(Duration::from_millis(100)); )+ }; } pub fn main() { let eps = Eps::new(Connection::from_path("/dev/i2c-1", 0x2B)); print_result!("Version Info", eps.get_version_info()); thread::sleep(Duration::from_millis(100)); print_result!("Board Status", eps.get_board_status()); thread::sleep(Duration::from_millis(100)); print_result!("Checksum", eps.get_checksum()); thread::sleep(Duration::from_millis(100)); print_result!("Last Error", eps.get_last_error()); thread::sleep(Duration::from_millis(100)); print_result!("Watchdog Period", eps.get_comms_watchdog_period()); thread::sleep(Duration::from_millis(100)); dump_mother_telem!( eps, VoltageFeedingBcr1, CurrentBcr1Sa1a, CurrentBcr1Sa1b
random
[ { "content": " const char* file;\n", "file_path": "cmocka/cmocka-1.1.0/include/cmocka.h", "rank": 0, "score": 132737.9092915765 }, { "content": "/// Fetch information about the version(s) of KubOS installed in the system\n\n///\n\n/// Returns the current and previous version(s) of KubOS.\n\npub fn kubos_versions() -> KubosVersions {\n\n let vars = UBootVars::new();\n\n KubosVersions {\n\n curr: vars.get_str(uboot::VAR_KUBOS_CURR_VERSION),\n\n prev: vars.get_str(uboot::VAR_KUBOS_PREV_VERSION),\n\n }\n\n}\n\n\n", "file_path": "apis/system-api/src/lib.rs", "rank": 1, "score": 106169.62557365115 }, { "content": "#define TEST_I2C \"/dev/i2c-1\"\n", "file_path": "hal/kubos-hal/test/i2c/i2c.c", "rank": 2, "score": 89904.88976729667 }, { "content": "use clap::{App, AppSettings, Arg, SubCommand};\n\nuse failure::bail;\n\nuse file_protocol::{FileProtocol, FileProtocolConfig, State};\n\nuse log::{error, info};\n\nuse simplelog::*;\n\nuse std::path::Path;\n\nuse std::time::Duration;\n\n\n", "file_path": "clients/kubos-file-client/src/main.rs", "rank": 3, "score": 89756.82932718689 }, { "content": " // Send our file request to the remote addr and verify that it's\n\n // going to be able to send it\n\n f_protocol.send_import(channel, source_path)?;\n\n\n\n // Wait for the request reply.\n\n // Note/TODO: We don't use a timeout here because we don't know how long it will\n\n // take the server to prepare the file we've requested.\n\n // Larger files (> 100MB) can take over a minute to process.\n\n let reply = match f_protocol.recv(None) {\n\n Ok(message) => message,\n\n Err(error) => bail!(\"Failed to import file: {}\", error),\n\n };\n\n\n\n let state = f_protocol.process_message(\n\n reply,\n\n &State::StartReceive {\n\n path: target_path.to_string(),\n\n },\n\n )?;\n\n\n\n f_protocol.message_engine(|d| f_protocol.recv(Some(d)), Duration::from_secs(2), &state)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "clients/kubos-file-client/src/main.rs", "rank": 4, "score": 89747.55070109505 
}, { "content": " let upload_args = args.subcommand_matches(\"upload\").unwrap();\n\n let source_path = upload_args.value_of(\"source_path\").unwrap();\n\n let target_path = match upload_args.value_of(\"target_path\") {\n\n Some(path) => path.to_owned(),\n\n None => Path::new(&source_path)\n\n .file_name()\n\n .unwrap()\n\n .to_string_lossy()\n\n .into_owned(),\n\n };\n\n\n\n upload(\n\n host_ip,\n\n &remote_addr,\n\n &source_path,\n\n &target_path,\n\n Some(storage_prefix),\n\n chunk_size,\n\n hold_count,\n\n )\n", "file_path": "clients/kubos-file-client/src/main.rs", "rank": 5, "score": 89744.99845168507 }, { "content": " }\n\n Some(\"download\") => {\n\n let download_args = args.subcommand_matches(\"download\").unwrap();\n\n let source_path = download_args.value_of(\"source_path\").unwrap();\n\n let target_path = match download_args.value_of(\"target_path\") {\n\n Some(path) => path.to_owned(),\n\n None => Path::new(&source_path)\n\n .file_name()\n\n .unwrap()\n\n .to_string_lossy()\n\n .into_owned(),\n\n };\n\n\n\n download(\n\n host_ip,\n\n &remote_addr,\n\n &source_path,\n\n &target_path,\n\n Some(storage_prefix),\n\n chunk_size,\n", "file_path": "clients/kubos-file-client/src/main.rs", "rank": 6, "score": 89744.99845168507 }, { "content": " // Generate channel id for transaction\n\n let channel = f_protocol.generate_channel()?;\n\n\n\n // Tell our destination the hash and number of chunks to expect\n\n f_protocol.send_metadata(channel, &hash, num_chunks)?;\n\n\n\n // Send export command for file\n\n f_protocol.send_export(channel, &hash, &target_path, mode)?;\n\n\n\n // Start the engine to send the file data chunks\n\n f_protocol.message_engine(\n\n |d| f_protocol.recv(Some(d)),\n\n Duration::from_secs(2),\n\n &State::Transmitting,\n\n )?;\n\n Ok(())\n\n}\n\n\n", "file_path": "clients/kubos-file-client/src/main.rs", "rank": 7, "score": 89744.99845168507 }, { "content": " .arg(\n\n Arg::with_name(\"remote_port\")\n\n .short(\"-p\")\n\n .takes_value(true)\n\n 
.default_value(\"7000\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"storage_prefix\")\n\n .short(\"-s\")\n\n .takes_value(true)\n\n .default_value(\"file-storage\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"chunk_size\")\n\n .short(\"-c\")\n\n .takes_value(true)\n\n .default_value(\"4096\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"hold_count\")\n", "file_path": "clients/kubos-file-client/src/main.rs", "rank": 8, "score": 89744.99845168507 }, { "content": " SubCommand::with_name(\"cleanup\")\n\n .about(\"Requests cleanup of remote temporary storage\")\n\n .arg(\n\n Arg::with_name(\"hash\")\n\n .help(\"Specific file storage to clean up\")\n\n .takes_value(true),\n\n ),\n\n )\n\n .arg(\n\n Arg::with_name(\"host_ip\")\n\n .short(\"h\")\n\n .takes_value(true)\n\n .default_value(\"0.0.0.0\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"remote_ip\")\n\n .short(\"-r\")\n\n .takes_value(true)\n\n .default_value(\"0.0.0.0\"),\n\n )\n", "file_path": "clients/kubos-file-client/src/main.rs", "rank": 9, "score": 89744.99845168507 }, { "content": " hold_count,\n\n )\n\n }\n\n Some(\"cleanup\") => {\n\n let hash = args\n\n .subcommand_matches(\"cleanup\")\n\n .unwrap()\n\n .value_of(\"hash\")\n\n .to_owned()\n\n .map(|v| v.to_owned());\n\n cleanup(\n\n host_ip,\n\n &remote_addr,\n\n hash,\n\n Some(storage_prefix),\n\n chunk_size,\n\n hold_count,\n\n )\n\n }\n\n _ => panic!(\"Invalid command\"),\n\n };\n\n\n\n if let Err(err) = result {\n\n error!(\"Operation failed: {}\", err);\n\n } else {\n\n info!(\"Operation successful\");\n\n }\n\n}\n", "file_path": "clients/kubos-file-client/src/main.rs", "rank": 10, "score": 89744.99845168507 }, { "content": " .short(\"-t\")\n\n .takes_value(true)\n\n .default_value(\"6\"),\n\n )\n\n .setting(AppSettings::SubcommandRequiredElseHelp)\n\n .setting(AppSettings::DeriveDisplayOrder)\n\n .get_matches();\n\n\n\n let host_ip = args.value_of(\"host_ip\").unwrap();\n\n let remote_addr = format!(\n\n \"{}:{}\",\n\n args.value_of(\"remote_ip\").unwrap(),\n\n 
args.value_of(\"remote_port\").unwrap()\n\n );\n\n let chunk_size: usize = args.value_of(\"chunk_size\").unwrap().parse().unwrap();\n\n let hold_count: u16 = args.value_of(\"hold_count\").unwrap().parse().unwrap();\n\n let storage_prefix = args.value_of(\"storage_prefix\").unwrap().to_string();\n\n\n\n let result = match args.subcommand_name() {\n\n Some(\"upload\") => {\n", "file_path": "clients/kubos-file-client/src/main.rs", "rank": 11, "score": 89744.99845168507 }, { "content": " .help(\"Destination path on remote target\")\n\n .takes_value(true),\n\n ),\n\n )\n\n .subcommand(\n\n SubCommand::with_name(\"download\")\n\n .about(\"Requests download of remote file\")\n\n .arg(\n\n Arg::with_name(\"source_path\")\n\n .help(\"Remote file path to download\")\n\n .takes_value(true)\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"target_path\")\n\n .help(\"Local destination path\")\n\n .takes_value(true),\n\n ),\n\n )\n\n .subcommand(\n", "file_path": "clients/kubos-file-client/src/main.rs", "rank": 12, "score": 89744.99845168507 }, { "content": "fn download(\n\n host_ip: &str,\n\n remote_addr: &str,\n\n source_path: &str,\n\n target_path: &str,\n\n prefix: Option<String>,\n\n chunk_size: usize,\n\n hold_count: u16,\n\n) -> Result<(), failure::Error> {\n\n let f_config = FileProtocolConfig::new(prefix, chunk_size, hold_count);\n\n let f_protocol = FileProtocol::new(host_ip, remote_addr, f_config);\n\n\n\n info!(\n\n \"Downloading remote: {} to local: {}\",\n\n source_path, target_path\n\n );\n\n\n\n // Generate channel id for transaction\n\n let channel = f_protocol.generate_channel()?;\n\n\n", "file_path": "clients/kubos-file-client/src/main.rs", "rank": 13, "score": 86738.78437781657 }, { "content": "fn upload(\n\n host_ip: &str,\n\n remote_addr: &str,\n\n source_path: &str,\n\n target_path: &str,\n\n prefix: Option<String>,\n\n chunk_size: usize,\n\n hold_count: u16,\n\n) -> Result<(), failure::Error> {\n\n let f_config = FileProtocolConfig::new(prefix, 
chunk_size, hold_count);\n\n let f_protocol = FileProtocol::new(host_ip, remote_addr, f_config);\n\n\n\n info!(\n\n \"Uploading local:{} to remote:{}\",\n\n &source_path, &target_path\n\n );\n\n\n\n // Copy file to upload to temp storage. Calculate the hash and chunk info\n\n let (hash, num_chunks, mode) = f_protocol.initialize_file(&source_path)?;\n\n\n", "file_path": "clients/kubos-file-client/src/main.rs", "rank": 14, "score": 86738.78437781657 }, { "content": "fn cleanup(\n\n host_ip: &str,\n\n remote_addr: &str,\n\n hash: Option<String>,\n\n prefix: Option<String>,\n\n chunk_size: usize,\n\n hold_count: u16,\n\n) -> Result<(), failure::Error> {\n\n match &hash {\n\n Some(s) => info!(\"Requesting remote cleanup of temp storage for hash {}\", s),\n\n None => info!(\"Requesting remote cleanup of all temp storage\"),\n\n }\n\n\n\n let f_config = FileProtocolConfig::new(prefix, chunk_size, hold_count);\n\n let f_protocol = FileProtocol::new(host_ip, remote_addr, f_config);\n\n\n\n // Generate channel ID for transaction\n\n let channel = f_protocol.generate_channel()?;\n\n\n\n // Send our cleanup request to the remote addr and verify that it's\n\n // going to be able to send it\n\n f_protocol.send_cleanup(channel, hash)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "clients/kubos-file-client/src/main.rs", "rank": 15, "score": 86738.78437781657 }, { "content": "fn main() {\n\n CombinedLogger::init(vec![\n\n TermLogger::new(LevelFilter::Info, Config::default()).unwrap()\n\n ])\n\n .unwrap();\n\n\n\n info!(\"Starting file transfer client\");\n\n\n\n let args = App::new(\"File transfer client\")\n\n .subcommand(\n\n SubCommand::with_name(\"upload\")\n\n .about(\"Initiates upload of local file\")\n\n .arg(\n\n Arg::with_name(\"source_path\")\n\n .help(\"Local file path to upload\")\n\n .takes_value(true)\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"target_path\")\n", "file_path": "clients/kubos-file-client/src/main.rs", "rank": 16, "score": 86738.78437781657 }, 
{ "content": "static const int exception_signals[] = {\n\n SIGFPE,\n\n SIGILL,\n\n SIGSEGV,\n\n#ifdef SIGBUS\n\n SIGBUS,\n\n#endif\n\n#ifdef SIGSYS\n\n SIGSYS,\n\n#endif\n", "file_path": "cmocka/cmocka-1.1.0/src/cmocka.c", "rank": 17, "score": 84173.754470694 }, { "content": " int y;\n", "file_path": "cmocka/cmocka-1.1.0/tests/test_exception_handler.c", "rank": 18, "score": 84168.08732630665 }, { "content": "static void exception_handler(int sig) {\n\n const char *sig_strerror = \"\";\n\n\n\n#ifdef HAVE_STRSIGNAL\n\n sig_strerror = strsignal(sig);\n\n#endif\n\n\n\n cm_print_error(\"Test failed with exception: %s(%d)\",\n\n sig_strerror, sig);\n\n exit_test(1);\n", "file_path": "cmocka/cmocka-1.1.0/src/cmocka.c", "rank": 19, "score": 84168.08732630665 }, { "content": " int x;\n", "file_path": "cmocka/cmocka-1.1.0/tests/test_exception_handler.c", "rank": 20, "score": 84168.08732630665 }, { "content": "static int file_append;\n", "file_path": "cmocka/cmocka-1.1.0/src/cmocka.c", "rank": 21, "score": 83927.44264710876 }, { "content": "int main(void) {\n\n const struct CMUnitTest tests[] = {\n\n cmocka_unit_test(test_segfault_recovery),\n\n cmocka_unit_test(test_segfault_recovery),\n\n cmocka_unit_test(test_segfault_recovery),\n\n };\n\n\n\n return cmocka_run_group_tests(tests, NULL, NULL);\n", "file_path": "cmocka/cmocka-1.1.0/tests/test_exception_handler.c", "rank": 22, "score": 81525.70962593867 }, { "content": "class I2C:\n\n\n\n def __init__(self, bus):\n\n \"\"\"\n\n Retrieves the read/write file handle for the device\n\n \"\"\"\n\n self.filepath = \"/dev/i2c-\"+str(bus)\n\n\n\n def write(self, device, data):\n\n \"\"\"\n\n Sets the address for the device.\n\n Formats the data and writes the data to the device.\n\n Input must be a string or a list.\n\n Returns True and the data (as written to the device) if successful\n\n \"\"\"\n\n\n\n with io.open(self.filepath, \"r+b\", buffering=0) as file:\n\n\n\n fcntl.ioctl(file, I2C_SLAVE, device)\n\n\n\n if type(data) is 
list:\n\n data = bytearray(data)\n\n elif type(data) is bytes:\n\n pass\n\n else:\n\n raise TypeError('Invalid data format: ' +\n\n str(type(data))+', must be bytes or list')\n\n file.write(data)\n\n return True, data\n\n\n\n def read(self, device, count):\n\n \"\"\"\n\n Reads the specified number of bytes from the device.\n\n \"\"\"\n\n with io.open(self.filepath, \"r+b\", buffering=0) as file:\n\n fcntl.ioctl(file, I2C_SLAVE, device)\n\n\n", "file_path": "hal/python-hal/i2c/i2c.py", "rank": 23, "score": 79046.43699775374 }, { "content": "struct test_segv {\n\n int x;\n\n int y;\n", "file_path": "cmocka/cmocka-1.1.0/tests/test_exception_handler.c", "rank": 24, "score": 79044.19179216065 }, { "content": "static void test_segfault_recovery(void **state)\n\n{\n\n struct test_segv *s = NULL;\n\n\n\n (void) state; /* unused */\n\n\n\n s->x = 1;\n", "file_path": "cmocka/cmocka-1.1.0/tests/test_exception_handler.c", "rank": 25, "score": 76709.27874336524 }, { "content": "bool supervisor_get_version(supervisor_version_t * version)\n\n{\n\n uint8_t bytesToSendSampleVersion[LENGTH_TELEMETRY_SAMPLE_VERSION] = { CMD_SUPERVISOR_OBTAIN_VERSION_CONFIG, 0x00, 0x00 };\n\n uint8_t bytesToReceiveSampleVersion[LENGTH_TELEMETRY_SAMPLE_VERSION] = { CMD_SUPERVISOR_OBTAIN_VERSION_CONFIG, 0x00, 0x00 };\n\n uint8_t bytesToSendObtainVersion[LENGTH_TELEMETRY_GET_VERSION] = { 0 };\n\n uint8_t bytesToReceiveObtainVersion[LENGTH_TELEMETRY_GET_VERSION] = { 0 };\n\n\n\n if (!spi_comms(bytesToSendSampleVersion, bytesToReceiveSampleVersion, LENGTH_TELEMETRY_SAMPLE_VERSION))\n\n {\n\n printf(\"Failed to sample version\\n\");\n\n return false;\n\n }\n\n\n\n usleep(10000);\n\n\n\n if (!spi_comms(bytesToSendObtainVersion, bytesToReceiveObtainVersion, LENGTH_TELEMETRY_GET_VERSION))\n\n {\n\n printf(\"Failed to obtain version\\n\");\n\n return false;\n\n }\n\n\n\n if (!verify_checksum(bytesToReceiveObtainVersion, LENGTH_TELEMETRY_GET_VERSION))\n\n {\n\n printf(\"Checksum failed\\n\");\n\n return 
false;\n\n }\n\n\n\n memcpy(version, bytesToReceiveObtainVersion, LENGTH_TELEMETRY_GET_VERSION);\n\n\n\n return true;\n", "file_path": "apis/isis-iobc-supervisor/source/supervisor.c", "rank": 26, "score": 76665.95515919183 }, { "content": "static void test_fails_if_zero_count_used(void **state)\n\n{\n\n (void)state;\n\n expect_function_calls(mock_test_a_called, 0);\n\n\n\n mock_test_a_called();\n", "file_path": "cmocka/cmocka-1.1.0/test_ordering_fail.c", "rank": 27, "score": 74513.71647586828 }, { "content": "class TestI2C(unittest.TestCase):\n\n\n\n def setUp(self):\n\n self.i2cdevice = i2c.I2C(1)\n\n\n\n def test_filepath(self):\n\n self.assertEqual(\"/dev/i2c-1\", self.i2cdevice.filepath)\n\n\n\n def test_handles_list_data(self):\n\n with mock.patch('io.open'), mock.patch('fcntl.ioctl'):\n\n assert self.i2cdevice.write(1, [0x73])\n\n\n\n def test_sets_i2c_as_slave(self):\n\n fake_device = 1\n\n fake_data = b'fake'\n\n\n\n with mock.patch('io.open'), mock.patch('fcntl.ioctl') as mock_ioctl:\n\n self.i2cdevice.write(fake_device, fake_data)\n\n mock_ioctl.assert_called_with(mock.ANY, i2c.I2C_SLAVE, fake_device)\n\n\n\n def test_wrong_datatype_raises_type_error(self):\n\n bad_datatype = 123 # Not a string or list\n\n with mock.patch('io.open'), mock.patch('fcntl.ioctl'):\n\n with self.assertRaises(TypeError):\n\n self.i2cdevice.write(1, bad_datatype)\n\n\n\n def test_write(self):\n\n fake_device = 1\n\n fake_data = b'fake'\n\n\n\n with mock.patch('io.open'), mock.patch('fcntl.ioctl') as mock_ioctl:\n\n self.assertTrue(self.i2cdevice.write(fake_device, fake_data)[0])\n\n\n\n def test_read_sets_i2c_as_slave(self):\n\n fake_device = 1\n\n fake_count = 30\n\n\n\n with mock.patch('io.open'), mock.patch('fcntl.ioctl') as mock_ioctl:\n\n self.i2cdevice.read(fake_device, fake_count)\n", "file_path": "hal/python-hal/i2c/test_i2c.py", "rank": 28, "score": 74510.46778139168 }, { "content": "#define I2C_BUS \"/dev/i2c-1\"\n\n\n", "file_path": 
"test/integration/linux/lsm303dlhc-i2c/source/main.c", "rank": 29, "score": 74510.46778139168 }, { "content": "static void test_fails_if_zero_count_used(void **state)\n\n{\n\n (void)state;\n\n expect_function_calls(mock_test_a_called, 0);\n\n\n\n mock_test_a_called();\n", "file_path": "cmocka/cmocka-1.1.0/tests/test_ordering_fail.c", "rank": 30, "score": 72435.4140206824 }, { "content": "bool supervisor_get_version(supervisor_version_t * version);\n", "file_path": "apis/isis-iobc-supervisor/isis-iobc-supervisor/supervisor.h", "rank": 31, "score": 72395.44090345064 }, { "content": "#define CMD_SUPERVISOR_OBTAIN_VERSION_CONFIG 0x55\n\n\n", "file_path": "apis/isis-iobc-supervisor/source/supervisor.c", "rank": 32, "score": 72394.810907022 }, { "content": "#define SOFTWARE_VERSION 0x6800 /**< iMTQ's software version. <em>v[second byte].[third byte].[fourth byte]; first byte is ignored</em> */\n", "file_path": "apis/isis-imtq-api/isis-imtq-api/imtq-config.h", "rank": 33, "score": 72389.2917374144 }, { "content": "#define LENGTH_TELEMETRY_SAMPLE_VERSION 3\n", "file_path": "apis/isis-iobc-supervisor/isis-iobc-supervisor/supervisor.h", "rank": 34, "score": 70430.73736312658 }, { "content": "#define LENGTH_TELEMETRY_GET_VERSION 34\n", "file_path": "apis/isis-iobc-supervisor/isis-iobc-supervisor/supervisor.h", "rank": 35, "score": 70430.73736312658 }, { "content": "#define TEST_ERROR_I2C 0x01 /**< I<sup>2</sup>C Failure */\n", "file_path": "apis/isis-imtq-api/isis-imtq-api/imtq-data.h", "rank": 36, "score": 68605.24385202047 }, { "content": "#!/usr/bin/env python\n\n\n\n# Copyright 2018 Kubos Corporation\n\n# Licensed under the Apache License, Version 2.0\n\n# See LICENSE file for details.\n\n\n\n\"\"\"\n\nTesting library for KubOS.\n\n\n\nCurrently only implements integration testing for hardware services.\n\n\"\"\"\n\n\n\nimport app_api\n\nimport socket\n\n\n\nDEFAULT_CONFIG_PATH = \"/home/system/etc/config.toml\"\n\nSERVICE_MUTATION = (\n\n 'mutation 
{test(test:NOOP){success,errors,results}}')\n\nQUERY_TIMEOUT = 1.0 # Seconds\n\n\n\n\n\nclass IntegrationTest:\n\n\n\n def __init__(self,\n\n config_filepath=DEFAULT_CONFIG_PATH):\n\n self.api = app_api.Services(config_filepath)\n\n\n\n def test_services(self, query=SERVICE_MUTATION):\n\n for service in self.api.config:\n\n self.test_service(service=service, query=query)\n\n\n\n def test_service(self, service, query=SERVICE_MUTATION):\n\n response = []\n\n try:\n\n # Complete the test mutation\n\n response = self.api.query(\n\n service=service,\n\n query=query,\n\n timeout=QUERY_TIMEOUT)\n\n\n\n # Check for successful test\n\n if response['test']['success']:\n\n print \"Status : SUCCESS\\n {}\".format(service)\n\n print \"Response : {}\\n\".format(response)\n\n else:\n\n print \"Status : FAILED\\n {}\".format(service)\n\n print \"Response : {}\\n\".format(response)\n\n except socket.timeout as e:\n\n print \"Status : TIMEOUT\\n {}\".format(service)\n\n print \"No response from server\"\n\n print \"Timeout : {} seconds\\n\".format(QUERY_TIMEOUT)\n\n except KeyError as e:\n\n print \"Status : FORMAT ERROR\\n {}\".format(service)\n\n print \"Service is sending back invalid response format\"\n\n print \"Error : {}, {}\\n\".format(type(e), e)\n\n except Exception as e:\n\n print \"Status : TEST ERROR\\n {}\".format(service)\n\n print \"Error : {}, {}\\n\".format(type(e), e)\n", "file_path": "libs/kubos-test/kubos_test.py", "rank": 37, "score": 59637.02648082005 }, { "content": "#!/usr/bin/env python\n\n\n\n# Copyright 2018 Kubos Corporation\n\n# Licensed under the Apache License, Version 2.0\n\n# See LICENSE file for details\n\n\n\nimport argparse\n\nimport toml\n\n\n\nDEFAULT_IP = \"127.0.0.1\"\n\nDEFAULT_PORT = 8001\n\nDEFAULT_PATH = \"/home/system/etc/config.toml\"\n\n\n\n\n\ndef get_args(name):\n\n parser = argparse.ArgumentParser(description=name)\n\n parser = argparse.ArgumentParser(description='Example Service')\n\n parser.add_argument(\"-c\", \"--config\", 
type=str, help='path to config file')\n\n return parser.parse_args()\n\n\n\n\n\nclass Config:\n\n \"\"\"Service configuration\"\"\"\n\n name = \"\"\n\n ip = \"\"\n\n port = 0\n\n raw = {}\n\n\n\n def __init__(self, name):\n\n args = get_args(name)\n\n if args.config:\n\n path = args.config\n\n else:\n\n path = DEFAULT_PATH\n\n try:\n\n data = toml.load(path)\n\n self.name = name\n\n self.ip = data[name]['addr']['ip']\n\n self.port = data[name]['addr']['port']\n\n self.raw = data[name]\n\n\n\n except Exception:\n\n self.ip = DEFAULT_IP\n\n self.port = DEFAULT_PORT\n", "file_path": "libs/kubos-service/kubos_service/config.py", "rank": 38, "score": 58704.249890818435 }, { "content": " def __init__(self,\n\n config_filepath=DEFAULT_CONFIG_PATH):\n", "file_path": "libs/kubos-test/kubos_test.py", "rank": 39, "score": 58704.249890818435 }, { "content": "KI2CStatus k_i2c_init(char * device, int * fp);\n", "file_path": "hal/kubos-hal/kubos-hal/i2c.h", "rank": 40, "score": 57817.131902112815 }, { "content": "void k_i2c_terminate(int * fp);\n", "file_path": "hal/kubos-hal/kubos-hal/i2c.h", "rank": 41, "score": 57812.71987434051 }, { "content": "KI2CStatus k_i2c_write(int i2c, uint16_t addr, uint8_t *ptr, int len);\n", "file_path": "hal/kubos-hal/kubos-hal/i2c.h", "rank": 42, "score": 57807.8897892805 }, { "content": "KI2CStatus k_i2c_read(int i2c, uint16_t addr, uint8_t *ptr, int len);\n", "file_path": "hal/kubos-hal/kubos-hal/i2c.h", "rank": 43, "score": 57807.804103066504 }, { "content": "#!/usr/bin/env python\n\n\n\n# Copyright 2018 Kubos Corporation\n\n# Licensed under the Apache License, Version 2.0\n\n# See LICENSE file for details.\n\n\n\n\n\n\"\"\"\n\nWrapper for creating a HTTP based Kubos service\n\n\"\"\"\n\n\n\nfrom flask import Flask\n\nfrom flask_graphql import GraphQLView\n\n\n\n\n\ndef start(config, schema, context={}):\n\n \"\"\"\n\n Creates flask based graphql and graphiql endpoints\n\n \"\"\"\n\n\n\n app = Flask(__name__)\n\n app.debug = True\n\n\n\n 
app.add_url_rule(\n\n '/',\n\n view_func=GraphQLView.as_view(\n\n 'graphql',\n\n schema=schema,\n\n context=context,\n\n graphiql=False\n\n )\n\n )\n\n\n\n app.add_url_rule(\n\n '/graphiql',\n\n view_func=GraphQLView.as_view(\n\n 'graphiql',\n\n schema=schema,\n\n context=context,\n\n graphiql=True\n\n )\n\n )\n\n\n\n app.run(config.ip, config.port)\n", "file_path": "libs/kubos-service/kubos_service/http_service.py", "rank": 44, "score": 57805.66149122837 }, { "content": "#!/usr/bin/env python\n\n\n\n# Copyright 2018 Kubos Corporation\n\n# Licensed under the Apache License, Version 2.0\n\n# See LICENSE file for details.\n\n\n\n\n\n\"\"\"\n\nWrapper for creating a UDP based Kubos service\n\n\"\"\"\n\n\n\nimport socket\n\nimport json\n\nimport logging\n\n\n\n\n\ndef start(logger, config, schema, context={}):\n\n logger.info(\"{} starting on {}:{}\".format(config.name, config.ip, config.port))\n\n sock = socket.socket(socket.AF_INET, # Internet\n\n socket.SOCK_DGRAM) # UDP\n\n sock.bind((config.ip, config.port))\n\n base_schema = schema.schema\n\n\n\n while True:\n\n try:\n\n data, source = sock.recvfrom(1024)\n\n errs = None\n\n msg = None\n\n try:\n\n result = base_schema.execute(data.decode(), context_value=context)\n\n msg = result.data\n\n if result.errors:\n\n errs = []\n\n for e in result.errors:\n\n errs.append(str(e))\n\n\n\n except Exception as e:\n\n errs = \"Exception encountered {}\".format(e)\n\n\n\n result = json.dumps({\n\n \"data\": msg,\n\n \"errors\": errs\n\n })\n\n sock.sendto(str.encode(result), source)\n\n except Exception as e:\n\n logging.error(\"Exception encountered {}\".format(e))\n", "file_path": "libs/kubos-service/kubos_service/udp_service.py", "rank": 45, "score": 57805.66149122837 }, { "content": "class IntegrationTest:\n\n\n\n def __init__(self,\n\n config_filepath=DEFAULT_CONFIG_PATH):\n\n self.api = app_api.Services(config_filepath)\n\n\n\n def test_services(self, query=SERVICE_MUTATION):\n\n for service in self.api.config:\n\n 
self.test_service(service=service, query=query)\n\n\n\n def test_service(self, service, query=SERVICE_MUTATION):\n\n response = []\n\n try:\n\n # Complete the test mutation\n\n response = self.api.query(\n\n service=service,\n\n query=query,\n\n timeout=QUERY_TIMEOUT)\n\n\n\n # Check for successful test\n\n if response['test']['success']:\n\n print \"Status : SUCCESS\\n {}\".format(service)\n\n print \"Response : {}\\n\".format(response)\n\n else:\n\n print \"Status : FAILED\\n {}\".format(service)\n\n print \"Response : {}\\n\".format(response)\n\n except socket.timeout as e:\n\n print \"Status : TIMEOUT\\n {}\".format(service)\n\n print \"No response from server\"\n\n print \"Timeout : {} seconds\\n\".format(QUERY_TIMEOUT)\n\n except KeyError as e:\n\n print \"Status : FORMAT ERROR\\n {}\".format(service)\n\n print \"Service is sending back invalid response format\"\n\n print \"Error : {}, {}\\n\".format(type(e), e)\n\n except Exception as e:\n\n print \"Status : TEST ERROR\\n {}\".format(service)\n", "file_path": "libs/kubos-test/kubos_test.py", "rank": 46, "score": 57800.20286976845 }, { "content": " def test_services(self, query=SERVICE_MUTATION):\n\n for service in self.api.config:\n", "file_path": "libs/kubos-test/kubos_test.py", "rank": 47, "score": 57800.20286976845 }, { "content": " def test_service(self, service, query=SERVICE_MUTATION):\n\n response = []\n\n try:\n\n # Complete the test mutation\n\n response = self.api.query(\n\n service=service,\n\n query=query,\n\n timeout=QUERY_TIMEOUT)\n\n\n\n # Check for successful test\n\n if response['test']['success']:\n\n print \"Status : SUCCESS\\n {}\".format(service)\n\n print \"Response : {}\\n\".format(response)\n\n else:\n\n print \"Status : FAILED\\n {}\".format(service)\n\n print \"Response : {}\\n\".format(response)\n\n except socket.timeout as e:\n\n print \"Status : TIMEOUT\\n {}\".format(service)\n\n print \"No response from server\"\n\n print \"Timeout : {} seconds\\n\".format(QUERY_TIMEOUT)\n\n 
except KeyError as e:\n\n print \"Status : FORMAT ERROR\\n {}\".format(service)\n\n print \"Service is sending back invalid response format\"\n\n print \"Error : {}, {}\\n\".format(type(e), e)\n\n except Exception as e:\n\n print \"Status : TEST ERROR\\n {}\".format(service)\n", "file_path": "libs/kubos-test/kubos_test.py", "rank": 48, "score": 57800.20286976845 }, { "content": "class Config:\n\n \"\"\"Service configuration\"\"\"\n\n name = \"\"\n\n ip = \"\"\n\n port = 0\n\n raw = {}\n\n\n\n def __init__(self, name):\n\n args = get_args(name)\n\n if args.config:\n\n path = args.config\n\n else:\n\n path = DEFAULT_PATH\n\n try:\n\n data = toml.load(path)\n\n self.name = name\n\n self.ip = data[name]['addr']['ip']\n\n self.port = data[name]['addr']['port']\n\n self.raw = data[name]\n\n\n\n except Exception:\n\n self.ip = DEFAULT_IP\n", "file_path": "libs/kubos-service/kubos_service/config.py", "rank": 49, "score": 57800.20286976845 }, { "content": " def __init__(self, name):\n\n args = get_args(name)\n\n if args.config:\n\n path = args.config\n\n else:\n\n path = DEFAULT_PATH\n\n try:\n\n data = toml.load(path)\n\n self.name = name\n\n self.ip = data[name]['addr']['ip']\n\n self.port = data[name]['addr']['port']\n\n self.raw = data[name]\n\n\n\n except Exception:\n\n self.ip = DEFAULT_IP\n", "file_path": "libs/kubos-service/kubos_service/config.py", "rank": 50, "score": 57800.20286976845 }, { "content": "/// Converts raw bytes from iOBC into SupervisorVersion\n\nfn convert_raw_version(raw: &ffi::supervisor_version) -> SupervisorVersion {\n\n SupervisorVersion {\n\n dummy: raw.0[0] as u8,\n\n spi_command_status: raw.0[1] as u8,\n\n index_of_subsystem: raw.0[2] as u8,\n\n major_version: raw.0[3] as u8,\n\n minor_version: raw.0[4] as u8,\n\n patch_version: raw.0[5] as u8,\n\n git_head_version: {\n\n u32::from(raw.0[6])\n\n | u32::from(raw.0[7]) << 8\n\n | u32::from(raw.0[8]) << 16\n\n | u32::from(raw.0[9]) << 24\n\n },\n\n serial_number: { u16::from(raw.0[10]) | 
u16::from(raw.0[11]) << 8 },\n\n compile_information: {\n\n let mut a = [0; ffi::LENGTH_COMPILE_INFORMATION];\n\n for (i, element) in a.iter_mut().enumerate() {\n\n // 12 is the offset to locate compile_information\n\n // in the version data\n\n *element = raw.0[i + 12] as i8;\n\n }\n\n a\n\n },\n\n clock_speed: raw.0[31] as u8,\n\n code_type: raw.0[32] as i8,\n\n crc: raw.0[33] as u8,\n\n }\n\n}\n\n\n", "file_path": "apis/isis-iobc-supervisor/src/lib.rs", "rank": 51, "score": 57671.899221664324 }, { "content": "// Copy temporary data chunks into permanent file?\n\npub fn finalize_file(\n\n prefix: &str,\n\n hash: &str,\n\n target_path: &str,\n\n mode: Option<u32>,\n\n) -> Result<(), ProtocolError> {\n\n // Double check that all the chunks of the file are present and the hash matches up\n\n let (result, _) = validate_file(prefix, hash, None)?;\n\n\n\n if result != true {\n\n return Err(ProtocolError::FinalizeError {\n\n cause: \"file missing chunks\".to_owned(),\n\n });\n\n }\n\n\n\n // Get the total number of chunks we're saving\n\n let num_chunks = load_meta(prefix, hash)?;\n\n\n\n // Q: Do we want to create the parent directories if they don't exist?\n\n let mut file = File::create(target_path).map_err(|err| ProtocolError::StorageError {\n", "file_path": "libs/file-protocol/src/storage.rs", "rank": 52, "score": 57084.92659256545 }, { "content": "// Check if all of a files chunks are present in the temporary directory\n\npub fn validate_file(\n\n prefix: &str,\n\n hash: &str,\n\n num_chunks: Option<u32>,\n\n) -> Result<(bool, Vec<u32>), ProtocolError> {\n\n let num_chunks = if let Some(num) = num_chunks {\n\n store_meta(prefix, hash, num)?;\n\n num\n\n } else {\n\n load_meta(prefix, hash)?\n\n };\n\n\n\n let mut missing_ranges: Vec<u32> = vec![];\n\n\n\n let hash_path = Path::new(&format!(\"{}/storage\", prefix)).join(hash);\n\n\n\n let mut prev_entry: i32 = -1;\n\n\n\n let entries = fs::read_dir(hash_path.clone()).map_err(|err| ProtocolError::StorageError {\n\n 
action: format!(\"read {:?} directory\", hash_path),\n", "file_path": "libs/file-protocol/src/storage.rs", "rank": 53, "score": 57084.92659256545 }, { "content": "/// Create temporary folder for chunks\n\n/// Stream copy file from mutable space to immutable space\n\n/// Move folder to hash of contents\n\npub fn initialize_file(\n\n prefix: &str,\n\n source_path: &str,\n\n chunk_size: usize,\n\n) -> Result<(String, u32, u32), ProtocolError> {\n\n let storage_path = format!(\"{}/storage\", prefix);\n\n\n\n fs::metadata(source_path).map_err(|err| ProtocolError::StorageError {\n\n action: format!(\"stat file {}\", source_path),\n\n err,\n\n })?;\n\n\n\n // Copy input file to storage area and calculate hash\n\n fs::create_dir_all(&storage_path).map_err(|err| ProtocolError::StorageError {\n\n action: format!(\"create dir {}\", storage_path),\n\n err,\n\n })?;\n\n\n\n let temp_path = Path::new(&storage_path).join(format!(\".{}\", time::get_time().nsec));\n\n let mut hasher = Blake2s::new(HASH_SIZE);\n", "file_path": "libs/file-protocol/src/storage.rs", "rank": 54, "score": 57084.221184862356 }, { "content": "def get_args(name):\n\n parser = argparse.ArgumentParser(description=name)\n\n parser = argparse.ArgumentParser(description='Example Service')\n\n parser.add_argument(\"-c\", \"--config\", type=str, help='path to config file')\n", "file_path": "libs/kubos-service/kubos_service/config.py", "rank": 55, "score": 56923.578239848954 }, { "content": "def start(config, schema, context={}):\n\n \"\"\"\n\n Creates flask based graphql and graphiql endpoints\n\n \"\"\"\n\n\n\n app = Flask(__name__)\n\n app.debug = True\n\n\n\n app.add_url_rule(\n\n '/',\n\n view_func=GraphQLView.as_view(\n\n 'graphql',\n\n schema=schema,\n\n context=context,\n\n graphiql=False\n\n )\n\n )\n\n\n\n app.add_url_rule(\n\n '/graphiql',\n\n view_func=GraphQLView.as_view(\n\n 'graphiql',\n\n schema=schema,\n\n context=context,\n\n graphiql=True\n\n )\n\n )\n\n\n", "file_path": 
"libs/kubos-service/kubos_service/http_service.py", "rank": 56, "score": 56923.578239848954 }, { "content": "def start(logger, config, schema, context={}):\n\n logger.info(\"{} starting on {}:{}\".format(config.name, config.ip, config.port))\n\n sock = socket.socket(socket.AF_INET, # Internet\n\n socket.SOCK_DGRAM) # UDP\n\n sock.bind((config.ip, config.port))\n\n base_schema = schema.schema\n\n\n\n while True:\n\n try:\n\n data, source = sock.recvfrom(1024)\n\n errs = None\n\n msg = None\n\n try:\n\n result = base_schema.execute(data.decode(), context_value=context)\n\n msg = result.data\n\n if result.errors:\n\n errs = []\n\n for e in result.errors:\n\n errs.append(str(e))\n\n\n\n except Exception as e:\n\n errs = \"Exception encountered {}\".format(e)\n\n\n\n result = json.dumps({\n\n \"data\": msg,\n\n \"errors\": errs\n\n })\n\n sock.sendto(str.encode(result), source)\n\n except Exception as e:\n", "file_path": "libs/kubos-service/kubos_service/udp_service.py", "rank": 57, "score": 56923.578239848954 }, { "content": "#[test]\n\nfn set_version_same() {\n\n let registry_dir = TempDir::new().unwrap();\n\n let service = mock_service!(registry_dir);\n\n\n\n test_setup(&service);\n\n\n\n let query = r#\"mutation {\n\n setVersion(name: \\\"dummy\\\", version: \\\"0.0.2\\\") {\n\n errors,\n\n success\n\n }\n\n }\"#;\n\n\n\n let expected = json!({\n\n \"setVersion\": {\n\n \"errors\": \"\",\n\n \"success\": true,\n\n }\n\n });\n\n\n", "file_path": "services/app-service/src/tests/set_version.rs", "rank": 58, "score": 56136.42589391907 }, { "content": "#[test]\n\nfn test_get_version() {\n\n let mut mock = MockStream::default();\n\n\n\n mock.read.set_output(vec![\n\n 0xAA, 0x44, 0x12, 0x1C, 0x25, 0x0, 0x0, 0x20, 0x70, 0x0, 0x0, 0x0, 0x7D, 0x78, 0xD1, 0xB,\n\n 0x38, 0x5E, 0xC9, 0x9, 0x0, 0x0, 0x48, 0x0, 0x81, 0x36, 0xFA, 0x33, 0x1, 0x0, 0x0, 0x0,\n\n 0x1, 0x0, 0x0, 0x0, 0x47, 0x31, 0x53, 0x42, 0x30, 0x47, 0x54, 0x54, 0x30, 0x0, 0x0, 0x0,\n\n 0x0, 0x0, 0x0, 0x0, 0x42, 0x4A, 
0x59, 0x41, 0x31, 0x35, 0x31, 0x32, 0x30, 0x30, 0x33, 0x38,\n\n 0x48, 0x0, 0x0, 0x0, 0x4F, 0x45, 0x4D, 0x36, 0x31, 0x35, 0x2D, 0x32, 0x2E, 0x30, 0x30, 0x0,\n\n 0x0, 0x0, 0x0, 0x0, 0x4F, 0x45, 0x4D, 0x30, 0x36, 0x30, 0x36, 0x30, 0x30, 0x52, 0x4E, 0x30,\n\n 0x30, 0x30, 0x30, 0x0, 0x4F, 0x45, 0x4D, 0x30, 0x36, 0x30, 0x32, 0x30, 0x31, 0x52, 0x42,\n\n 0x30, 0x30, 0x30, 0x30, 0x0, 0x32, 0x30, 0x31, 0x35, 0x2F, 0x4A, 0x61, 0x6E, 0x2F, 0x32,\n\n 0x38, 0x0, 0x31, 0x35, 0x3A, 0x32, 0x37, 0x3A, 0x32, 0x39, 0x0, 0x0, 0x0, 0x0, 0xC6, 0x5E,\n\n 0x86, 0x47,\n\n ]);\n\n\n\n let oem = mock_new!(mock);\n\n\n\n let expected: Log = Log::Version(VersionLog {\n\n recv_status: ReceiverStatusFlags::CLOCK_MODEL_INVALID\n", "file_path": "apis/novatel-oem6-api/src/tests/version.rs", "rank": 59, "score": 54988.302198727004 }, { "content": "#[test]\n\nfn set_version_bad() {\n\n let registry_dir = TempDir::new().unwrap();\n\n let service = mock_service!(registry_dir);\n\n\n\n test_setup(&service);\n\n\n\n let query = r#\"mutation {\n\n setVersion(name: \\\"dummy\\\", version: \\\"0.0.3\\\") {\n\n errors,\n\n success\n\n }\n\n }\"#;\n\n\n\n let expected = json!({\n\n \"setVersion\": {\n\n \"errors\": \"Registry Error: App dummy version 0.0.3 not found in registry\",\n\n \"success\": false,\n\n }\n\n });\n\n\n\n test!(service, query, expected);\n\n}\n", "file_path": "services/app-service/src/tests/set_version.rs", "rank": 60, "score": 54988.302198727004 }, { "content": "#[test]\n\nfn set_version_good() {\n\n let registry_dir = TempDir::new().unwrap();\n\n let service = mock_service!(registry_dir);\n\n\n\n test_setup(&service);\n\n\n\n let query = r#\"mutation {\n\n setVersion(name: \\\"dummy\\\", version: \\\"0.0.1\\\") {\n\n errors,\n\n success\n\n }\n\n }\"#;\n\n\n\n let expected = json!({\n\n \"setVersion\": {\n\n \"errors\": \"\",\n\n \"success\": true,\n\n }\n\n });\n\n\n", "file_path": "services/app-service/src/tests/set_version.rs", "rank": 61, "score": 54988.302198727004 }, { "content": 
"#!/usr/bin/env python\n\n\"\"\"\n\nA setuptools based setup module for the kubos test package.\n\nSee:\n\nhttps://github.com/pypa/sampleproject\n\n\"\"\"\n\n\n\nfrom setuptools import setup\n\n\n\nsetup(name='kubos_test',\n\n version='0.1.0',\n\n description='Manual integration testing library for KubOS Services',\n\n py_modules=[\"kubos_test\"],\n\n install_requires=[\"app_api\"]\n\n )\n", "file_path": "libs/kubos-test/setup.py", "rank": 62, "score": 54525.442726449335 }, { "content": "#!/usr/bin/env python\n\n\n\nfrom distutils.core import setup\n\n\n\nsetup(name='kubos_service',\n\n version='1.0',\n\n packages=['kubos_service'],\n\n license='MIT',\n\n description='Python interface for creating kubos services',\n\n )\n", "file_path": "libs/kubos-service/setup.py", "rank": 63, "score": 54520.54299378845 }, { "content": "#[test]\n\nfn test_request_version_good() {\n\n let mut mock = MockStream::default();\n\n\n\n mock.write.set_input(vec![\n\n 0xAA, 0x44, 0x12, 0x1C, 0x1, 0x0, 0x0, 0xC0, 0x20, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,\n\n 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x20, 0x0, 0x0, 0x0, 0x25, 0x0, 0x0,\n\n 0x0, 0x4, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,\n\n 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x8E, 0x33, 0x3C,\n\n ]);\n\n\n\n mock.read.set_output(vec![\n\n 0xAA, 0x44, 0x12, 0x1C, 0x1, 0x0, 0x80, 0x20, 0x6, 0x0, 0x0, 0x0, 0xFF, 0x78, 0xD1, 0xB,\n\n 0x6E, 0x5D, 0xC9, 0x9, 0x0, 0x0, 0x0, 0x0, 0xFB, 0xFD, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x4F,\n\n 0x4B, 0x92, 0x8F, 0x77, 0x4A,\n\n ]);\n\n\n\n let oem = mock_new!(mock);\n\n\n\n assert_eq!(oem.request_version(), Ok(()));\n\n}\n\n\n", "file_path": "apis/novatel-oem6-api/src/tests/version.rs", "rank": 64, "score": 53886.20096609831 }, { "content": "#[test]\n\nfn test_request_version_fail_response() {\n\n let mut mock = MockStream::default();\n\n\n\n mock.write.set_input(vec![\n\n 0xAA, 0x44, 0x12, 0x1C, 0x1, 0x0, 0x0, 0xC0, 0x20, 0x0, 0x0, 0x0, 0x0, 0x0, 
0x0, 0x0, 0x0,\n\n 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x20, 0x0, 0x0, 0x0, 0x25, 0x0, 0x0,\n\n 0x0, 0x4, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,\n\n 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x8E, 0x33, 0x3C,\n\n ]);\n\n\n\n mock.read.set_output(vec![\n\n 0xAA, 0x44, 0x12, 0x1C, 0x1, 0x0, 0x80, 0x20, 0x15, 0x0, 0x0, 0x0, 0xFF, 0x78, 0xD1, 0xB,\n\n 0x6E, 0x5D, 0xC9, 0x9, 0x0, 0x0, 0x0, 0x0, 0xFB, 0xFD, 0x0, 0x0, 0x1F, 0x0, 0x0, 0x0, 0x4D,\n\n 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x20, 0x74, 0x69, 0x6D, 0x65, 0x64, 0x20, 0x6F, 0x75,\n\n 0x74, 0xCB, 0xE5, 0x83, 0x92,\n\n ]);\n\n\n\n let oem = mock_new!(mock);\n\n\n\n assert_eq!(\n\n oem.request_version().unwrap_err(),\n\n OEMError::CommandError {\n\n id: ResponseID::Timeout,\n\n description: \"Message timed out\".to_owned(),\n\n }\n\n );\n\n}\n\n\n", "file_path": "apis/novatel-oem6-api/src/tests/version.rs", "rank": 65, "score": 52827.40935695799 }, { "content": "#[test]\n\nfn test_request_version_bad_no_response() {\n\n let mut mock = MockStream::default();\n\n\n\n mock.write.set_input(vec![\n\n 0xAA, 0x44, 0x12, 0x1C, 0x1, 0x0, 0x0, 0xC0, 0x20, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,\n\n 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x20, 0x0, 0x0, 0x0, 0x25, 0x0, 0x0,\n\n 0x0, 0x4, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,\n\n 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x8E, 0x33, 0x3C,\n\n ]);\n\n\n\n let oem = mock_new!(mock);\n\n\n\n assert_eq!(oem.request_version().unwrap_err(), OEMError::NoResponse);\n\n}\n\n\n", "file_path": "apis/novatel-oem6-api/src/tests/version.rs", "rank": 66, "score": 52827.40935695799 }, { "content": "int main(void)\n\n{\n\n printf(\"Hello World!\");\n\n return 0;\n", "file_path": "examples/kubos-linux-example/source/main.c", "rank": 67, "score": 52243.828682454274 }, { "content": "#define PORT 3456\n", "file_path": "examples/kubos-linux-tcprx/source/main.c", "rank": 68, "score": 
52243.828682454274 }, { "content": "KI2CStatus k_i2c_read(int i2c, uint16_t addr, uint8_t* ptr, int len)\n\n{\n\n if (i2c == 0 || ptr == NULL)\n\n {\n\n return I2C_ERROR;\n\n }\n\n\n\n /* Set the desired slave's address */\n\n if (ioctl(i2c, I2C_SLAVE, addr) < 0)\n\n {\n\n perror(\"Couldn't reach requested address\");\n\n return I2C_ERROR_ADDR_TIMEOUT;\n\n }\n\n\n\n /* Read in data */\n\n if (read(i2c, ptr, len) != len)\n\n {\n\n perror(\"I2C read failed\");\n\n return I2C_ERROR;\n\n }\n\n\n\n return I2C_OK;\n", "file_path": "hal/kubos-hal/source/i2c.c", "rank": 69, "score": 52243.828682454274 }, { "content": "int main(int argc, char * argv[])\n\n{\n\n\n\n int opt;\n\n\n\n running = 1;\n\n\n\n /* Ctrl+C will trigger a signal to end the program */\n\n signal(SIGINT, sigint_handler);\n\n\n\n saio.sa_handler = sigio_handler;\n\n saio.sa_flags = 0;\n\n saio.sa_restorer = NULL;\n\n\n\n /* The UART bus receiving data will trigger an I/O interrupt */\n\n sigaction(SIGIO, &saio, NULL);\n\n\n\n /* Open connection to transmitter */\n\n uart_init();\n\n\n\n while (running)\n\n {\n\n\n\n /* Need while loop to keep app running while waiting for messages from\n\n * master */\n\n sleep(1);\n\n }\n\n\n\n /* Cleanup */\n\n uart_close();\n\n\n\n return 0;\n", "file_path": "examples/kubos-linux-uartrx/source/main.c", "rank": 70, "score": 52243.828682454274 }, { "content": "int running;\n", "file_path": "examples/kubos-linux-uartrx/source/main.c", "rank": 71, "score": 52243.828682454274 }, { "content": "KI2CStatus k_i2c_write(int i2c, uint16_t addr, uint8_t* ptr, int len)\n\n{\n\n if (i2c == 0 || ptr == NULL)\n\n {\n\n return I2C_ERROR;\n\n }\n\n\n\n /* Set the desired slave's address */\n\n if (ioctl(i2c, I2C_SLAVE, addr) < 0)\n\n {\n\n perror(\"Couldn't reach requested address\");\n\n return I2C_ERROR_ADDR_TIMEOUT;\n\n }\n\n\n\n /* Transmit buffer */\n\n if (write(i2c, ptr, len) != len)\n\n {\n\n perror(\"I2C write failed\");\n\n return I2C_ERROR;\n\n }\n\n\n\n return I2C_OK;\n", 
"file_path": "hal/kubos-hal/source/i2c.c", "rank": 72, "score": 52243.828682454274 }, { "content": "uint8_t quit;\n", "file_path": "examples/kubos-linux-tcprx/source/main.c", "rank": 73, "score": 52243.828682454274 }, { "content": "struct sigaction saio;\n", "file_path": "examples/kubos-linux-uartrx/source/main.c", "rank": 74, "score": 52243.828682454274 }, { "content": "int fd;\n", "file_path": "examples/kubos-linux-uartrx/source/main.c", "rank": 75, "score": 52243.828682454274 }, { "content": "int main(void)\n\n{\n\n const struct CMUnitTest tests[] = {\n\n cmocka_unit_test(test_no_init_write),\n\n cmocka_unit_test(test_no_init_read),\n\n cmocka_unit_test(test_init_write),\n\n cmocka_unit_test(test_init_write_null),\n\n cmocka_unit_test(test_init_read),\n\n cmocka_unit_test(test_init_read_null),\n\n cmocka_unit_test(test_init_write_read),\n\n cmocka_unit_test(test_init_term_write),\n\n cmocka_unit_test(test_init_term_read),\n\n cmocka_unit_test(test_init_term_write_read),\n\n cmocka_unit_test(test_init_term_init_write_read),\n\n };\n\n\n\n return cmocka_run_group_tests(tests, NULL, NULL);\n", "file_path": "hal/kubos-hal/test/i2c/i2c.c", "rank": 76, "score": 52243.828682454274 }, { "content": "int main(int argc, char * argv[])\n\n{\n\n\n\n int opt;\n\n char counter = 0;\n\n\n\n running = 1;\n\n\n\n /* Set up Ctrl+C interrupt handler for exiting cleanly */\n\n signal(SIGINT, sigint_handler);\n\n\n\n /* Open connection to receiver */\n\n uart_init();\n\n\n\n while (running)\n\n {\n\n\n\n char testmsg[] = \"Test Message nnn\\n\";\n\n int ret = 0;\n\n\n\n printf(\"Writing message %d\\n\", counter);\n\n\n\n sprintf(testmsg, \"Test Message %03d\\n\", counter++);\n\n\n\n\n\n if((ret = write(fd, testmsg, sizeof(testmsg)) < 0))\n\n {\n\n perror(\"Error from write\");\n\n break;\n\n }\n\n\n\n printf(\"Wrote %d bytes\\n\", ret);\n\n\n\n sleep(5);\n\n }\n\n\n\n /* Cleanup */\n\n uart_close();\n\n\n\n return 0;\n", "file_path": "examples/kubos-linux-uarttx/source/main.c", 
"rank": 77, "score": 52243.828682454274 }, { "content": "void k_i2c_terminate(int * fp)\n\n{\n\n if (fp == NULL || *fp == 0)\n\n {\n\n return;\n\n }\n\n\n\n close(*fp);\n\n *fp = 0;\n\n\n\n return;\n", "file_path": "hal/kubos-hal/source/i2c.c", "rank": 78, "score": 52243.828682454274 }, { "content": "KI2CStatus k_i2c_init(char * device, int * fp)\n\n{\n\n if (device == NULL || fp == NULL)\n\n {\n\n return I2C_ERROR;\n\n }\n\n\n\n char bus[] = \"/dev/i2c-n\\0\";\n\n // Make sure the device name is null terminated\n\n snprintf(bus, 11, \"%s\", device);\n\n *fp = open(bus, O_RDWR);\n\n\n\n if (*fp <= 0)\n\n {\n\n perror(\"Couldn't open I2C bus\");\n\n *fp = 0;\n\n return I2C_ERROR_CONFIG;\n\n }\n\n\n\n return I2C_OK;\n", "file_path": "hal/kubos-hal/source/i2c.c", "rank": 79, "score": 52243.828682454274 }, { "content": "int running;\n", "file_path": "examples/kubos-linux-uarttx/source/main.c", "rank": 80, "score": 52243.828682454274 }, { "content": "int main(int argc, char * argv[])\n\n{\n\n struct sockaddr_in server, client;\n\n int server_fd, client_fd, len;\n\n int status = 0;\n\n\n\n char buffer[256];\n\n char reply[] = \"Test message from server\\n\";\n\n\n\n quit = 0;\n\n\n\n /* Create an IPv4 socket (AF_INET) for TCP communication (SOCK_STREAM) */\n\n if ((server_fd = socket(AF_INET, SOCK_STREAM, 0)) == -1)\n\n {\n\n perror(\"Failed to get server socket\");\n\n exit(-1);\n\n }\n\n\n\n /* Setup the socket info, including the port to bind to */\n\n server.sin_family = AF_INET;\n\n server.sin_port = htons(PORT);\n\n server.sin_addr.s_addr = INADDR_ANY;\n\n\n\n /* Bind the new socket to the desired port */\n\n if ((bind(server_fd, (struct sockaddr *) &server, sizeof(struct sockaddr)))\n\n != 0)\n\n {\n\n perror(\"Failed to bind server socket\");\n\n exit(-1);\n\n }\n\n\n\n /* Listen for connections on this socket */\n\n if ((listen(server_fd, MAX_WAIT)) == -1)\n\n {\n\n perror(\"Failed to set up listener\");\n\n exit(-1);\n\n }\n\n\n\n /* Receive connections loop 
*/\n\n while (!quit)\n\n {\n\n socklen_t size = sizeof(struct sockaddr_in);\n\n\n\n if ((client_fd = accept(server_fd, (struct sockaddr *) &client, &size))\n\n == -1)\n\n {\n\n perror(\"Failed to accept client connection\");\n\n status = -1;\n\n break;\n\n }\n\n\n\n printf(\"Received connection from %s\\n\", inet_ntoa(client.sin_addr));\n\n\n\n /* Receive messages loop */\n\n while (1)\n\n {\n\n /* Receive message */\n\n if ((len = recv(client_fd, buffer, sizeof(buffer), 0)) == -1)\n\n {\n\n perror(\"Error while receiving message\");\n\n status = -1;\n\n quit = 1;\n\n break;\n\n }\n\n else if (len == 0)\n\n {\n\n printf(\"Connection closed by client\\n\");\n\n break;\n\n }\n\n\n\n buffer[len] = '\\0';\n\n printf(\"Message received: %s\\n\", buffer);\n\n\n\n /* Send reply */\n\n if ((send(client_fd, reply, strlen(reply), 0)) == -1)\n\n {\n\n perror(\"Error while sending message\");\n\n quit = 1;\n\n break;\n\n }\n\n else\n\n {\n\n printf(\"Message being sent: %s\\nNumber of bytes sent: %d\\n\",\n\n reply, strlen(reply));\n\n }\n\n } /* End of message receive loop */\n\n\n\n close(client_fd);\n\n } /* End of connections loop */\n\n\n\n close(server_fd);\n\n\n\n return status;\n", "file_path": "examples/kubos-linux-tcprx/source/main.c", "rank": 81, "score": 52243.828682454274 }, { "content": "struct sigaction saio;\n", "file_path": "examples/kubos-linux-uarttx/source/main.c", "rank": 82, "score": 52243.828682454274 }, { "content": "int fd;\n", "file_path": "examples/kubos-linux-uarttx/source/main.c", "rank": 83, "score": 52243.828682454274 }, { "content": "int main(int argc, char * argv[])\n\n{\n\n int client_fd, port, len;\n\n struct sockaddr_in server;\n\n\n\n char tx_buf[] = \"Test message!\\n\";\n\n char rx_buf[256] = {0};\n\n\n\n if (argc < 3)\n\n {\n\n fprintf(stderr, \"Usage: %s ip_addr port\\n\", argv[0]);\n\n exit(0);\n\n }\n\n\n\n /* Set socket type to IPv4 */\n\n server.sin_family = AF_INET;\n\n\n\n /* Convert text version of IP address to usable version 
*/\n\n if(inet_pton(AF_INET, argv[1], (void *) &server.sin_addr.s_addr) != 1)\n\n {\n\n perror(\"Error converting IP address\");\n\n exit(-1);\n\n }\n\n\n\n /* Convert text version of port */\n\n server.sin_port = htons(atoi(argv[2]));\n\n\n\n /* Open a socket to use for TCP communication */\n\n client_fd = socket(AF_INET, SOCK_STREAM, 0);\n\n if (client_fd < 0)\n\n {\n\n perror(\"Error getting socket\");\n\n exit(-1);\n\n }\n\n\n\n /* Connect to the endpoint */\n\n if (connect(client_fd, (struct sockaddr *) &server, sizeof(server)) < 0)\n\n {\n\n perror(\"Error connecting to address\");\n\n close(client_fd);\n\n exit(-1);\n\n }\n\n\n\n /* Send the message */\n\n len = write(client_fd, tx_buf, strlen(tx_buf));\n\n if (len < 0)\n\n {\n\n perror(\"Error writing to address\");\n\n close(client_fd);\n\n exit(-1);\n\n }\n\n\n\n /* Wait for the reply */\n\n len = read(client_fd, rx_buf, 255);\n\n if (len < 0)\n\n {\n\n perror(\"Error reading from socket\");\n\n close(client_fd);\n\n exit(-1);\n\n }\n\n\n\n printf(\"Reply received: %s\\n\", rx_buf);\n\n\n\n close(client_fd);\n\n\n\n return 0;\n", "file_path": "examples/kubos-linux-tcptx/source/main.c", "rank": 84, "score": 52243.828682454274 }, { "content": "/// Interface for retrieving iOBC supervisor version data\n\npub fn supervisor_version() -> Result<SupervisorVersion, String> {\n\n let mut version: ffi::supervisor_version = unsafe { mem::uninitialized() };\n\n let version_result = unsafe { ffi::supervisor_get_version(&mut version) };\n\n if !version_result {\n\n Err(String::from(\"Problem retrieving supervisor version\"))\n\n } else {\n\n Ok(convert_raw_version(&version))\n\n }\n\n}\n\n\n", "file_path": "apis/isis-iobc-supervisor/src/lib.rs", "rank": 85, "score": 51814.85287279464 }, { "content": "#[test]\n\nfn test_request_version_bad_response_crc() {\n\n let mut mock = MockStream::default();\n\n\n\n mock.write.set_input(vec![\n\n 0xAA, 0x44, 0x12, 0x1C, 0x1, 0x0, 0x0, 0xC0, 0x20, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 
0x0,\n\n 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x20, 0x0, 0x0, 0x0, 0x25, 0x0, 0x0,\n\n 0x0, 0x4, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0,\n\n 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x30, 0x8E, 0x33, 0x3C,\n\n ]);\n\n\n\n mock.read.set_output(vec![\n\n 0xAA, 0x44, 0x12, 0x1C, 0x1, 0x0, 0x80, 0x20, 0x6, 0x0, 0x0, 0x0, 0xFF, 0x78, 0xD1, 0xB,\n\n 0x6E, 0x5D, 0xC9, 0x9, 0x0, 0x0, 0x0, 0x0, 0xFB, 0xFD, 0x0, 0x0, 0x1, 0x0, 0x0, 0x0, 0x4F,\n\n 0x4B, 0x92, 0x8F, 0x77, 0x4B,\n\n ]);\n\n\n\n let oem = mock_new!(mock);\n\n\n\n assert_eq!(oem.request_version().unwrap_err(), OEMError::NoResponse);\n\n}\n\n\n", "file_path": "apis/novatel-oem6-api/src/tests/version.rs", "rank": 86, "score": 51809.42363818341 }, { "content": "void sigio_handler(int sig)\n\n{\n\n printf(\"Received data from UART\\n\");\n\n uart_read();\n", "file_path": "examples/kubos-linux-uartrx/source/main.c", "rank": 87, "score": 51175.31787716666 }, { "content": "int uart_close(void)\n\n{\n\n close(fd);\n", "file_path": "examples/kubos-linux-uartrx/source/main.c", "rank": 88, "score": 51175.31787716666 }, { "content": "static char uart_buf[256];\n", "file_path": "examples/kubos-linux-uartrx/source/main.c", "rank": 89, "score": 51175.31787716666 }, { "content": "#define MAX_WAIT 5\n\n\n", "file_path": "examples/kubos-linux-tcprx/source/main.c", "rank": 90, "score": 51175.31787716666 }, { "content": "int uart_read(void)\n\n{\n\n\n\n int rdlen;\n\n\n\n /* Make sure the buffer is cleared from the last time we read */\n\n memset(uart_buf, 0, sizeof(uart_buf));\n\n\n\n /* Read as much data as is available */\n\n rdlen = read(fd, uart_buf, sizeof(uart_buf) - 1);\n\n\n\n if (rdlen < 0)\n\n {\n\n perror(\"Error from read\");\n\n\n\n return -1;\n\n }\n\n\n\n printf(\"Received(%d): %s\\n\", rdlen, uart_buf);\n\n\n\n return 0;\n", "file_path": "examples/kubos-linux-uartrx/source/main.c", "rank": 91, "score": 51175.31787716666 }, { "content": "void sigint_handler(int 
sig)\n\n{\n\n running = 0;\n\n sleep(2);\n\n signal(SIGINT, SIG_DFL);\n\n kill(getpid(), SIGINT);\n", "file_path": "examples/kubos-linux-uarttx/source/main.c", "rank": 92, "score": 51175.31787716666 }, { "content": "int uart_init(void)\n\n{\n\n char * device = \"/dev/ttyS1\";\n\n speed_t speed = B115200;\n\n struct termios tty;\n\n\n\n /*\n\n * Open UART (terminal)\n\n * O_RDONLY - Read only\n\n * O_NOCTTY - This terminal should not be the controlling terminal of the\n\n * process (that should still be the debug UART)\n\n * O_NDELAY - Open file in non-blocking mode\n\n * O_ASYNC - Enable SIGIO generation when data is received\n\n */\n\n fd = open(device, O_RDONLY | O_NOCTTY | O_NDELAY | O_ASYNC);\n\n if (fd < 0)\n\n {\n\n perror(\"** Error opening device\");\n\n return -1;\n\n }\n\n\n\n /* Set this process as the PID that should receive the SIGIO signals on\n\n * this file */\n\n fcntl(fd, F_SETOWN, getpid());\n\n\n\n /*\n\n * Get the current terminal settings. There are a bunch, and we only want\n\n * to update a few, so we'll preserve everything else that's currently\n\n * set.\n\n */\n\n if (tcgetattr(fd, &tty) < 0)\n\n {\n\n perror(\"Error from tcgetattr\");\n\n return -1;\n\n }\n\n\n\n /* Set input baudrate */\n\n cfsetispeed(&tty, speed);\n\n\n\n /* Character processing flags */\n\n tty.c_cflag |= (CLOCAL | CREAD); /* Ignore modem controls */\n\n tty.c_cflag &= ~CSIZE; /* Clear current char size mask */\n\n tty.c_cflag |= CS8; /* 8-bit characters */\n\n tty.c_cflag &= ~PARENB; /* No parity bit */\n\n tty.c_cflag &= ~CSTOPB; /* 1 stop bit */\n\n tty.c_cflag &= ~CRTSCTS; /* No hardware flowcontrol */\n\n\n\n /* Input flags */\n\n tty.c_iflag &= ~(IGNBRK | /* Don't ignore BREAK conditions */\n\n BRKINT | /* Read BREAKs as null bytes */\n\n ICRNL | /* Do not translate carriage returns */\n\n INLCR | /* Do not translate NL to CR */\n\n PARMRK | /* Do not mark errors */\n\n INPCK | /* Turn off parity checking */\n\n ISTRIP | /* Do not strip off eighth bit */\n\n 
IXON); /* Turn off flow control */\n\n\n\n tty.c_lflag &= ~ICANON | ECHO; /* Non-canonical (raw) input */\n\n\n\n tty.c_cc[VMIN] = 18; /* We know that we're looking for an 18 byte message\n\n \"Test message nnn\" */\n\n tty.c_cc[VTIME] = 2; /* But give a 2/10th second timeout in case something\n\n goes wrong mid-read */\n\n\n\n /* Clear anything that's currently in the UART buffers */\n\n tcflush(fd, TCIOFLUSH);\n\n\n\n /* Update the terminal settings */\n\n if (tcsetattr(fd, TCSANOW, &tty) != 0)\n\n {\n\n perror(\"Error from tcsetattr\");\n\n return -1;\n\n }\n\n\n\n return 0;\n", "file_path": "examples/kubos-linux-uartrx/source/main.c", "rank": 93, "score": 51175.31787716666 }, { "content": "int uart_close(void)\n\n{\n\n close(fd);\n", "file_path": "examples/kubos-linux-uarttx/source/main.c", "rank": 94, "score": 51175.31787716666 }, { "content": "int __wrap_open(const char * filename, int flags)\n\n{\n\n test_char = 0;\n\n return mock_type(int);\n", "file_path": "hal/kubos-hal/test/i2c/sysfs.c", "rank": 95, "score": 51175.31787716666 }, { "content": "int __wrap_close(int fd)\n\n{\n\n return mock_type(int);\n", "file_path": "hal/kubos-hal/test/i2c/sysfs.c", "rank": 96, "score": 51175.31787716666 }, { "content": "int uart_init(void)\n\n{\n\n char * device = \"/dev/ttyS3\";\n\n struct termios uart;\n\n speed_t speed = B115200;\n\n\n\n /*\n\n * Open UART (terminal)\n\n * O_WRONLY - Write only\n\n * O_NOCTTY - This terminal should not be the controlling terminal of the\n\n * process\n\n * (That should still be the debug UART)\n\n * O_DSYNC - Writes should block until all data has been successfully\n\n * written out of the buffer to the underlying hardware\n\n */\n\n fd = open(device, O_WRONLY | O_NOCTTY | O_DSYNC);\n\n if (fd < 0)\n\n {\n\n perror(\"Error opening device\");\n\n return -1;\n\n }\n\n\n\n /*\n\n * Get the current terminal settings. 
There are a bunch, and we only want\n\n * to update a few, so we'll preserve everything else that's currently set.\n\n */\n\n if (tcgetattr(fd, &uart) < 0)\n\n {\n\n perror(\"Error from tcgetattr\");\n\n return -1;\n\n }\n\n\n\n /* Set output baudrate */\n\n cfsetospeed(&uart, speed);\n\n\n\n /* Character processing flags */\n\n uart.c_cflag |= (CLOCAL | CREAD); /* Ignore modem controls */\n\n uart.c_cflag &= ~CSIZE; /* Clear current char size mask */\n\n uart.c_cflag |= CS8; /* 8-bit characters */\n\n uart.c_cflag &= ~PARENB; /* No parity bit */\n\n uart.c_cflag &= ~CSTOPB; /* 1 stop bit */\n\n uart.c_cflag &= ~CRTSCTS; /* No hardware flowcontrol */\n\n\n\n /* Output flags */\n\n uart.c_oflag = 0; /* Raw output */\n\n\n\n /* Clear anything that's currently in the UART buffers */\n\n tcflush(fd, TCIOFLUSH);\n\n\n\n /* Update the terminal settings */\n\n if (tcsetattr(fd, TCSANOW, &uart) != 0)\n\n {\n\n perror(\"Error from tcsetattr\");\n\n return -1;\n\n }\n\n\n\n return 0;\n", "file_path": "examples/kubos-linux-uarttx/source/main.c", "rank": 97, "score": 51175.31787716666 }, { "content": "void sigint_handler(int sig)\n\n{\n\n running = 0;\n\n sleep(2);\n\n signal(SIGINT, SIG_DFL);\n\n kill(getpid(), SIGINT);\n", "file_path": "examples/kubos-linux-uartrx/source/main.c", "rank": 98, "score": 51175.31787716666 }, { "content": "#define TEST_ADDR 0x50\n\n\n", "file_path": "hal/kubos-hal/test/i2c/i2c.c", "rank": 99, "score": 51175.31787716666 } ]
Rust
examples/todomvc/src/store.rs
tlively/wasm-bindgen
64e53a5502d92143ff312d461c286cc22522c8ae
use js_sys::JSON; use wasm_bindgen::prelude::*; pub struct Store { local_storage: web_sys::Storage, data: ItemList, name: String, } impl Store { pub fn new(name: &str) -> Option<Store> { let window = web_sys::window()?; if let Ok(Some(local_storage)) = window.local_storage() { let mut store = Store { local_storage, data: ItemList::new(), name: String::from(name), }; store.fetch_local_storage(); Some(store) } else { None } } fn fetch_local_storage(&mut self) -> Option<()> { let mut item_list = ItemList::new(); if let Ok(Some(value)) = self.local_storage.get_item(&self.name) { let data = JSON::parse(&value).ok()?; let iter = js_sys::try_iter(&data).ok()??; for item in iter { let item = item.ok()?; let item_array: &js_sys::Array = wasm_bindgen::JsCast::dyn_ref(&item)?; let title = item_array.shift().as_string()?; let completed = item_array.shift().as_bool()?; let id = item_array.shift().as_string()?; let temp_item = Item { title, completed, id, }; item_list.push(temp_item); } } self.data = item_list; Some(()) } fn sync_local_storage(&mut self) { let array = js_sys::Array::new(); for item in self.data.iter() { let child = js_sys::Array::new(); child.push(&JsValue::from(&item.title)); child.push(&JsValue::from(item.completed)); child.push(&JsValue::from(&item.id)); array.push(&JsValue::from(child)); } if let Ok(storage_string) = JSON::stringify(&JsValue::from(array)) { let storage_string: String = storage_string.into(); self.local_storage .set_item(&self.name, &storage_string) .unwrap(); } } pub fn find(&mut self, query: ItemQuery) -> Option<ItemListSlice<'_>> { Some( self.data .iter() .filter(|todo| query.matches(todo)) .collect(), ) } pub fn update(&mut self, update: ItemUpdate) { let id = update.id(); self.data.iter_mut().for_each(|todo| { if id == todo.id { todo.update(&update); } }); self.sync_local_storage(); } pub fn insert(&mut self, item: Item) { self.data.push(item); self.sync_local_storage(); } pub fn remove(&mut self, query: ItemQuery) { 
self.data.retain(|todo| !query.matches(todo)); self.sync_local_storage(); } pub fn count(&mut self) -> Option<(usize, usize, usize)> { self.find(ItemQuery::EmptyItemQuery).map(|data| { let total = data.length(); let mut completed = 0; for item in data.iter() { if item.completed { completed += 1; } } (total, total - completed, completed) }) } } pub struct Item { pub id: String, pub title: String, pub completed: bool, } impl Item { pub fn update(&mut self, update: &ItemUpdate) { match update { ItemUpdate::Title { title, .. } => { self.title = title.to_string(); } ItemUpdate::Completed { completed, .. } => { self.completed = *completed; } } } } pub trait ItemListTrait<T> { fn new() -> Self; fn get(&self, i: usize) -> Option<&T>; fn length(&self) -> usize; fn push(&mut self, item: T); fn iter(&self) -> std::slice::Iter<'_, T>; } pub struct ItemList { list: Vec<Item>, } impl ItemList { fn retain<F>(&mut self, f: F) where F: FnMut(&Item) -> bool, { self.list.retain(f); } fn iter_mut(&mut self) -> std::slice::IterMut<'_, Item> { self.list.iter_mut() } } impl ItemListTrait<Item> for ItemList { fn new() -> ItemList { ItemList { list: Vec::new() } } fn get(&self, i: usize) -> Option<&Item> { self.list.get(i) } fn length(&self) -> usize { self.list.len() } fn push(&mut self, item: Item) { self.list.push(item) } fn iter(&self) -> std::slice::Iter<'_, Item> { self.list.iter() } } use std::iter::FromIterator; impl<'a> FromIterator<Item> for ItemList { fn from_iter<I: IntoIterator<Item = Item>>(iter: I) -> Self { let mut c = ItemList::new(); for i in iter { c.push(i); } c } } pub struct ItemListSlice<'a> { list: Vec<&'a Item>, } impl<'a> ItemListTrait<&'a Item> for ItemListSlice<'a> { fn new() -> ItemListSlice<'a> { ItemListSlice { list: Vec::new() } } fn get(&self, i: usize) -> Option<&&'a Item> { self.list.get(i) } fn length(&self) -> usize { self.list.len() } fn push(&mut self, item: &'a Item) { self.list.push(item) } fn iter(&self) -> std::slice::Iter<'_, &'a Item> { 
self.list.iter() } } impl<'a> FromIterator<&'a Item> for ItemListSlice<'a> { fn from_iter<I: IntoIterator<Item = &'a Item>>(iter: I) -> Self { let mut c = ItemListSlice::new(); for i in iter { c.push(i); } c } } impl<'a> Into<ItemList> for ItemListSlice<'a> { fn into(self) -> ItemList { let mut i = ItemList::new(); let items = self.list.into_iter(); for j in items { let item = Item { id: j.id.clone(), completed: j.completed, title: j.title.clone(), }; i.push(item); } i } } pub enum ItemQuery { Id { id: String }, Completed { completed: bool }, EmptyItemQuery, } impl ItemQuery { fn matches(&self, item: &Item) -> bool { match *self { ItemQuery::EmptyItemQuery => true, ItemQuery::Id { ref id } => &item.id == id, ItemQuery::Completed { completed } => item.completed == completed, } } } pub enum ItemUpdate { Title { id: String, title: String }, Completed { id: String, completed: bool }, } impl ItemUpdate { fn id(&self) -> String { match self { ItemUpdate::Title { id, .. } => id.clone(), ItemUpdate::Completed { id, .. } => id.clone(), } } }
use js_sys::JSON; use wasm_bindgen::prelude::*; pub struct Store { local_storage: web_sys::Storage, data: ItemList, name: String, } impl Store { pub fn new(name: &str) -> Option<Store> { let window = web_sys::window()?; if let Ok(Some(local_storage)) = window.local_storage() { let mut store = Store { local_storage, data: ItemList::new(), name: String::from(name), }; store.fetch_local_storage(); Some(store) } else { None } } fn fetch_local_storage(&mut self) -> Option<()> { let mut item_list = ItemList::new();
self.data = item_list; Some(()) } fn sync_local_storage(&mut self) { let array = js_sys::Array::new(); for item in self.data.iter() { let child = js_sys::Array::new(); child.push(&JsValue::from(&item.title)); child.push(&JsValue::from(item.completed)); child.push(&JsValue::from(&item.id)); array.push(&JsValue::from(child)); } if let Ok(storage_string) = JSON::stringify(&JsValue::from(array)) { let storage_string: String = storage_string.into(); self.local_storage .set_item(&self.name, &storage_string) .unwrap(); } } pub fn find(&mut self, query: ItemQuery) -> Option<ItemListSlice<'_>> { Some( self.data .iter() .filter(|todo| query.matches(todo)) .collect(), ) } pub fn update(&mut self, update: ItemUpdate) { let id = update.id(); self.data.iter_mut().for_each(|todo| { if id == todo.id { todo.update(&update); } }); self.sync_local_storage(); } pub fn insert(&mut self, item: Item) { self.data.push(item); self.sync_local_storage(); } pub fn remove(&mut self, query: ItemQuery) { self.data.retain(|todo| !query.matches(todo)); self.sync_local_storage(); } pub fn count(&mut self) -> Option<(usize, usize, usize)> { self.find(ItemQuery::EmptyItemQuery).map(|data| { let total = data.length(); let mut completed = 0; for item in data.iter() { if item.completed { completed += 1; } } (total, total - completed, completed) }) } } pub struct Item { pub id: String, pub title: String, pub completed: bool, } impl Item { pub fn update(&mut self, update: &ItemUpdate) { match update { ItemUpdate::Title { title, .. } => { self.title = title.to_string(); } ItemUpdate::Completed { completed, .. 
} => { self.completed = *completed; } } } } pub trait ItemListTrait<T> { fn new() -> Self; fn get(&self, i: usize) -> Option<&T>; fn length(&self) -> usize; fn push(&mut self, item: T); fn iter(&self) -> std::slice::Iter<'_, T>; } pub struct ItemList { list: Vec<Item>, } impl ItemList { fn retain<F>(&mut self, f: F) where F: FnMut(&Item) -> bool, { self.list.retain(f); } fn iter_mut(&mut self) -> std::slice::IterMut<'_, Item> { self.list.iter_mut() } } impl ItemListTrait<Item> for ItemList { fn new() -> ItemList { ItemList { list: Vec::new() } } fn get(&self, i: usize) -> Option<&Item> { self.list.get(i) } fn length(&self) -> usize { self.list.len() } fn push(&mut self, item: Item) { self.list.push(item) } fn iter(&self) -> std::slice::Iter<'_, Item> { self.list.iter() } } use std::iter::FromIterator; impl<'a> FromIterator<Item> for ItemList { fn from_iter<I: IntoIterator<Item = Item>>(iter: I) -> Self { let mut c = ItemList::new(); for i in iter { c.push(i); } c } } pub struct ItemListSlice<'a> { list: Vec<&'a Item>, } impl<'a> ItemListTrait<&'a Item> for ItemListSlice<'a> { fn new() -> ItemListSlice<'a> { ItemListSlice { list: Vec::new() } } fn get(&self, i: usize) -> Option<&&'a Item> { self.list.get(i) } fn length(&self) -> usize { self.list.len() } fn push(&mut self, item: &'a Item) { self.list.push(item) } fn iter(&self) -> std::slice::Iter<'_, &'a Item> { self.list.iter() } } impl<'a> FromIterator<&'a Item> for ItemListSlice<'a> { fn from_iter<I: IntoIterator<Item = &'a Item>>(iter: I) -> Self { let mut c = ItemListSlice::new(); for i in iter { c.push(i); } c } } impl<'a> Into<ItemList> for ItemListSlice<'a> { fn into(self) -> ItemList { let mut i = ItemList::new(); let items = self.list.into_iter(); for j in items { let item = Item { id: j.id.clone(), completed: j.completed, title: j.title.clone(), }; i.push(item); } i } } pub enum ItemQuery { Id { id: String }, Completed { completed: bool }, EmptyItemQuery, } impl ItemQuery { fn matches(&self, item: &Item) 
-> bool { match *self { ItemQuery::EmptyItemQuery => true, ItemQuery::Id { ref id } => &item.id == id, ItemQuery::Completed { completed } => item.completed == completed, } } } pub enum ItemUpdate { Title { id: String, title: String }, Completed { id: String, completed: bool }, } impl ItemUpdate { fn id(&self) -> String { match self { ItemUpdate::Title { id, .. } => id.clone(), ItemUpdate::Completed { id, .. } => id.clone(), } } }
if let Ok(Some(value)) = self.local_storage.get_item(&self.name) { let data = JSON::parse(&value).ok()?; let iter = js_sys::try_iter(&data).ok()??; for item in iter { let item = item.ok()?; let item_array: &js_sys::Array = wasm_bindgen::JsCast::dyn_ref(&item)?; let title = item_array.shift().as_string()?; let completed = item_array.shift().as_bool()?; let id = item_array.shift().as_string()?; let temp_item = Item { title, completed, id, }; item_list.push(temp_item); } }
if_condition
[ { "content": "#[wasm_bindgen]\n\npub fn return_optional_str_none() -> Option<String> {\n\n None\n\n}\n\n\n", "file_path": "tests/wasm/simple.rs", "rank": 0, "score": 469336.6002525708 }, { "content": "#[wasm_bindgen]\n\npub fn take_optional_str_none(x: Option<String>) {\n\n assert!(x.is_none())\n\n}\n", "file_path": "tests/wasm/simple.rs", "rank": 1, "score": 459275.6242489594 }, { "content": "pub fn struct_function_export_name(struct_: &str, f: &str) -> String {\n\n let mut name = struct_\n\n .chars()\n\n .flat_map(|s| s.to_lowercase())\n\n .collect::<String>();\n\n name.push_str(\"_\");\n\n name.push_str(f);\n\n return name;\n\n}\n\n\n", "file_path": "crates/shared/src/lib.rs", "rank": 2, "score": 455763.4953605319 }, { "content": "pub fn new_function(struct_name: &str) -> String {\n\n let mut name = format!(\"__wbg_\");\n\n name.extend(struct_name.chars().flat_map(|s| s.to_lowercase()));\n\n name.push_str(\"_new\");\n\n return name;\n\n}\n\n\n", "file_path": "crates/shared/src/lib.rs", "rank": 3, "score": 441355.5693009839 }, { "content": "pub fn free_function(struct_name: &str) -> String {\n\n let mut name = format!(\"__wbg_\");\n\n name.extend(struct_name.chars().flat_map(|s| s.to_lowercase()));\n\n name.push_str(\"_free\");\n\n return name;\n\n}\n\n\n", "file_path": "crates/shared/src/lib.rs", "rank": 4, "score": 441355.5693009839 }, { "content": "#[wasm_bindgen]\n\npub fn return_optional_str_some() -> Option<String> {\n\n Some(\"world\".to_string())\n\n}\n\n\n", "file_path": "tests/wasm/simple.rs", "rank": 5, "score": 414238.73459533905 }, { "content": "#[wasm_bindgen]\n\npub fn take_optional_str_some(x: Option<String>) {\n\n assert_eq!(x, Some(String::from(\"hello\")));\n\n}\n\n\n", "file_path": "tests/wasm/simple.rs", "rank": 6, "score": 404177.7585917276 }, { "content": "pub fn struct_field_set(struct_: &str, f: &str) -> String {\n\n let mut name = String::from(\"__wbg_set_\");\n\n name.extend(struct_.chars().flat_map(|s| s.to_lowercase()));\n\n 
name.push_str(\"_\");\n\n name.push_str(f);\n\n return name;\n\n}\n\n\n", "file_path": "crates/shared/src/lib.rs", "rank": 7, "score": 402671.2076873514 }, { "content": "pub fn struct_field_get(struct_: &str, f: &str) -> String {\n\n let mut name = String::from(\"__wbg_get_\");\n\n name.extend(struct_.chars().flat_map(|s| s.to_lowercase()));\n\n name.push_str(\"_\");\n\n name.push_str(f);\n\n return name;\n\n}\n\n\n", "file_path": "crates/shared/src/lib.rs", "rank": 8, "score": 402671.2076873514 }, { "content": "// Returns a link to MDN\n\npub fn mdn_doc(class: &str, method: Option<&str>) -> String {\n\n let mut link = format!(\"https://developer.mozilla.org/en-US/docs/Web/API/{}\", class);\n\n if let Some(method) = method {\n\n link.push_str(&format!(\"/{}\", method));\n\n }\n\n format!(\"[MDN Documentation]({})\", link).into()\n\n}\n\n\n\n// Array type is borrowed for arguments (`&mut [T]` or `&[T]`) and owned for return value (`Vec<T>`).\n\npub(crate) fn array(base_ty: &str, pos: TypePosition, immutable: bool) -> syn::Type {\n\n match pos {\n\n TypePosition::Argument => {\n\n shared_ref(\n\n slice_ty(ident_ty(raw_ident(base_ty))),\n\n /*mutable =*/ !immutable,\n\n )\n\n }\n\n TypePosition::Return => vec_ty(ident_ty(raw_ident(base_ty))),\n\n }\n\n}\n\n\n", "file_path": "crates/webidl/src/util.rs", "rank": 9, "score": 396958.69297891716 }, { "content": "pub fn free_function_export_name(function_name: &str) -> String {\n\n function_name.to_string()\n\n}\n\n\n", "file_path": "crates/shared/src/lib.rs", "rank": 10, "score": 388680.56359303783 }, { "content": "#[cfg(feature = \"Window\")]\n\npub fn window() -> Option<Window> {\n\n use wasm_bindgen::JsCast;\n\n\n\n js_sys::global().dyn_into::<Window>().ok()\n\n}\n", "file_path": "crates/web-sys/src/lib.rs", "rank": 11, "score": 367143.7479846036 }, { "content": "#[wasm_bindgen]\n\npub fn named_struct_by_exclusive_ref(x: &mut ExportedNamedStruct) {}\n\n\n", "file_path": 
"examples/guide-supported-types-examples/src/exported_types.rs", "rank": 12, "score": 364996.9801025861 }, { "content": "#[wasm_bindgen]\n\npub fn return_option_string() -> Option<String> {\n\n None\n\n}\n", "file_path": "examples/guide-supported-types-examples/src/string.rs", "rank": 13, "score": 361619.47135509516 }, { "content": "#[wasm_bindgen]\n\npub fn take_option_string(x: Option<String>) {}\n\n\n", "file_path": "examples/guide-supported-types-examples/src/string.rs", "rank": 14, "score": 351558.49535148375 }, { "content": "fn get_features_doc(options: &Options, name: String) -> Option<String> {\n\n let mut features = BTreeSet::new();\n\n features.insert(name);\n\n required_doc_string(options, &features)\n\n}\n\n\n", "file_path": "crates/webidl/src/generator.rs", "rank": 15, "score": 348778.5423576723 }, { "content": "#[wasm_bindgen]\n\npub fn greet(name: String) {\n\n log(&format!(\"Hello from {}!\", name)); // should output \"Hello from Rust!\"\n\n\n\n let x = MyClass::new();\n\n assert_eq!(x.number(), add(40, 2));\n\n test();\n\n log(&x.render());\n\n}\n", "file_path": "examples/deno/src/lib.rs", "rank": 16, "score": 345272.41594739154 }, { "content": "#[wasm_bindgen]\n\npub fn strings(a: &str) -> String {\n\n String::new()\n\n}\n\n\n", "file_path": "crates/cli/tests/reference/interface-types-strings.rs", "rank": 17, "score": 344041.326979773 }, { "content": "#[wasm_bindgen]\n\npub fn greet(name: &str) {\n\n alert(&format!(\"Hello, {}!\", name));\n\n}\n", "file_path": "examples/hello_world/src/lib.rs", "rank": 18, "score": 341732.52757096285 }, { "content": "fn get_string(data: &mut &[u32]) -> String {\n\n (0..get(data))\n\n .map(|_| char::from_u32(get(data)).unwrap())\n\n .collect()\n\n}\n\n\n\nimpl Closure {\n\n fn decode(data: &mut &[u32]) -> Closure {\n\n let shim_idx = get(data);\n\n let dtor_idx = get(data);\n\n let mutable = get(data) == REFMUT;\n\n assert_eq!(get(data), FUNCTION);\n\n Closure {\n\n shim_idx,\n\n dtor_idx,\n\n mutable,\n\n 
function: Function::decode(data),\n\n }\n\n }\n\n}\n", "file_path": "crates/cli-support/src/descriptor.rs", "rank": 19, "score": 338924.11230560933 }, { "content": "#[wasm_bindgen]\n\npub fn str_roundtrip(s: String) -> String {\n\n s\n\n}\n", "file_path": "benchmarks/src/lib.rs", "rank": 20, "score": 336489.3845878228 }, { "content": "#[wasm_bindgen]\n\npub fn simple_clone(a: &str) -> String {\n\n a.to_string()\n\n}\n\n\n", "file_path": "tests/wasm/simple.rs", "rank": 21, "score": 335121.05390299484 }, { "content": "pub fn required_doc_string(options: &Options, features: &BTreeSet<String>) -> Option<String> {\n\n if !options.features || features.len() == 0 {\n\n return None;\n\n }\n\n let list = features\n\n .iter()\n\n .map(|ident| format!(\"`{}`\", ident))\n\n .collect::<Vec<_>>()\n\n .join(\", \");\n\n Some(format!(\n\n \"\\n\\n*This API requires the following crate features \\\n\n to be activated: {}*\",\n\n list,\n\n ))\n\n}\n\n\n", "file_path": "crates/webidl/src/util.rs", "rank": 22, "score": 332843.5049572462 }, { "content": "#[wasm_bindgen]\n\npub fn many_strings(a: &str, b: String) {}\n", "file_path": "crates/cli/tests/reference/interface-types-strings.rs", "rank": 23, "score": 332802.80408870603 }, { "content": "#[wasm_bindgen]\n\npub fn optional_u32_none() -> Option<u32> {\n\n None\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 24, "score": 330260.6138211163 }, { "content": "#[wasm_bindgen]\n\npub fn optional_i64_none() -> Option<i64> {\n\n None\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 25, "score": 330260.6138211163 }, { "content": "#[wasm_bindgen]\n\npub fn optional_u8_none() -> Option<u8> {\n\n None\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 26, "score": 330260.6138211163 }, { "content": "#[wasm_bindgen]\n\npub fn optional_char_none() -> Option<char> {\n\n None\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 27, "score": 330260.6138211163 }, { 
"content": "#[wasm_bindgen]\n\npub fn optional_usize_none() -> Option<usize> {\n\n None\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 28, "score": 330260.61382111616 }, { "content": "#[wasm_bindgen]\n\npub fn option_class_none() -> Option<OptionClass> {\n\n None\n\n}\n\n\n", "file_path": "tests/wasm/classes.rs", "rank": 29, "score": 330260.61382111616 }, { "content": "#[wasm_bindgen]\n\npub fn optional_u16_none() -> Option<u16> {\n\n None\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 30, "score": 330260.61382111616 }, { "content": "#[wasm_bindgen]\n\npub fn optional_i8_none() -> Option<i8> {\n\n None\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 31, "score": 330260.61382111616 }, { "content": "#[wasm_bindgen]\n\npub fn optional_u64_none() -> Option<u64> {\n\n None\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 32, "score": 330260.61382111616 }, { "content": "#[wasm_bindgen]\n\npub fn optional_f64_none() -> Option<f64> {\n\n None\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 33, "score": 330260.61382111616 }, { "content": "#[wasm_bindgen]\n\npub fn optional_i32_none() -> Option<i32> {\n\n None\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 34, "score": 330260.6138211163 }, { "content": "#[wasm_bindgen]\n\npub fn optional_bool_none() -> Option<bool> {\n\n None\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 35, "score": 330260.61382111616 }, { "content": "#[wasm_bindgen]\n\npub fn optional_isize_none() -> Option<isize> {\n\n None\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 36, "score": 330260.61382111616 }, { "content": "#[wasm_bindgen]\n\npub fn optional_f32_none() -> Option<f32> {\n\n None\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 37, "score": 330260.6138211163 }, { "content": "#[wasm_bindgen]\n\npub fn optional_i16_none() -> Option<i16> 
{\n\n None\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 38, "score": 330260.61382111616 }, { "content": "// Unescapes a quoted string. char::escape_debug() was used to escape the text.\n\nfn try_unescape(s: &str) -> Option<String> {\n\n if s.is_empty() {\n\n return Some(String::new());\n\n }\n\n let mut result = String::with_capacity(s.len());\n\n let mut chars = s.chars();\n\n for i in 0.. {\n\n let c = match chars.next() {\n\n Some(c) => c,\n\n None => {\n\n if result.ends_with('\"') {\n\n result.pop();\n\n }\n\n return Some(result);\n\n }\n\n };\n\n if i == 0 && c == '\"' {\n\n // ignore it\n\n } else if c == '\\\\' {\n\n let c = chars.next()?;\n", "file_path": "crates/macro-support/src/parser.rs", "rank": 39, "score": 328383.60897058155 }, { "content": "// Create an `Ident`, possibly mangling it if it conflicts with a Rust keyword.\n\npub fn rust_ident(name: &str) -> Ident {\n\n if name == \"\" {\n\n panic!(\"tried to create empty Ident (from \\\"\\\")\");\n\n } else if is_rust_keyword(name) {\n\n Ident::new(&format!(\"{}_\", name), proc_macro2::Span::call_site())\n\n\n\n // we didn't historically have `async` in the `is_rust_keyword` list above,\n\n // so for backwards compatibility reasons we need to generate an `async`\n\n // identifier as well, but we'll be sure to use a raw identifier to ease\n\n // compatibility with the 2018 edition.\n\n //\n\n // Note, though, that `proc-macro` doesn't support a normal way to create a\n\n // raw identifier. 
To get around that we do some wonky parsing to\n\n // roundaboutly create one.\n\n } else if name == \"async\" {\n\n let ident = \"r#async\"\n\n .parse::<proc_macro2::TokenStream>()\n\n .unwrap()\n\n .into_iter()\n\n .next()\n", "file_path": "crates/backend/src/util.rs", "rank": 40, "score": 328144.0446592521 }, { "content": "// Create an `Ident` without checking to see if it conflicts with a Rust\n\n// keyword.\n\npub fn raw_ident(name: &str) -> Ident {\n\n Ident::new(name, proc_macro2::Span::call_site())\n\n}\n\n\n", "file_path": "crates/backend/src/util.rs", "rank": 41, "score": 328144.0446592521 }, { "content": "#[wasm_bindgen]\n\npub fn simple_concat(a: &str, b: &str, c: i8) -> String {\n\n format!(\"{} {} {}\", a, b, c)\n\n}\n\n\n", "file_path": "tests/wasm/simple.rs", "rank": 42, "score": 326053.9658800631 }, { "content": "/// Convert an identifier to camel case\n\npub fn camel_case_ident(identifier: &str) -> String {\n\n fix_ident(identifier).to_camel_case()\n\n}\n\n\n", "file_path": "crates/webidl/src/util.rs", "rank": 43, "score": 324604.0302814118 }, { "content": "/// Convert an identifier to snake case\n\npub fn snake_case_ident(identifier: &str) -> String {\n\n fix_ident(identifier).to_snake_case()\n\n}\n\n\n", "file_path": "crates/webidl/src/util.rs", "rank": 44, "score": 324604.0302814117 }, { "content": "#[wasm_bindgen]\n\npub fn rust_return_none() -> Option<Options> {\n\n None\n\n}\n\n\n\n/// doc\n", "file_path": "tests/wasm/import_class.rs", "rank": 45, "score": 324442.3114211764 }, { "content": "#[wasm_bindgen]\n\npub fn rust_return_none_byval() -> Option<MyType> {\n\n None\n\n}\n\n\n", "file_path": "tests/wasm/option.rs", "rank": 46, "score": 324442.3114211764 }, { "content": "fn interpret(wat: &str, name: &str, result: Option<&[u32]>) {\n\n let wasm = wat::parse_str(wat).unwrap();\n\n let module = walrus::Module::from_buffer(&wasm).unwrap();\n\n let mut i = Interpreter::new(&module).unwrap();\n\n let id = module\n\n .exports\n\n .iter()\n\n 
.filter(|e| e.name == name)\n\n .filter_map(|e| match e.item {\n\n walrus::ExportItem::Function(f) => Some(f),\n\n _ => None,\n\n })\n\n .next()\n\n .unwrap();\n\n assert_eq!(i.interpret_descriptor(id, &module), result);\n\n}\n\n\n", "file_path": "crates/wasm-interpreter/tests/smoke.rs", "rank": 47, "score": 322541.29136652115 }, { "content": "fn comment(mut comment: String, features: &Option<String>) -> TokenStream {\n\n if let Some(s) = features {\n\n comment.push_str(s);\n\n }\n\n\n\n let lines = comment.lines().map(|doc| quote!( #[doc = #doc] ));\n\n\n\n quote! {\n\n #(#lines)*\n\n }\n\n}\n\n\n", "file_path": "crates/webidl/src/generator.rs", "rank": 48, "score": 322436.30163526576 }, { "content": "/// Convert an identifier to shouty snake case\n\npub fn shouty_snake_case_ident(identifier: &str) -> String {\n\n fix_ident(identifier).to_shouty_snake_case()\n\n}\n\n\n", "file_path": "crates/webidl/src/util.rs", "rank": 49, "score": 321297.7241612591 }, { "content": "fn record(args: &Array, dst: impl FnOnce(&mut Output) -> &mut String) {\n\n if !CURRENT_OUTPUT.is_set() {\n\n return;\n\n }\n\n\n\n CURRENT_OUTPUT.with(|output| {\n\n let mut out = output.borrow_mut();\n\n let dst = dst(&mut out);\n\n args.for_each(&mut |val, idx, _array| {\n\n if idx != 0 {\n\n dst.push_str(\" \");\n\n }\n\n dst.push_str(&stringify(&val));\n\n });\n\n dst.push_str(\"\\n\");\n\n });\n\n}\n\n\n\nimpl Context {\n\n /// Entry point for a synchronous test in wasm. 
The `#[wasm_bindgen_test]`\n", "file_path": "crates/test/src/rt/mod.rs", "rank": 50, "score": 320852.4196894737 }, { "content": "#[wasm_bindgen]\n\npub fn option_class_assert_none(x: Option<OptionClass>) {\n\n assert!(x.is_none());\n\n}\n\n\n", "file_path": "tests/wasm/classes.rs", "rank": 51, "score": 319763.8355759124 }, { "content": "fn format_doc_comments(comments: &str, js_doc_comments: Option<String>) -> String {\n\n let body: String = comments.lines().map(|c| format!(\"*{}\\n\", c)).collect();\n\n let doc = if let Some(docs) = js_doc_comments {\n\n docs.lines().map(|l| format!(\"* {} \\n\", l)).collect()\n\n } else {\n\n String::new()\n\n };\n\n format!(\"/**\\n{}{}*/\\n\", body, doc)\n\n}\n\n\n", "file_path": "crates/cli-support/src/js/mod.rs", "rank": 52, "score": 317342.3415637178 }, { "content": "#[wasm_bindgen]\n\npub fn rust_take_none_byval(t: Option<MyType>) {\n\n assert!(t.is_none());\n\n}\n\n\n", "file_path": "tests/wasm/option.rs", "rank": 53, "score": 316995.362385017 }, { "content": "#[wasm_bindgen]\n\npub fn rust_take_none(a: Option<Options>) {\n\n assert!(a.is_none());\n\n}\n\n\n\n/// doc\n", "file_path": "tests/wasm/import_class.rs", "rank": 54, "score": 316995.362385017 }, { "content": "fn item_id(mut element: Element) -> Option<String> {\n\n element.parent_element().map(|mut parent| {\n\n let mut res = None;\n\n let parent_id = parent.dataset_get(\"id\");\n\n if parent_id != \"\" {\n\n res = Some(parent_id);\n\n } else {\n\n if let Some(mut ep) = parent.parent_element() {\n\n res = Some(ep.dataset_get(\"id\"));\n\n }\n\n }\n\n res.unwrap()\n\n })\n\n}\n\n\n\n/// Presentation layer\n\n#[wasm_bindgen]\n\npub struct View {\n\n sched: RefCell<Rc<Scheduler>>,\n\n todo_list: Element,\n", "file_path": "examples/todomvc/src/view.rs", "rank": 55, "score": 316696.0429471126 }, { "content": "#[wasm_bindgen]\n\npub fn foo(a: &str) {\n\n drop(a);\n\n}\n", "file_path": "crates/cli/tests/reference/string-arg.rs", "rank": 56, "score": 315090.44876574446 }, 
{ "content": "pub fn get_rust_deprecated<'a>(ext_attrs: &Option<ExtendedAttributeList<'a>>) -> Option<&'a str> {\n\n ext_attrs\n\n .as_ref()?\n\n .body\n\n .list\n\n .iter()\n\n .filter_map(|attr| match attr {\n\n ExtendedAttribute::Ident(id) => Some(id),\n\n _ => None,\n\n })\n\n .filter(|attr| attr.lhs_identifier.0 == \"RustDeprecated\")\n\n .filter_map(|ident| match ident.rhs {\n\n IdentifierOrString::String(s) => Some(s),\n\n IdentifierOrString::Identifier(_) => None,\n\n })\n\n .next()\n\n .map(|s| s.0)\n\n}\n\n\n", "file_path": "crates/webidl/src/util.rs", "rank": 57, "score": 302925.65676464647 }, { "content": "#[wasm_bindgen]\n\npub fn named_struct_by_value(x: ExportedNamedStruct) {}\n\n\n", "file_path": "examples/guide-supported-types-examples/src/exported_types.rs", "rank": 58, "score": 297440.9151154635 }, { "content": "#[wasm_bindgen]\n\npub fn named_struct_by_shared_ref(x: &ExportedNamedStruct) {}\n\n\n", "file_path": "examples/guide-supported-types-examples/src/exported_types.rs", "rank": 59, "score": 294788.02543956076 }, { "content": "pub fn get_cfg_features(options: &Options, features: &BTreeSet<String>) -> Option<syn::Attribute> {\n\n let len = features.len();\n\n\n\n if !options.features || len == 0 {\n\n None\n\n } else {\n\n let features = features\n\n .into_iter()\n\n .map(|feature| quote!( feature = #feature, ))\n\n .collect::<TokenStream>();\n\n\n\n // This is technically unneeded but it generates more idiomatic code\n\n if len == 1 {\n\n Some(syn::parse_quote!( #[cfg(#features)] ))\n\n } else {\n\n Some(syn::parse_quote!( #[cfg(all(#features))] ))\n\n }\n\n }\n\n}\n", "file_path": "crates/webidl/src/util.rs", "rank": 60, "score": 293897.2474404356 }, { "content": "fn get_remaining<'a>(data: &mut &'a [u8]) -> Option<&'a [u8]> {\n\n if data.len() == 0 {\n\n return None;\n\n }\n\n let len = ((data[0] as usize) << 0)\n\n | ((data[1] as usize) << 8)\n\n | ((data[2] as usize) << 16)\n\n | ((data[3] as usize) << 24);\n\n let (a, b) = 
data[4..].split_at(len);\n\n *data = b;\n\n Some(a)\n\n}\n\n\n", "file_path": "crates/cli-support/src/wit/mod.rs", "rank": 61, "score": 292397.09985544096 }, { "content": "fn add_features(features: &mut BTreeSet<String>, ty: &impl TraverseType) {\n\n ty.traverse_type(&mut |ident| {\n\n let ident = ident.to_string();\n\n\n\n if !BUILTIN_IDENTS.contains(ident.as_str()) {\n\n features.insert(ident);\n\n }\n\n });\n\n}\n\n\n", "file_path": "crates/webidl/src/generator.rs", "rank": 62, "score": 288682.2244547859 }, { "content": "/// Generates Rust source code with #[wasm_bindgen] annotations.\n\n///\n\n/// * Reads WebIDL files in `from`\n\n/// * Generates Rust source code in the directory `to`\n\n/// * `options.features` indicates whether everything is gated by features or\n\n/// not\n\n///\n\n/// If features are enabled, returns a string that should be appended to\n\n/// `Cargo.toml` which lists all the known features.\n\npub fn generate(from: &Path, to: &Path, options: Options) -> Result<String> {\n\n let generate_features = options.features;\n\n\n\n let source = read_source_from_path(&from.join(\"enabled\"))?;\n\n let unstable_source = read_source_from_path(&from.join(\"unstable\"))?;\n\n\n\n let features = parse_webidl(generate_features, source, unstable_source)?;\n\n\n\n if to.exists() {\n\n fs::remove_dir_all(&to).context(\"Removing features directory\")?;\n\n }\n\n\n\n fs::create_dir_all(&to).context(\"Creating features directory\")?;\n\n\n\n for (name, feature) in features.iter() {\n\n let out_file_path = to.join(format!(\"gen_{}.rs\", name));\n\n\n\n fs::write(&out_file_path, &feature.code)?;\n\n\n\n rustfmt(&out_file_path, name)?;\n", "file_path": "crates/webidl/src/lib.rs", "rank": 63, "score": 288237.5719949956 }, { "content": "#[wasm_bindgen]\n\npub fn return_named_struct(inner: u32) -> ExportedNamedStruct {\n\n ExportedNamedStruct { inner }\n\n}\n\n\n\n#[wasm_bindgen]\n\npub struct ExportedTupleStruct(pub u32, pub u32);\n\n\n", "file_path": 
"examples/guide-supported-types-examples/src/exported_types.rs", "rank": 64, "score": 287346.9348458325 }, { "content": "fn delete_synthetic_global(module: &mut Module, name: &str) -> Result<u32, Error> {\n\n let id = match delete_synthetic_export(module, name)? {\n\n walrus::ExportItem::Global(g) => g,\n\n _ => bail!(\"`{}` must be a global\", name),\n\n };\n\n let g = match module.globals.get(id).kind {\n\n walrus::GlobalKind::Local(g) => g,\n\n walrus::GlobalKind::Import(_) => bail!(\"`{}` must not be an imported global\", name),\n\n };\n\n match g {\n\n InitExpr::Value(Value::I32(v)) => Ok(v as u32),\n\n _ => bail!(\"`{}` was not an `i32` constant\", name),\n\n }\n\n}\n\n\n", "file_path": "crates/threads-xform/src/lib.rs", "rank": 65, "score": 283650.6033441423 }, { "content": "fn verify_schema_matches<'a>(data: &'a [u8]) -> Result<Option<&'a str>, Error> {\n\n macro_rules! bad {\n\n () => {\n\n bail!(\"failed to decode what looked like wasm-bindgen data\")\n\n };\n\n }\n\n let data = match str::from_utf8(data) {\n\n Ok(s) => s,\n\n Err(_) => bad!(),\n\n };\n\n log::debug!(\"found version specifier {}\", data);\n\n if !data.starts_with(\"{\") || !data.ends_with(\"}\") {\n\n bad!()\n\n }\n\n let needle = \"\\\"schema_version\\\":\\\"\";\n\n let rest = match data.find(needle) {\n\n Some(i) => &data[i + needle.len()..],\n\n None => bad!(),\n\n };\n\n let their_schema_version = match rest.find(\"\\\"\") {\n", "file_path": "crates/cli-support/src/wit/mod.rs", "rank": 66, "score": 281819.8858955392 }, { "content": "#[wasm_bindgen]\n\npub fn do_string_roundtrip(s: String) -> String {\n\n s\n\n}\n\n\n", "file_path": "tests/wasm/simple.rs", "rank": 67, "score": 281013.79158825544 }, { "content": "fn delete_synthetic_func(module: &mut Module, name: &str) -> Result<FunctionId, Error> {\n\n match delete_synthetic_export(module, name)? 
{\n\n walrus::ExportItem::Function(f) => Ok(f),\n\n _ => bail!(\"`{}` must be a function\", name),\n\n }\n\n}\n\n\n", "file_path": "crates/threads-xform/src/lib.rs", "rank": 68, "score": 280768.1539624334 }, { "content": "fn delete_synthetic_export(module: &mut Module, name: &str) -> Result<ExportItem, Error> {\n\n let item = module\n\n .exports\n\n .iter()\n\n .find(|e| e.name == name)\n\n .ok_or_else(|| anyhow!(\"failed to find `{}`\", name))?;\n\n let ret = item.item;\n\n let id = item.id();\n\n module.exports.delete(id);\n\n Ok(ret)\n\n}\n\n\n", "file_path": "crates/threads-xform/src/lib.rs", "rank": 69, "score": 280768.15396243345 }, { "content": "/// Search for an attribute by name in some webidl object's attributes.\n\nfn has_named_attribute(list: Option<&ExtendedAttributeList>, attribute: &str) -> bool {\n\n let list = match list {\n\n Some(list) => list,\n\n None => return false,\n\n };\n\n list.body.list.iter().any(|attr| match attr {\n\n ExtendedAttribute::NoArgs(name) => (name.0).0 == attribute,\n\n _ => false,\n\n })\n\n}\n\n\n", "file_path": "crates/webidl/src/util.rs", "rank": 70, "score": 280205.1151106845 }, { "content": "#[wasm_bindgen]\n\npub fn return_string() -> String {\n\n \"hello\".into()\n\n}\n\n\n", "file_path": "examples/guide-supported-types-examples/src/string.rs", "rank": 71, "score": 278534.9459651738 }, { "content": "#[inline]\n\npub fn intern(s: &str) -> &str {\n\n #[cfg(feature = \"enable-interning\")]\n\n intern_str(s);\n\n\n\n s\n\n}\n\n\n\n\n\n/// Removes a Rust string from the intern cache.\n\n///\n\n/// This does the opposite of the [`intern`](fn.intern.html) function.\n\n///\n\n/// If the [`intern`](fn.intern.html) function is called again then it will re-intern the string.\n", "file_path": "src/cache/intern.rs", "rank": 72, "score": 278090.99290167674 }, { "content": "#[cold]\n\n#[inline(never)]\n\npub fn throw_str(s: &str) -> ! 
{\n\n unsafe {\n\n __wbindgen_throw(s.as_ptr(), s.len());\n\n }\n\n}\n\n\n\n/// Rethrow a JS exception\n\n///\n\n/// This function will throw a JS exception with the JS value provided. This\n\n/// function will not return and the wasm stack will be popped until the point\n\n/// of entry of wasm itself.\n\n///\n\n/// Note that it is very easy to leak memory with this function because this\n\n/// function, unlike `panic!` on other platforms, **will not run destructors**.\n\n/// It's recommended to return a `Result` where possible to avoid the worry of\n\n/// leaks.\n", "file_path": "src/lib.rs", "rank": 73, "score": 275682.7751412069 }, { "content": "#[wasm_bindgen]\n\npub fn option_class_some() -> Option<OptionClass> {\n\n Some(OptionClass(3))\n\n}\n\n\n", "file_path": "tests/wasm/classes.rs", "rank": 74, "score": 273258.76225421 }, { "content": "#[wasm_bindgen]\n\npub fn simple_str(_a: &str) {}\n\n\n", "file_path": "tests/wasm/simple.rs", "rank": 75, "score": 273079.7753013248 }, { "content": "#[wasm_bindgen]\n\npub fn annotated() -> String {\n\n String::new()\n\n}\n\n\n\n/// annotated struct type\n\n#[wasm_bindgen]\n\npub struct Annotated {\n\n a: String,\n\n /// annotated struct field b\n\n pub b: u32,\n\n /// annotated struct field c\n\n #[wasm_bindgen(readonly)]\n\n pub c: u32,\n\n d: u32,\n\n}\n\n\n\n#[wasm_bindgen]\n\nimpl Annotated {\n\n /// annotated struct constructor\n\n #[wasm_bindgen(constructor)]\n", "file_path": "tests/wasm/comments.rs", "rank": 76, "score": 272633.98141191655 }, { "content": "#[wasm_bindgen]\n\npub fn optional_u8_zero() -> Option<u8> {\n\n Some(0)\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 77, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_f64_one() -> Option<f64> {\n\n Some(1f64)\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 78, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_f32_zero() -> Option<f32> {\n\n 
Some(0f32)\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 79, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_i64_zero() -> Option<i64> {\n\n Some(0)\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 80, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_i16_max() -> Option<i16> {\n\n Some(i16::max_value())\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 81, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_isize_min() -> Option<isize> {\n\n Some(isize::min_value())\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 82, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_f64_zero() -> Option<f64> {\n\n Some(0f64)\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 83, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_isize_zero() -> Option<isize> {\n\n Some(0)\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 84, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_u8_one() -> Option<u8> {\n\n Some(1)\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 85, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_i16_min() -> Option<i16> {\n\n Some(i16::min_value())\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 86, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_i16_one() -> Option<i16> {\n\n Some(1)\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 87, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_f32_one() -> Option<f32> {\n\n Some(1f32)\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 88, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_i64_one() -> Option<i64> {\n\n 
Some(1)\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 89, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_isize_one() -> Option<isize> {\n\n Some(1)\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 90, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_u32_max() -> Option<u32> {\n\n Some(u32::max_value())\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 91, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_isize_max() -> Option<isize> {\n\n Some(isize::max_value())\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 92, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_i16_zero() -> Option<i16> {\n\n Some(0)\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 93, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_u32_zero() -> Option<u32> {\n\n Some(0)\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 94, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_u8_min() -> Option<u8> {\n\n Some(u8::min_value())\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 95, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_u32_one() -> Option<u32> {\n\n Some(1)\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 96, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_i64_min() -> Option<i64> {\n\n Some(i64::min_value())\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 97, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_u32_min() -> Option<u32> {\n\n Some(u32::min_value())\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 98, "score": 271006.6801743709 }, { "content": "#[wasm_bindgen]\n\npub fn optional_u8_max() -> 
Option<u8> {\n\n Some(u8::max_value())\n\n}\n\n\n", "file_path": "tests/wasm/optional_primitives.rs", "rank": 99, "score": 271006.6801743709 } ]
Rust
circlemud_ffi_client/src/slack_descriptor.rs
medwards/CircleMUD
f24154b52cceec7df93d750816c4aea7e332cc29
use std::collections::{HashMap, HashSet}; use std::sync::mpsc; use std::sync::Mutex; use std::thread; use slack_morphism::prelude::*; use slack_morphism_hyper::*; use hyper::service::{make_service_fn, service_fn}; use hyper::{Body, Request, Response}; use log::*; use tokio::runtime::Runtime; use std::sync::Arc; use crate::descriptor; use crate::descriptor::Descriptor; pub struct SlackDescriptorManager { server: thread::JoinHandle<Result<(), Box<dyn std::error::Error + Send + Sync>>>, bot_token: SlackApiTokenValue, descriptors: HashMap<String, Box<dyn descriptor::Descriptor>>, new_descriptors: mpsc::Receiver<SlackDescriptor>, } impl SlackDescriptorManager { pub fn new(signing_secret: &str, bot_token: SlackApiTokenValue) -> Self { let (new_descriptors_send, new_descriptors) = mpsc::channel(); SlackDescriptorManager { server: SlackDescriptorManager::launch_server( signing_secret.to_owned(), SlackApiToken::new(bot_token.clone()), new_descriptors_send, ), bot_token: bot_token, descriptors: HashMap::new(), new_descriptors: new_descriptors, } } fn launch_server( signing_secret: String, bot_token: SlackApiToken, new_descriptors_sender: mpsc::Sender<SlackDescriptor>, ) -> thread::JoinHandle<Result<(), Box<dyn std::error::Error + Send + Sync>>> { thread::spawn(|| { let runtime = Runtime::new().expect("Unable to create Runtime"); info!("Launching Slack Event API callback server"); runtime.block_on(async { init_log()?; let hyper_connector = SlackClientHyperConnector::new(); let client: Arc<SlackHyperClient> = Arc::new(SlackClient::new(hyper_connector)); create_server(client, signing_secret, bot_token, new_descriptors_sender).await }) }) } } impl descriptor::DescriptorManager for SlackDescriptorManager { fn get_new_descriptor(&mut self) -> Option<descriptor::DescriptorId> { match self.new_descriptors.try_recv() { Ok(descriptor) => { let ret = descriptor.identifier().clone(); self.descriptors .insert(ret.identifier.clone(), Box::new(descriptor)); Some(ret) } 
Err(mpsc::TryRecvError::Empty) => None, Err(_) => panic!("Channel for receiving new SlackDescriptors unexpectedly closed"), } } fn get_descriptor( &mut self, descriptor: &descriptor::DescriptorId, ) -> Option<&mut Box<dyn descriptor::Descriptor>> { self.descriptors.get_mut(&descriptor.identifier) } fn close_descriptor(&mut self, descriptor: &descriptor::DescriptorId) { info!("closing {:#?}", descriptor); self.descriptors.remove(&descriptor.identifier); } } async fn push_events_handler( event: SlackPushEvent, _client: Arc<SlackHyperClient>, bot_token: &SlackApiToken, new_descriptors_sender: Arc<Mutex<mpsc::Sender<SlackDescriptor>>>, message_senders: Arc<Mutex<HashMap<String, mpsc::Sender<SlackMessageContent>>>>, ) { info!("{}", display_push_event(&event)); debug!("{:#?}", event); if let SlackPushEvent::EventCallback(callback) = event { if let SlackEventCallbackBody::Message(message) = callback.event { if let Some(channel_type) = message.origin.channel_type { if channel_type == SlackChannelType("im".to_owned()) && message.sender.bot_id.is_none() { let mut message_senders = message_senders .lock() .expect("Unable to get lock for senders hashmap"); if let Some(channel) = message.origin.channel { let key = channel.to_string(); if !message_senders.contains_key(&key) { insert_new_session_for_channel( channel.clone(), bot_token.clone(), new_descriptors_sender, &mut message_senders, ); info!("New Events connection from {:?}", channel); } else if let Some(content) = message.content { match message_senders .get(&key) .expect("Sender went missing") .send(content.clone()) { Ok(()) => info!("Sent event from {:?} to descriptor", channel), Err(e) => { insert_new_session_for_channel( channel.clone(), bot_token.clone(), new_descriptors_sender, &mut message_senders, ); info!( "Old events connection failed for {:?}, creating a new one", channel ) } } } } } } } } } fn insert_new_session_for_channel( channel: SlackChannelId, bot_token: SlackApiToken, new_descriptors_sender: 
Arc<Mutex<mpsc::Sender<SlackDescriptor>>>, message_senders: &mut HashMap<String, mpsc::Sender<SlackMessageContent>>, ) { let (sender, receiver) = mpsc::channel(); message_senders.insert(channel.to_string(), sender); new_descriptors_sender .lock() .expect("Unable to lock SlackDescriptor sender") .send(SlackDescriptor::new(channel.clone(), receiver, bot_token)) .expect(&format!( "Unable to send new SlackDescriptor {:?} to SlackDescriptorManager", channel )); } fn display_push_event(event: &SlackPushEvent) -> String { match event { SlackPushEvent::EventCallback(cb) => match &cb.event { SlackEventCallbackBody::Message(message) => format!( "{:?} {:?} {:?} - had text content {:?}", message.origin.channel, message.origin.channel_type, message.subtype, message .content .as_ref() .and_then(|c| c.text.as_ref()) .is_some() ), _ => format!("unexpected event"), }, SlackPushEvent::AppRateLimited(event) => { format!("AppRateLimited event {:?}", event.minute_rate_limited) } _ => "unexpected push event".to_owned(), } } fn test_error_handler( err: Box<dyn std::error::Error + Send + Sync>, _client: Arc<SlackHyperClient>, ) { println!("{:#?}", err); } async fn create_server( client: Arc<SlackHyperClient>, signing_secret: String, bot_token: SlackApiToken, new_descriptors_sender: mpsc::Sender<SlackDescriptor>, ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> { let addr = std::env::var("SLACK_SOCKET_ADDR") .unwrap_or("127.0.0.1:8000".to_owned()) .parse() .expect("Invalid SLACK_SOCKET_ADDR provided"); info!("Loading server: {}", addr); async fn your_others_routes( _req: Request<Body>, ) -> Result<Response<Body>, Box<dyn std::error::Error + Send + Sync>> { Response::builder() .body("Hey, this is a default users route handler".into()) .map_err(|e| e.into()) } let push_events_config = Arc::new(SlackPushEventsListenerConfig::new(signing_secret)); let message_senders = Arc::new(Mutex::new(HashMap::new())); let new_descriptors_sender = Arc::new(Mutex::new(new_descriptors_sender)); let 
wrapped_push_events_handler = move |event, client| { let message_senders_clone = message_senders.clone(); let new_descriptors_sender_clone = new_descriptors_sender.clone(); let bot_token_clone = bot_token.clone(); async move { push_events_handler( event, client, &bot_token_clone, new_descriptors_sender_clone, message_senders_clone, ) .await } }; let make_svc = make_service_fn(move |_| { let thread_push_events_config = push_events_config.clone(); let wrapped_p_clone = wrapped_push_events_handler.clone(); let listener_environment = SlackClientEventsListenerEnvironment::new(client.clone()) .with_error_handler(test_error_handler); let listener = SlackClientEventsHyperListener::new(listener_environment); async move { let routes = chain_service_routes_fn( listener.push_events_service_fn(thread_push_events_config, wrapped_p_clone), your_others_routes, ); Ok::<_, Box<dyn std::error::Error + Send + Sync>>(service_fn(routes)) } }); let server = hyper::server::Server::bind(&addr).serve(make_svc); server.await.map_err(|e| { error!("Server error: {}", e); e.into() }) } fn init_log() -> Result<(), Box<dyn std::error::Error + Send + Sync>> { use fern::colors::{Color, ColoredLevelConfig}; let colors_level = ColoredLevelConfig::new() .info(Color::Green) .warn(Color::Magenta); fern::Dispatch::new() .format(move |out, message, record| { out.finish(format_args!( "{}[{}][{}] {}{}\x1B[0m", chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"), record.target(), colors_level.color(record.level()), format_args!( "\x1B[{}m", colors_level.get_color(&record.level()).to_fg_str() ), message )) }) .level(log::LevelFilter::Info) .level_for("hyper", log::LevelFilter::Info) .chain(std::io::stdout()) .apply()?; Ok(()) } pub struct SlackDescriptor { slack_bot_token: SlackApiToken, input_channel: Arc<Mutex<mpsc::Receiver<SlackMessageContent>>>, channel_id: SlackChannelId, identifier: descriptor::DescriptorId, user_id: u32, } impl SlackDescriptor { pub fn new( channel_id: SlackChannelId, input_channel: 
mpsc::Receiver<SlackMessageContent>, token: SlackApiToken, ) -> Self { let identifier = descriptor::DescriptorId { identifier: channel_id.to_string(), descriptor_type: "SLACK".to_owned(), }; SlackDescriptor { slack_bot_token: token, input_channel: Arc::new(Mutex::new(input_channel)), channel_id: channel_id, identifier: identifier, user_id: 0, } } async fn send_message( &self, content: SlackMessageContent, ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> { info!( "chat_post_message response: {:?} to {:?}", content.clone().text.map(|mut s| s.truncate(10)), self.channel_id ); let request = SlackApiChatPostMessageRequest::new(self.channel_id.clone(), content); let hyper_connector = SlackClientHyperConnector::new(); let client = SlackClient::new(hyper_connector); let session = client.open_session(&self.slack_bot_token); session .chat_post_message(&request) .await .map(|_response| ()) } } impl descriptor::Descriptor for SlackDescriptor { fn identifier(&self) -> &descriptor::DescriptorId { &self.identifier } fn read(&mut self, buf: &mut [u8]) -> Result<usize, descriptor::ErrorCode> { let temp = self .input_channel .lock() .expect("Unable to get lock on input channel") .try_recv(); match temp { Ok(content) => { let text_raw = format!("{}\n", content.text.expect("text content was empty")); let text = text_raw.as_bytes(); if text.len() > buf.len() { println!( "ERROR got slack message bigger than buffer CircleMUD allocated ({} > {}", text.len(), buf.len() ); return Err(1); } let common_length = std::cmp::min(text.len(), buf.len()); buf[0..common_length].copy_from_slice(&text[0..common_length]); Ok(common_length) } Err(mpsc::TryRecvError::Empty) => Ok(0), Err(_) => Err(2), } } fn write(&mut self, content: String) -> Result<usize, descriptor::ErrorCode> { let runtime = tokio::runtime::Builder::new_current_thread() .enable_io() .enable_time() .build() .expect("Failed to create local runtime"); match runtime 
.block_on(self.send_message(SlackMessageContent::new().with_text(content.clone()))) { Ok(()) => Ok(content.as_bytes().len()), Err(_) => Err(1), } } }
use std::collections::{HashMap, HashSet}; use std::sync::mpsc; use std::sync::Mutex; use std::thread; use slack_morphism::prelude::*; use slack_morphism_hyper::*; use hyper::service::{make_service_fn, service_fn}; use hyper::{Body, Request, Response}; use log::*; use tokio::runtime::Runtime; use std::sync::Arc; use crate::descriptor; use crate::descriptor::Descriptor; pub struct SlackDescriptorManager { server: thread::JoinHandle<Result<(), Box<dyn std::error::Error + Send + Sync>>>, bot_token: SlackApiTokenValue, descriptors: HashMap<String, Box<dyn descriptor::Descriptor>>, new_descriptors: mpsc::Receiver<SlackDescriptor>, } impl SlackDescriptorManager { pub fn new(signing_secret: &str, bot_token: SlackApiTokenValue) -> Self { let (new_descriptors_send, new_descriptors) = mpsc::channel(); SlackDescriptorManager { server: SlackDescriptorManager::launch_server( signing_secret.to_owned(), SlackApiToken::new(bot_token.clone()), new_descriptors_send, ), bot_token: bot_token, descriptors: HashMap::new(), new_descriptors: new_descriptors, } } fn launch_server( signing_secret: String, bot_token: SlackApiToken, new_descriptors_sender: mpsc::Sender<SlackDescriptor>, ) -> thread::JoinHandle<Result<(), Box<dyn std::error::Error + Send + Sync>>> { thread::spawn(|| { let runtime = Runtime::new().expect("Unable to create Runtime"); info!("Launching Slack Event API callback server"); runtime.block_on(async { init_log()?; let hyper_connector = SlackClientHyperConnector::new(); let client: Arc<SlackHyperClient> = Arc::new(SlackClient::new(hyper_connector)); create_server(client, signing_secret, bot_token, new_descriptors_sender).await }) }) } } impl descriptor::DescriptorManager for SlackDescriptorManager { fn get_new_descriptor(&mut self) -> Option<descriptor::DescriptorId> { match self.new_descriptors.try_recv() { Ok(descriptor) => { let ret = descriptor.identifier().clone(); self.descriptors .insert(ret.identifier.clone(), Box::new(descriptor)); Some(ret) } 
Err(mpsc::TryRecvError::Empty) => None, Err(_) => panic!("Channel for receiving new SlackDescriptors unexpectedly closed"), } } fn get_descriptor( &mut self, descriptor: &descriptor::DescriptorId, ) -> Option<&mut Box<dyn descriptor::Descriptor>> { self.descriptors.get_mut(&descriptor.identifier) } fn close_descriptor(&mut self, descriptor: &descriptor::DescriptorId) { info!("closing {:#?}", descriptor); self.descriptors.remove(&descriptor.identifier); } } async fn push_events_handler( event: SlackPushEvent, _client: Arc<SlackHyperClient>, bot_token: &SlackApiToken, new_descriptors_sender: Arc<Mutex<mpsc::Sender<SlackDescriptor>>>, message_senders: Arc<Mutex<HashMap<String, mpsc::Sender<SlackMessageContent>>>>, ) { info!("{}", display_push_event(&event)); debug!("{:#?}", event); if let SlackPushEvent::EventCallback(callback) = event { if let SlackEventCallbackBody::Message(message) = callback.event { if let Some(channel_type) = message.origin.channel_type { if channel_type == SlackChannelType("im".to_owned()) && message.sender.bot_id.is_none() { let mut message_senders = message_senders .lock() .expect("Unable to get lock for senders hashmap"); if let Some(channel) = message.origin.channel { let key = channel.to_string(); if !message_senders.contains_key(&key) { insert_new_session_for_channel( channel.clone(), bot_token.clone(), new_descriptors_sender, &mut message_senders, ); info!("New Events connection from {:?}", channel); } else if let Some(content) = message.content { match message_senders .get(&key) .expect("Sender went missing") .send(content.clone()) { Ok(()) => info!("Sent event from {:?} to descriptor", channel), Err(e) => { insert_new_session_for_channel( channel.clone(), bot_token.clone(), new_descriptors_sender, &mut message_senders, ); info!( "Old events connection failed for {:?}, creating a new one", channel ) } } } } } } } } } fn insert_new_session_for_channel( channel: SlackChannelId, bot_token: SlackApiToken, new_descriptors_sender: 
Arc<Mutex<mpsc::Sender<SlackDescriptor>>>, message_senders: &mut HashMap<String, mpsc::Sender<SlackMessageContent>>, ) { let (sender, receiver) = mpsc::channel(); message_senders.insert(channel.to_string(), sender); new_descriptors_sender .lock() .expect("Unable to lock SlackDescriptor sender") .send(SlackDescriptor::new(channel.clone(), receiver, bot_token)) .expect(&format!( "Unable to send new SlackDescriptor {:?} to SlackDescriptorManager", channel )); } fn display_push_event(event: &SlackPushEvent) -> String { match event { SlackPushEvent::EventCallback(cb) => match &cb.event { SlackEventCallbackBody::Message(message) => format!( "{:?} {:?} {:?} - had text content {:?}", message.origin.channel, message.origin.channel_type, message.subtype, message .content .as_ref() .and_then(|c| c.text.as_ref()) .is_some() ), _ => format!("unexpected event"), }, SlackPushEvent::AppRateLimited(event) => { format!("AppRateLimited event {:?}", event.minute_rate_limited) } _ => "unexpected push event".to_owned(), } } fn test_error_handler( err: Box<dyn std::error::Error + Send + Sync>, _client: Arc<SlackHyperClient>, ) { println!("{:#?}", err); } async fn create_server( client: Arc<SlackHyperClient>, signing_secret: String, bot_token: SlackApiToken, new_descriptors_sender: mpsc::Sender<SlackDescriptor>, ) -> Result<(), Box<dyn std::er
channel: Arc::new(Mutex::new(input_channel)), channel_id: channel_id, identifier: identifier, user_id: 0, } } async fn send_message( &self, content: SlackMessageContent, ) -> Result<(), Box<dyn std::error::Error + Send + Sync>> { info!( "chat_post_message response: {:?} to {:?}", content.clone().text.map(|mut s| s.truncate(10)), self.channel_id ); let request = SlackApiChatPostMessageRequest::new(self.channel_id.clone(), content); let hyper_connector = SlackClientHyperConnector::new(); let client = SlackClient::new(hyper_connector); let session = client.open_session(&self.slack_bot_token); session .chat_post_message(&request) .await .map(|_response| ()) } } impl descriptor::Descriptor for SlackDescriptor { fn identifier(&self) -> &descriptor::DescriptorId { &self.identifier } fn read(&mut self, buf: &mut [u8]) -> Result<usize, descriptor::ErrorCode> { let temp = self .input_channel .lock() .expect("Unable to get lock on input channel") .try_recv(); match temp { Ok(content) => { let text_raw = format!("{}\n", content.text.expect("text content was empty")); let text = text_raw.as_bytes(); if text.len() > buf.len() { println!( "ERROR got slack message bigger than buffer CircleMUD allocated ({} > {}", text.len(), buf.len() ); return Err(1); } let common_length = std::cmp::min(text.len(), buf.len()); buf[0..common_length].copy_from_slice(&text[0..common_length]); Ok(common_length) } Err(mpsc::TryRecvError::Empty) => Ok(0), Err(_) => Err(2), } } fn write(&mut self, content: String) -> Result<usize, descriptor::ErrorCode> { let runtime = tokio::runtime::Builder::new_current_thread() .enable_io() .enable_time() .build() .expect("Failed to create local runtime"); match runtime .block_on(self.send_message(SlackMessageContent::new().with_text(content.clone()))) { Ok(()) => Ok(content.as_bytes().len()), Err(_) => Err(1), } } }
ror::Error + Send + Sync>> { let addr = std::env::var("SLACK_SOCKET_ADDR") .unwrap_or("127.0.0.1:8000".to_owned()) .parse() .expect("Invalid SLACK_SOCKET_ADDR provided"); info!("Loading server: {}", addr); async fn your_others_routes( _req: Request<Body>, ) -> Result<Response<Body>, Box<dyn std::error::Error + Send + Sync>> { Response::builder() .body("Hey, this is a default users route handler".into()) .map_err(|e| e.into()) } let push_events_config = Arc::new(SlackPushEventsListenerConfig::new(signing_secret)); let message_senders = Arc::new(Mutex::new(HashMap::new())); let new_descriptors_sender = Arc::new(Mutex::new(new_descriptors_sender)); let wrapped_push_events_handler = move |event, client| { let message_senders_clone = message_senders.clone(); let new_descriptors_sender_clone = new_descriptors_sender.clone(); let bot_token_clone = bot_token.clone(); async move { push_events_handler( event, client, &bot_token_clone, new_descriptors_sender_clone, message_senders_clone, ) .await } }; let make_svc = make_service_fn(move |_| { let thread_push_events_config = push_events_config.clone(); let wrapped_p_clone = wrapped_push_events_handler.clone(); let listener_environment = SlackClientEventsListenerEnvironment::new(client.clone()) .with_error_handler(test_error_handler); let listener = SlackClientEventsHyperListener::new(listener_environment); async move { let routes = chain_service_routes_fn( listener.push_events_service_fn(thread_push_events_config, wrapped_p_clone), your_others_routes, ); Ok::<_, Box<dyn std::error::Error + Send + Sync>>(service_fn(routes)) } }); let server = hyper::server::Server::bind(&addr).serve(make_svc); server.await.map_err(|e| { error!("Server error: {}", e); e.into() }) } fn init_log() -> Result<(), Box<dyn std::error::Error + Send + Sync>> { use fern::colors::{Color, ColoredLevelConfig}; let colors_level = ColoredLevelConfig::new() .info(Color::Green) .warn(Color::Magenta); fern::Dispatch::new() .format(move |out, message, record| { 
out.finish(format_args!( "{}[{}][{}] {}{}\x1B[0m", chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"), record.target(), colors_level.color(record.level()), format_args!( "\x1B[{}m", colors_level.get_color(&record.level()).to_fg_str() ), message )) }) .level(log::LevelFilter::Info) .level_for("hyper", log::LevelFilter::Info) .chain(std::io::stdout()) .apply()?; Ok(()) } pub struct SlackDescriptor { slack_bot_token: SlackApiToken, input_channel: Arc<Mutex<mpsc::Receiver<SlackMessageContent>>>, channel_id: SlackChannelId, identifier: descriptor::DescriptorId, user_id: u32, } impl SlackDescriptor { pub fn new( channel_id: SlackChannelId, input_channel: mpsc::Receiver<SlackMessageContent>, token: SlackApiToken, ) -> Self { let identifier = descriptor::DescriptorId { identifier: channel_id.to_string(), descriptor_type: "SLACK".to_owned(), }; SlackDescriptor { slack_bot_token: token, input_
random
[ { "content": "// TODO: this should probably just require the Read trait\n\npub trait Descriptor: Send + Sync {\n\n fn identifier(&self) -> &DescriptorId;\n\n fn read(&mut self, read_point: &mut [u8]) -> Result<usize, ErrorCode>;\n\n fn write(&mut self, content: String) -> Result<usize, ErrorCode>;\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub struct DescriptorId {\n\n pub identifier: String,\n\n pub descriptor_type: String,\n\n}\n\n\n\nimpl DescriptorId {\n\n pub fn new(identifier: &str, descriptor_type: &str) -> Self {\n\n DescriptorId {\n\n identifier: identifier.to_owned(),\n\n descriptor_type: descriptor_type.to_owned(),\n\n }\n\n }\n\n}\n", "file_path": "circlemud_ffi_client/src/descriptor.rs", "rank": 3, "score": 323027.72125884244 }, { "content": "pub trait DescriptorManager {\n\n fn get_new_descriptor(&mut self) -> Option<DescriptorId>;\n\n fn get_descriptor(&mut self, descriptor: &DescriptorId) -> Option<&mut Box<dyn Descriptor>>;\n\n fn close_descriptor(&mut self, identifier: &DescriptorId);\n\n}\n\n\n", "file_path": "circlemud_ffi_client/src/descriptor.rs", "rank": 5, "score": 185984.33646118635 }, { "content": " sh_int /*bitvector_t*/ exit_info;\t/* Exit info\t\t\t*/\n", "file_path": "src/structs.h", "rank": 23, "score": 153616.28669686106 }, { "content": "struct rent_info {\n\n int\ttime;\n\n int\trentcode;\n\n int\tnet_cost_per_diem;\n\n int\tgold;\n\n int\taccount;\n\n int\tnitems;\n\n int\tspare0;\n\n int\tspare1;\n\n int\tspare2;\n\n int\tspare3;\n\n int\tspare4;\n\n int\tspare5;\n\n int\tspare6;\n\n int\tspare7;\n", "file_path": "src/structs.h", "rank": 24, "score": 153611.39282874655 }, { "content": "struct DescriptorId* ffi_new_descriptor(struct DescriptorManager* manager, size_t type);\n", "file_path": "src/clients.h", "rank": 25, "score": 152064.32854252667 }, { "content": "void ffi_close_descriptor(struct DescriptorManager* manager, struct DescriptorId* identifier);\n", "file_path": "src/clients.h", "rank": 26, "score": 
152056.83850285003 }, { "content": "struct time_info_data {\n\n int hours, day, month;\n\n sh_int year;\n", "file_path": "src/structs.h", "rank": 27, "score": 150729.60059908216 }, { "content": "struct guild_info_type {\n\n int pc_class;\n\n room_vnum guild_room;\n\n int direction;\n", "file_path": "src/structs.h", "rank": 28, "score": 150729.60059908216 }, { "content": "struct DescriptorManager* ffi_create_descriptor_manager();\n", "file_path": "src/clients.h", "rank": 29, "score": 149258.57735876276 }, { "content": "char *info = NULL;\t\t/* info page\t\t\t */\n", "file_path": "src/db.c", "rank": 30, "score": 144130.39400983314 }, { "content": " obj_vnum key;\t\t/* Key's number (-1 for no key)\t\t*/\n", "file_path": "src/structs.h", "rank": 31, "score": 142831.16361601272 }, { "content": " char\t*text;\n", "file_path": "src/structs.h", "rank": 32, "score": 142813.56698356345 }, { "content": " char\t**str;\t\t\t/* for the modify-str system\t\t*/\n", "file_path": "src/structs.h", "rank": 33, "score": 142796.76708812572 }, { "content": " socket_t\tdescriptor;\t/* file descriptor for socket\t\t*/\n", "file_path": "src/structs.h", "rank": 34, "score": 142754.63407259964 }, { "content": "int new_descriptor(socket_t s);\n", "file_path": "src/comm.c", "rank": 35, "score": 142151.1691503257 }, { "content": "extern char *info;\n", "file_path": "src/act.informative.c", "rank": 36, "score": 142116.87560503767 }, { "content": "\n\npub struct ByteStreamDescriptor {\n\n reader: Box<dyn Read + Send + Sync>,\n\n writer: Box<dyn Write + Send + Sync>,\n\n}\n\n\n\nimpl ByteStreamDescriptor {\n\n pub fn new(\n\n reader: Box<dyn std::io::Read + Send + Sync + 'static>,\n\n writer: Box<dyn std::io::Write + Send + Sync + 'static>,\n\n ) -> Self {\n\n Self {\n\n reader: reader,\n\n writer: writer,\n\n }\n\n }\n\n // no special drop impl requited\n\n}\n\n\n\nimpl Descriptor for ByteStreamDescriptor {\n", "file_path": "circlemud_ffi_client/src/descriptor.rs", "rank": 37, "score": 
107394.61970291623 }, { "content": " fn identifier(&self) -> &DescriptorId {\n\n unimplemented!()\n\n }\n\n\n\n fn read(&mut self, buf: &mut [u8]) -> Result<usize, ErrorCode> {\n\n // TODO: don't silently drop the error\n\n self.reader.read(buf).map_err(|e| 0)\n\n }\n\n\n\n fn write(&mut self, content: String) -> Result<usize, ErrorCode> {\n\n self.writer.write(content.as_bytes()).map_err(|e| 1)\n\n }\n\n}\n", "file_path": "circlemud_ffi_client/src/descriptor.rs", "rank": 38, "score": 107391.36971273305 }, { "content": "use std::io::{Read, Write};\n\nuse std::sync::Arc;\n\nuse std::sync::Mutex;\n\npub type ErrorCode = usize;\n\n\n", "file_path": "circlemud_ffi_client/src/descriptor.rs", "rank": 39, "score": 107386.62821350021 }, { "content": " struct obj_data *contents; /* List of items in room */\n", "file_path": "src/structs.h", "rank": 40, "score": 104012.29830926913 }, { "content": " int\tconnected;\t\t/* mode of 'connectedness'\t\t*/\n", "file_path": "src/structs.h", "rank": 41, "score": 104010.2559071669 }, { "content": " sh_int p_locks;\n", "file_path": "src/structs.h", "rank": 42, "score": 104002.37467897912 }, { "content": "struct guild_info_type guild_info[] = {\n\n\n\n/* Midgaard */\n\n { CLASS_MAGIC_USER,\t3017,\tSCMD_SOUTH\t},\n\n { CLASS_CLERIC,\t3004,\tSCMD_NORTH\t},\n\n { CLASS_THIEF,\t3027,\tSCMD_EAST\t},\n\n { CLASS_WARRIOR,\t3021,\tSCMD_EAST\t},\n\n\n\n/* Brass Dragon */\n\n { -999 /* all */ ,\t5065,\tSCMD_WEST\t},\n\n\n\n/* this must go last -- add new guards above! 
*/\n\n { -1, NOWHERE, -1}\n", "file_path": "src/class.c", "rank": 43, "score": 103309.48521500935 }, { "content": "struct board_info_type board_info[NUM_OF_BOARDS] = {\n\n {3099, 0, 0, LVL_GOD, LIB_ETC \"board.mort\", 0},\n\n {3098, LVL_IMMORT, LVL_IMMORT, LVL_GRGOD, LIB_ETC \"board.immort\", 0},\n\n {3097, LVL_IMMORT, LVL_FREEZE, LVL_IMPL, LIB_ETC \"board.freeze\", 0},\n\n {3096, 0, 0, LVL_IMMORT, LIB_ETC \"board.social\", 0},\n", "file_path": "src/boards.c", "rank": 44, "score": 103304.74445940451 }, { "content": "extern struct time_info_data time_info;\n", "file_path": "src/weather.c", "rank": 45, "score": 103301.78567216313 }, { "content": "extern struct spell_info_type spell_info[];\n", "file_path": "src/modify.c", "rank": 46, "score": 103301.78567216313 }, { "content": "cpp_extern const struct command_info cmd_info[] = {\n\n { \"RESERVED\", 0, 0, 0, 0 },\t/* this must be first -- for specprocs */\n\n\n\n /* directions must come before other commands but after RESERVED */\n\n { \"north\" , POS_STANDING, do_move , 0, SCMD_NORTH },\n\n { \"east\" , POS_STANDING, do_move , 0, SCMD_EAST },\n\n { \"south\" , POS_STANDING, do_move , 0, SCMD_SOUTH },\n\n { \"west\" , POS_STANDING, do_move , 0, SCMD_WEST },\n\n { \"up\" , POS_STANDING, do_move , 0, SCMD_UP },\n\n { \"down\" , POS_STANDING, do_move , 0, SCMD_DOWN },\n\n\n\n /* now, the main list */\n\n { \"at\" , POS_DEAD , do_at , LVL_IMMORT, 0 },\n\n { \"advance\" , POS_DEAD , do_advance , LVL_IMPL, 0 },\n\n { \"alias\" , POS_DEAD , do_alias , 0, 0 },\n\n { \"accuse\" , POS_SITTING , do_action , 0, 0 },\n\n { \"applaud\" , POS_RESTING , do_action , 0, 0 },\n\n { \"assist\" , POS_FIGHTING, do_assist , 1, 0 },\n\n { \"ask\" , POS_RESTING , do_spec_comm, 0, SCMD_ASK },\n\n { \"auction\" , POS_SLEEPING, do_gen_comm , 0, SCMD_AUCTION },\n\n { \"autoexit\" , POS_DEAD , do_gen_tog , 0, SCMD_AUTOEXIT },\n\n\n\n { \"bounce\" , POS_STANDING, do_action , 0, 0 },\n\n { \"backstab\" , POS_STANDING, do_backstab , 1, 0 },\n\n { 
\"ban\" , POS_DEAD , do_ban , LVL_GRGOD, 0 },\n\n { \"balance\" , POS_STANDING, do_not_here , 1, 0 },\n\n { \"bash\" , POS_FIGHTING, do_bash , 1, 0 },\n\n { \"beg\" , POS_RESTING , do_action , 0, 0 },\n\n { \"bleed\" , POS_RESTING , do_action , 0, 0 },\n\n { \"blush\" , POS_RESTING , do_action , 0, 0 },\n\n { \"bow\" , POS_STANDING, do_action , 0, 0 },\n\n { \"brb\" , POS_RESTING , do_action , 0, 0 },\n\n { \"brief\" , POS_DEAD , do_gen_tog , 0, SCMD_BRIEF },\n\n { \"burp\" , POS_RESTING , do_action , 0, 0 },\n\n { \"buy\" , POS_STANDING, do_not_here , 0, 0 },\n\n { \"bug\" , POS_DEAD , do_gen_write, 0, SCMD_BUG },\n\n\n\n { \"cast\" , POS_SITTING , do_cast , 1, 0 },\n\n { \"cackle\" , POS_RESTING , do_action , 0, 0 },\n\n { \"check\" , POS_STANDING, do_not_here , 1, 0 },\n\n { \"chuckle\" , POS_RESTING , do_action , 0, 0 },\n\n { \"clap\" , POS_RESTING , do_action , 0, 0 },\n\n { \"clear\" , POS_DEAD , do_gen_ps , 0, SCMD_CLEAR },\n\n { \"close\" , POS_SITTING , do_gen_door , 0, SCMD_CLOSE },\n\n { \"cls\" , POS_DEAD , do_gen_ps , 0, SCMD_CLEAR },\n\n { \"consider\" , POS_RESTING , do_consider , 0, 0 },\n\n { \"color\" , POS_DEAD , do_color , 0, 0 },\n\n { \"comfort\" , POS_RESTING , do_action , 0, 0 },\n\n { \"comb\" , POS_RESTING , do_action , 0, 0 },\n\n { \"commands\" , POS_DEAD , do_commands , 0, SCMD_COMMANDS },\n\n { \"compact\" , POS_DEAD , do_gen_tog , 0, SCMD_COMPACT },\n\n { \"cough\" , POS_RESTING , do_action , 0, 0 },\n\n { \"credits\" , POS_DEAD , do_gen_ps , 0, SCMD_CREDITS },\n\n { \"cringe\" , POS_RESTING , do_action , 0, 0 },\n\n { \"cry\" , POS_RESTING , do_action , 0, 0 },\n\n { \"cuddle\" , POS_RESTING , do_action , 0, 0 },\n\n { \"curse\" , POS_RESTING , do_action , 0, 0 },\n\n { \"curtsey\" , POS_STANDING, do_action , 0, 0 },\n\n\n\n { \"dance\" , POS_STANDING, do_action , 0, 0 },\n\n { \"date\" , POS_DEAD , do_date , LVL_IMMORT, SCMD_DATE },\n\n { \"daydream\" , POS_SLEEPING, do_action , 0, 0 },\n\n { \"dc\" , POS_DEAD , do_dc , LVL_GOD, 0 
},\n\n { \"deposit\" , POS_STANDING, do_not_here , 1, 0 },\n\n { \"diagnose\" , POS_RESTING , do_diagnose , 0, 0 },\n\n { \"display\" , POS_DEAD , do_display , 0, 0 },\n\n { \"donate\" , POS_RESTING , do_drop , 0, SCMD_DONATE },\n\n { \"drink\" , POS_RESTING , do_drink , 0, SCMD_DRINK },\n\n { \"drop\" , POS_RESTING , do_drop , 0, SCMD_DROP },\n\n { \"drool\" , POS_RESTING , do_action , 0, 0 },\n\n\n\n { \"eat\" , POS_RESTING , do_eat , 0, SCMD_EAT },\n\n { \"echo\" , POS_SLEEPING, do_echo , LVL_IMMORT, SCMD_ECHO },\n\n { \"emote\" , POS_RESTING , do_echo , 1, SCMD_EMOTE },\n\n { \":\" , POS_RESTING, do_echo , 1, SCMD_EMOTE },\n\n { \"embrace\" , POS_STANDING, do_action , 0, 0 },\n\n { \"enter\" , POS_STANDING, do_enter , 0, 0 },\n\n { \"equipment\", POS_SLEEPING, do_equipment, 0, 0 },\n\n { \"exits\" , POS_RESTING , do_exits , 0, 0 },\n\n { \"examine\" , POS_SITTING , do_examine , 0, 0 },\n\n\n\n { \"force\" , POS_SLEEPING, do_force , LVL_GOD, 0 },\n\n { \"fart\" , POS_RESTING , do_action , 0, 0 },\n\n { \"fill\" , POS_STANDING, do_pour , 0, SCMD_FILL },\n\n { \"flee\" , POS_FIGHTING, do_flee , 1, 0 },\n\n { \"flip\" , POS_STANDING, do_action , 0, 0 },\n\n { \"flirt\" , POS_RESTING , do_action , 0, 0 },\n\n { \"follow\" , POS_RESTING , do_follow , 0, 0 },\n\n { \"fondle\" , POS_RESTING , do_action , 0, 0 },\n\n { \"freeze\" , POS_DEAD , do_wizutil , LVL_FREEZE, SCMD_FREEZE },\n\n { \"french\" , POS_RESTING , do_action , 0, 0 },\n\n { \"frown\" , POS_RESTING , do_action , 0, 0 },\n\n { \"fume\" , POS_RESTING , do_action , 0, 0 },\n\n\n\n { \"get\" , POS_RESTING , do_get , 0, 0 },\n\n { \"gasp\" , POS_RESTING , do_action , 0, 0 },\n\n { \"gecho\" , POS_DEAD , do_gecho , LVL_GOD, 0 },\n\n { \"give\" , POS_RESTING , do_give , 0, 0 },\n\n { \"giggle\" , POS_RESTING , do_action , 0, 0 },\n\n { \"glare\" , POS_RESTING , do_action , 0, 0 },\n\n { \"goto\" , POS_SLEEPING, do_goto , LVL_IMMORT, 0 },\n\n { \"gold\" , POS_RESTING , do_gold , 0, 0 },\n\n { \"gossip\" , 
POS_SLEEPING, do_gen_comm , 0, SCMD_GOSSIP },\n\n { \"group\" , POS_RESTING , do_group , 1, 0 },\n\n { \"grab\" , POS_RESTING , do_grab , 0, 0 },\n\n { \"grats\" , POS_SLEEPING, do_gen_comm , 0, SCMD_GRATZ },\n\n { \"greet\" , POS_RESTING , do_action , 0, 0 },\n\n { \"grin\" , POS_RESTING , do_action , 0, 0 },\n\n { \"groan\" , POS_RESTING , do_action , 0, 0 },\n\n { \"grope\" , POS_RESTING , do_action , 0, 0 },\n\n { \"grovel\" , POS_RESTING , do_action , 0, 0 },\n\n { \"growl\" , POS_RESTING , do_action , 0, 0 },\n\n { \"gsay\" , POS_SLEEPING, do_gsay , 0, 0 },\n\n { \"gtell\" , POS_SLEEPING, do_gsay , 0, 0 },\n\n\n\n { \"help\" , POS_DEAD , do_help , 0, 0 },\n\n { \"handbook\" , POS_DEAD , do_gen_ps , LVL_IMMORT, SCMD_HANDBOOK },\n\n { \"hcontrol\" , POS_DEAD , do_hcontrol , LVL_GRGOD, 0 },\n\n { \"hiccup\" , POS_RESTING , do_action , 0, 0 },\n\n { \"hide\" , POS_RESTING , do_hide , 1, 0 },\n\n { \"hit\" , POS_FIGHTING, do_hit , 0, SCMD_HIT },\n\n { \"hold\" , POS_RESTING , do_grab , 1, 0 },\n\n { \"holler\" , POS_RESTING , do_gen_comm , 1, SCMD_HOLLER },\n\n { \"holylight\", POS_DEAD , do_gen_tog , LVL_IMMORT, SCMD_HOLYLIGHT },\n\n { \"hop\" , POS_RESTING , do_action , 0, 0 },\n\n { \"house\" , POS_RESTING , do_house , 0, 0 },\n\n { \"hug\" , POS_RESTING , do_action , 0, 0 },\n\n\n\n { \"inventory\", POS_DEAD , do_inventory, 0, 0 },\n\n { \"idea\" , POS_DEAD , do_gen_write, 0, SCMD_IDEA },\n\n { \"imotd\" , POS_DEAD , do_gen_ps , LVL_IMMORT, SCMD_IMOTD },\n\n { \"immlist\" , POS_DEAD , do_gen_ps , 0, SCMD_IMMLIST },\n\n { \"info\" , POS_SLEEPING, do_gen_ps , 0, SCMD_INFO },\n\n { \"insult\" , POS_RESTING , do_insult , 0, 0 },\n\n { \"invis\" , POS_DEAD , do_invis , LVL_IMMORT, 0 },\n\n\n\n { \"junk\" , POS_RESTING , do_drop , 0, SCMD_JUNK },\n\n\n\n { \"kill\" , POS_FIGHTING, do_kill , 0, 0 },\n\n { \"kick\" , POS_FIGHTING, do_kick , 1, 0 },\n\n { \"kiss\" , POS_RESTING , do_action , 0, 0 },\n\n\n\n { \"look\" , POS_RESTING , do_look , 0, SCMD_LOOK },\n\n { 
\"laugh\" , POS_RESTING , do_action , 0, 0 },\n\n { \"last\" , POS_DEAD , do_last , LVL_GOD, 0 },\n\n { \"leave\" , POS_STANDING, do_leave , 0, 0 },\n\n { \"levels\" , POS_DEAD , do_levels , 0, 0 },\n\n { \"list\" , POS_STANDING, do_not_here , 0, 0 },\n\n { \"lick\" , POS_RESTING , do_action , 0, 0 },\n\n { \"lock\" , POS_SITTING , do_gen_door , 0, SCMD_LOCK },\n\n { \"load\" , POS_DEAD , do_load , LVL_GOD, 0 },\n\n { \"love\" , POS_RESTING , do_action , 0, 0 },\n\n\n\n { \"moan\" , POS_RESTING , do_action , 0, 0 },\n\n { \"motd\" , POS_DEAD , do_gen_ps , 0, SCMD_MOTD },\n\n { \"mail\" , POS_STANDING, do_not_here , 1, 0 },\n\n { \"massage\" , POS_RESTING , do_action , 0, 0 },\n\n { \"mute\" , POS_DEAD , do_wizutil , LVL_GOD, SCMD_SQUELCH },\n\n { \"murder\" , POS_FIGHTING, do_hit , 0, SCMD_MURDER },\n\n\n\n { \"news\" , POS_SLEEPING, do_gen_ps , 0, SCMD_NEWS },\n\n { \"nibble\" , POS_RESTING , do_action , 0, 0 },\n\n { \"nod\" , POS_RESTING , do_action , 0, 0 },\n\n { \"noauction\", POS_DEAD , do_gen_tog , 0, SCMD_NOAUCTION },\n\n { \"nogossip\" , POS_DEAD , do_gen_tog , 0, SCMD_NOGOSSIP },\n\n { \"nograts\" , POS_DEAD , do_gen_tog , 0, SCMD_NOGRATZ },\n\n { \"nohassle\" , POS_DEAD , do_gen_tog , LVL_IMMORT, SCMD_NOHASSLE },\n\n { \"norepeat\" , POS_DEAD , do_gen_tog , 0, SCMD_NOREPEAT },\n\n { \"noshout\" , POS_SLEEPING, do_gen_tog , 1, SCMD_DEAF },\n\n { \"nosummon\" , POS_DEAD , do_gen_tog , 1, SCMD_NOSUMMON },\n\n { \"notell\" , POS_DEAD , do_gen_tog , 1, SCMD_NOTELL },\n\n { \"notitle\" , POS_DEAD , do_wizutil , LVL_GOD, SCMD_NOTITLE },\n\n { \"nowiz\" , POS_DEAD , do_gen_tog , LVL_IMMORT, SCMD_NOWIZ },\n\n { \"nudge\" , POS_RESTING , do_action , 0, 0 },\n\n { \"nuzzle\" , POS_RESTING , do_action , 0, 0 },\n\n\n\n { \"olc\" , POS_DEAD , do_olc , LVL_IMPL, 0 },\n\n { \"order\" , POS_RESTING , do_order , 1, 0 },\n\n { \"offer\" , POS_STANDING, do_not_here , 1, 0 },\n\n { \"open\" , POS_SITTING , do_gen_door , 0, SCMD_OPEN },\n\n\n\n { \"put\" , POS_RESTING , 
do_put , 0, 0 },\n\n { \"pat\" , POS_RESTING , do_action , 0, 0 },\n\n { \"page\" , POS_DEAD , do_page , LVL_GOD, 0 },\n\n { \"pardon\" , POS_DEAD , do_wizutil , LVL_GOD, SCMD_PARDON },\n\n { \"peer\" , POS_RESTING , do_action , 0, 0 },\n\n { \"pick\" , POS_STANDING, do_gen_door , 1, SCMD_PICK },\n\n { \"point\" , POS_RESTING , do_action , 0, 0 },\n\n { \"poke\" , POS_RESTING , do_action , 0, 0 },\n\n { \"policy\" , POS_DEAD , do_gen_ps , 0, SCMD_POLICIES },\n\n { \"ponder\" , POS_RESTING , do_action , 0, 0 },\n\n { \"poofin\" , POS_DEAD , do_poofset , LVL_IMMORT, SCMD_POOFIN },\n\n { \"poofout\" , POS_DEAD , do_poofset , LVL_IMMORT, SCMD_POOFOUT },\n\n { \"pour\" , POS_STANDING, do_pour , 0, SCMD_POUR },\n\n { \"pout\" , POS_RESTING , do_action , 0, 0 },\n\n { \"prompt\" , POS_DEAD , do_display , 0, 0 },\n\n { \"practice\" , POS_RESTING , do_practice , 1, 0 },\n\n { \"pray\" , POS_SITTING , do_action , 0, 0 },\n\n { \"puke\" , POS_RESTING , do_action , 0, 0 },\n\n { \"punch\" , POS_RESTING , do_action , 0, 0 },\n\n { \"purr\" , POS_RESTING , do_action , 0, 0 },\n\n { \"purge\" , POS_DEAD , do_purge , LVL_GOD, 0 },\n\n\n\n { \"quaff\" , POS_RESTING , do_use , 0, SCMD_QUAFF },\n\n { \"qecho\" , POS_DEAD , do_qcomm , LVL_IMMORT, SCMD_QECHO },\n\n { \"quest\" , POS_DEAD , do_gen_tog , 0, SCMD_QUEST },\n\n { \"qui\" , POS_DEAD , do_quit , 0, 0 },\n\n { \"quit\" , POS_DEAD , do_quit , 0, SCMD_QUIT },\n\n { \"qsay\" , POS_RESTING , do_qcomm , 0, SCMD_QSAY },\n\n\n\n { \"reply\" , POS_SLEEPING, do_reply , 0, 0 },\n\n { \"rest\" , POS_RESTING , do_rest , 0, 0 },\n\n { \"read\" , POS_RESTING , do_look , 0, SCMD_READ },\n\n { \"reload\" , POS_DEAD , do_reboot , LVL_IMPL, 0 },\n\n { \"recite\" , POS_RESTING , do_use , 0, SCMD_RECITE },\n\n { \"receive\" , POS_STANDING, do_not_here , 1, 0 },\n\n { \"remove\" , POS_RESTING , do_remove , 0, 0 },\n\n { \"rent\" , POS_STANDING, do_not_here , 1, 0 },\n\n { \"report\" , POS_RESTING , do_report , 0, 0 },\n\n { \"reroll\" , POS_DEAD , 
do_wizutil , LVL_GRGOD, SCMD_REROLL },\n\n { \"rescue\" , POS_FIGHTING, do_rescue , 1, 0 },\n\n { \"restore\" , POS_DEAD , do_restore , LVL_GOD, 0 },\n\n { \"return\" , POS_DEAD , do_return , 0, 0 },\n\n { \"roll\" , POS_RESTING , do_action , 0, 0 },\n\n { \"roomflags\", POS_DEAD , do_gen_tog , LVL_IMMORT, SCMD_ROOMFLAGS },\n\n { \"ruffle\" , POS_STANDING, do_action , 0, 0 },\n\n\n\n { \"say\" , POS_RESTING , do_say , 0, 0 },\n\n { \"'\" , POS_RESTING , do_say , 0, 0 },\n\n { \"save\" , POS_SLEEPING, do_save , 0, 0 },\n\n { \"score\" , POS_DEAD , do_score , 0, 0 },\n\n { \"scream\" , POS_RESTING , do_action , 0, 0 },\n\n { \"sell\" , POS_STANDING, do_not_here , 0, 0 },\n\n { \"send\" , POS_SLEEPING, do_send , LVL_GOD, 0 },\n\n { \"set\" , POS_DEAD , do_set , LVL_GOD, 0 },\n\n { \"shout\" , POS_RESTING , do_gen_comm , 0, SCMD_SHOUT },\n\n { \"shake\" , POS_RESTING , do_action , 0, 0 },\n\n { \"shiver\" , POS_RESTING , do_action , 0, 0 },\n\n { \"show\" , POS_DEAD , do_show , LVL_IMMORT, 0 },\n\n { \"shrug\" , POS_RESTING , do_action , 0, 0 },\n\n { \"shutdow\" , POS_DEAD , do_shutdown , LVL_IMPL, 0 },\n\n { \"shutdown\" , POS_DEAD , do_shutdown , LVL_IMPL, SCMD_SHUTDOWN },\n\n { \"sigh\" , POS_RESTING , do_action , 0, 0 },\n\n { \"sing\" , POS_RESTING , do_action , 0, 0 },\n\n { \"sip\" , POS_RESTING , do_drink , 0, SCMD_SIP },\n\n { \"sit\" , POS_RESTING , do_sit , 0, 0 },\n\n { \"skillset\" , POS_SLEEPING, do_skillset , LVL_GRGOD, 0 },\n\n { \"sleep\" , POS_SLEEPING, do_sleep , 0, 0 },\n\n { \"slap\" , POS_RESTING , do_action , 0, 0 },\n\n { \"slowns\" , POS_DEAD , do_gen_tog , LVL_IMPL, SCMD_SLOWNS },\n\n { \"smile\" , POS_RESTING , do_action , 0, 0 },\n\n { \"smirk\" , POS_RESTING , do_action , 0, 0 },\n\n { \"snicker\" , POS_RESTING , do_action , 0, 0 },\n\n { \"snap\" , POS_RESTING , do_action , 0, 0 },\n\n { \"snarl\" , POS_RESTING , do_action , 0, 0 },\n\n { \"sneeze\" , POS_RESTING , do_action , 0, 0 },\n\n { \"sneak\" , POS_STANDING, do_sneak , 1, 0 },\n\n 
{ \"sniff\" , POS_RESTING , do_action , 0, 0 },\n\n { \"snore\" , POS_SLEEPING, do_action , 0, 0 },\n\n { \"snowball\" , POS_STANDING, do_action , LVL_IMMORT, 0 },\n\n { \"snoop\" , POS_DEAD , do_snoop , LVL_GOD, 0 },\n\n { \"snuggle\" , POS_RESTING , do_action , 0, 0 },\n\n { \"socials\" , POS_DEAD , do_commands , 0, SCMD_SOCIALS },\n\n { \"split\" , POS_SITTING , do_split , 1, 0 },\n\n { \"spank\" , POS_RESTING , do_action , 0, 0 },\n\n { \"spit\" , POS_STANDING, do_action , 0, 0 },\n\n { \"squeeze\" , POS_RESTING , do_action , 0, 0 },\n\n { \"stand\" , POS_RESTING , do_stand , 0, 0 },\n\n { \"stare\" , POS_RESTING , do_action , 0, 0 },\n\n { \"stat\" , POS_DEAD , do_stat , LVL_IMMORT, 0 },\n\n { \"steal\" , POS_STANDING, do_steal , 1, 0 },\n\n { \"steam\" , POS_RESTING , do_action , 0, 0 },\n\n { \"stroke\" , POS_RESTING , do_action , 0, 0 },\n\n { \"strut\" , POS_STANDING, do_action , 0, 0 },\n\n { \"sulk\" , POS_RESTING , do_action , 0, 0 },\n\n { \"switch\" , POS_DEAD , do_switch , LVL_GRGOD, 0 },\n\n { \"syslog\" , POS_DEAD , do_syslog , LVL_IMMORT, 0 },\n\n\n\n { \"tell\" , POS_DEAD , do_tell , 0, 0 },\n\n { \"tackle\" , POS_RESTING , do_action , 0, 0 },\n\n { \"take\" , POS_RESTING , do_get , 0, 0 },\n\n { \"tango\" , POS_STANDING, do_action , 0, 0 },\n\n { \"taunt\" , POS_RESTING , do_action , 0, 0 },\n\n { \"taste\" , POS_RESTING , do_eat , 0, SCMD_TASTE },\n\n { \"teleport\" , POS_DEAD , do_teleport , LVL_GOD, 0 },\n\n { \"thank\" , POS_RESTING , do_action , 0, 0 },\n\n { \"think\" , POS_RESTING , do_action , 0, 0 },\n\n { \"thaw\" , POS_DEAD , do_wizutil , LVL_FREEZE, SCMD_THAW },\n\n { \"title\" , POS_DEAD , do_title , 0, 0 },\n\n { \"tickle\" , POS_RESTING , do_action , 0, 0 },\n\n { \"time\" , POS_DEAD , do_time , 0, 0 },\n\n { \"toggle\" , POS_DEAD , do_toggle , 0, 0 },\n\n { \"track\" , POS_STANDING, do_track , 0, 0 },\n\n { \"trackthru\", POS_DEAD , do_gen_tog , LVL_IMPL, SCMD_TRACK },\n\n { \"transfer\" , POS_SLEEPING, do_trans , LVL_GOD, 0 
},\n\n { \"twiddle\" , POS_RESTING , do_action , 0, 0 },\n\n { \"typo\" , POS_DEAD , do_gen_write, 0, SCMD_TYPO },\n\n\n\n { \"unlock\" , POS_SITTING , do_gen_door , 0, SCMD_UNLOCK },\n\n { \"ungroup\" , POS_DEAD , do_ungroup , 0, 0 },\n\n { \"unban\" , POS_DEAD , do_unban , LVL_GRGOD, 0 },\n\n { \"unaffect\" , POS_DEAD , do_wizutil , LVL_GOD, SCMD_UNAFFECT },\n\n { \"uptime\" , POS_DEAD , do_date , LVL_IMMORT, SCMD_UPTIME },\n\n { \"use\" , POS_SITTING , do_use , 1, SCMD_USE },\n\n { \"users\" , POS_DEAD , do_users , LVL_IMMORT, 0 },\n\n\n\n { \"value\" , POS_STANDING, do_not_here , 0, 0 },\n\n { \"version\" , POS_DEAD , do_gen_ps , 0, SCMD_VERSION },\n\n { \"visible\" , POS_RESTING , do_visible , 1, 0 },\n\n { \"vnum\" , POS_DEAD , do_vnum , LVL_IMMORT, 0 },\n\n { \"vstat\" , POS_DEAD , do_vstat , LVL_IMMORT, 0 },\n\n\n\n { \"wake\" , POS_SLEEPING, do_wake , 0, 0 },\n\n { \"wave\" , POS_RESTING , do_action , 0, 0 },\n\n { \"wear\" , POS_RESTING , do_wear , 0, 0 },\n\n { \"weather\" , POS_RESTING , do_weather , 0, 0 },\n\n { \"who\" , POS_DEAD , do_who , 0, 0 },\n\n { \"whoami\" , POS_DEAD , do_gen_ps , 0, SCMD_WHOAMI },\n\n { \"where\" , POS_RESTING , do_where , 1, 0 },\n\n { \"whisper\" , POS_RESTING , do_spec_comm, 0, SCMD_WHISPER },\n\n { \"whine\" , POS_RESTING , do_action , 0, 0 },\n\n { \"whistle\" , POS_RESTING , do_action , 0, 0 },\n\n { \"wield\" , POS_RESTING , do_wield , 0, 0 },\n\n { \"wiggle\" , POS_STANDING, do_action , 0, 0 },\n\n { \"wimpy\" , POS_DEAD , do_wimpy , 0, 0 },\n\n { \"wink\" , POS_RESTING , do_action , 0, 0 },\n\n { \"withdraw\" , POS_STANDING, do_not_here , 1, 0 },\n\n { \"wiznet\" , POS_DEAD , do_wiznet , LVL_IMMORT, 0 },\n\n { \";\" , POS_DEAD , do_wiznet , LVL_IMMORT, 0 },\n\n { \"wizhelp\" , POS_SLEEPING, do_commands , LVL_IMMORT, SCMD_WIZHELP },\n\n { \"wizlist\" , POS_DEAD , do_gen_ps , 0, SCMD_WIZLIST },\n\n { \"wizlock\" , POS_DEAD , do_wizlock , LVL_IMPL, 0 },\n\n { \"worship\" , POS_RESTING , do_action , 0, 0 },\n\n { 
\"write\" , POS_STANDING, do_write , 1, 0 },\n\n\n\n { \"yawn\" , POS_RESTING , do_action , 0, 0 },\n\n { \"yodel\" , POS_RESTING , do_action , 0, 0 },\n\n\n\n { \"zreset\" , POS_DEAD , do_zreset , LVL_GRGOD, 0 },\n\n\n", "file_path": "src/interpreter.c", "rank": 47, "score": 103301.78567216313 }, { "content": "extern struct time_info_data time_info;\n", "file_path": "src/shop.c", "rank": 48, "score": 103301.78567216313 }, { "content": "struct time_info_data time_info;/* the infomation about the time */\n", "file_path": "src/db.c", "rank": 49, "score": 103301.78567216313 }, { "content": "extern const struct command_info cmd_info[];\n", "file_path": "src/interpreter.h", "rank": 50, "score": 103301.78567216313 }, { "content": "extern struct spell_info_type spell_info[];\n", "file_path": "src/act.other.c", "rank": 51, "score": 103301.78567216313 }, { "content": "extern struct spell_info_type spell_info[];\n", "file_path": "src/magic.c", "rank": 52, "score": 103301.78567216313 }, { "content": "#define INFO_FILE\tLIB_TEXT\"info\"\t\t/* for INFO\t\t*/\n", "file_path": "src/db.h", "rank": 53, "score": 103301.78567216313 }, { "content": "extern struct time_data time_info;\n", "file_path": "src/utils.c", "rank": 54, "score": 103301.78567216313 }, { "content": "extern struct time_info_data time_info;\n", "file_path": "src/castle.c", "rank": 55, "score": 103301.78567216313 }, { "content": "extern struct weather_data weather_info;\n", "file_path": "src/utils.h", "rank": 56, "score": 103301.78567216313 }, { "content": "#define SCMD_INFO 0\n", "file_path": "src/interpreter.h", "rank": 57, "score": 103301.78567216313 }, { "content": "struct command_info {\n\n const char *command;\n\n byte minimum_position;\n\n void\t(*command_pointer)\n\n\t (struct char_data *ch, char *argument, int cmd, int subcmd);\n\n sh_int minimum_level;\n\n int\tsubcmd;\n", "file_path": "src/interpreter.h", "rank": 58, "score": 103301.78567216313 }, { "content": "struct weather_data weather_info;\t/* the 
infomation about the weather */\n", "file_path": "src/db.c", "rank": 59, "score": 103301.78567216313 }, { "content": "extern struct time_info_data time_info;\t\t/* In db.c */\n", "file_path": "src/comm.c", "rank": 60, "score": 103301.78567216313 }, { "content": "#define APPLY_NONE 0\t/* No effect\t\t\t*/\n", "file_path": "src/structs.h", "rank": 61, "score": 102029.90475179254 }, { "content": "#define LVL_IMPL\t34\n", "file_path": "src/structs.h", "rank": 62, "score": 102029.30292187209 }, { "content": " struct msg_type miss_msg;\t/* messages when miss\t\t\t*/\n", "file_path": "src/structs.h", "rank": 63, "score": 102024.60320850511 }, { "content": " struct obj_data *next_content; /* For 'contains' lists */\n", "file_path": "src/structs.h", "rank": 64, "score": 102023.69201119732 }, { "content": " sh_int miss_att;\n", "file_path": "src/structs.h", "rank": 65, "score": 102019.73282494582 }, { "content": "#define USE_AUTOEQ\t0\t/* TRUE/FALSE aren't defined yet. */\n\n\n\n\n", "file_path": "src/structs.h", "rank": 66, "score": 102017.79828487302 }, { "content": "#define ITEM_KEY 18\t\t/* Item is a key\t\t*/\n", "file_path": "src/structs.h", "rank": 67, "score": 102017.79828487302 }, { "content": "#define CONT_LOCKED (1 << 3)\t/* Container is locked\t\t*/\n\n\n\n\n", "file_path": "src/structs.h", "rank": 68, "score": 102013.95811033365 }, { "content": "#define EX_LOCKED\t\t(1 << 2) /* The door is locked\t*/\n", "file_path": "src/structs.h", "rank": 69, "score": 102013.95811033365 }, { "content": "#define CONT_CLOSED (1 << 2)\t/* Container is closed\t\t*/\n", "file_path": "src/structs.h", "rank": 70, "score": 102004.41203378455 }, { "content": "#define CON_CLOSE\t 1\t/* User disconnect, remove character.\t*/\n", "file_path": "src/structs.h", "rank": 71, "score": 102004.41203378455 }, { "content": "#define EX_CLOSED\t\t(1 << 1) /* The door is closed\t*/\n", "file_path": "src/structs.h", "rank": 72, "score": 102004.41203378455 }, { "content": " size_t max_str;\t 
/*\t\t-\t\t\t*/\n", "file_path": "src/structs.h", "rank": 73, "score": 102001.4643708332 }, { "content": " sbyte str_add; /* 000 - 100 if strength 18 */\n", "file_path": "src/structs.h", "rank": 74, "score": 101996.78699845415 }, { "content": "#define APPLY_STR 1\t/* Apply to strength\t\t*/\n", "file_path": "src/structs.h", "rank": 75, "score": 101996.78699845415 }, { "content": "struct message_list {\n\n int\ta_type;\t\t\t/* Attack type\t\t\t\t*/\n\n int\tnumber_of_attacks;\t/* How many attack messages to chose from. */\n\n struct message_type *msg;\t/* List of messages.\t\t\t*/\n", "file_path": "src/structs.h", "rank": 76, "score": 101958.58530632031 }, { "content": "#define MAX_MESSAGES\t\t60\n", "file_path": "src/structs.h", "rank": 77, "score": 101958.58530632031 }, { "content": "struct message_type {\n\n struct msg_type die_msg;\t/* messages when death\t\t\t*/\n\n struct msg_type miss_msg;\t/* messages when miss\t\t\t*/\n\n struct msg_type hit_msg;\t/* messages when hit\t\t\t*/\n\n struct msg_type god_msg;\t/* messages when hit on god\t\t*/\n\n struct message_type *next;\t/* to next messages of this kind.\t*/\n", "file_path": "src/structs.h", "rank": 78, "score": 101958.58530632031 }, { "content": " struct DescriptorId *ffi_descriptor; /* identifier for ffi client */\n", "file_path": "src/structs.h", "rank": 79, "score": 101947.90876522781 }, { "content": " int client_type; /* value from clients.h indicating which client protocol to use */\n", "file_path": "src/structs.h", "rank": 80, "score": 101945.16731285115 }, { "content": "struct descriptor_data {\n\n int client_type; /* value from clients.h indicating which client protocol to use */\n\n socket_t\tdescriptor;\t/* file descriptor for socket\t\t*/\n\n struct DescriptorId *ffi_descriptor; /* identifier for ffi client */\n\n char\thost[HOST_LENGTH+1];\t/* hostname\t\t\t\t*/\n\n byte\tbad_pws;\t\t/* number of bad pw attemps this login\t*/\n\n byte idle_tics;\t\t/* tics idle at password prompt\t\t*/\n\n 
int\tconnected;\t\t/* mode of 'connectedness'\t\t*/\n\n int\tdesc_num;\t\t/* unique num assigned to desc\t\t*/\n\n time_t login_time;\t\t/* when the person connected\t\t*/\n\n char *showstr_head;\t\t/* for keeping track of an internal str\t*/\n\n char **showstr_vector;\t/* for paging through texts\t\t*/\n\n int showstr_count;\t\t/* number of pages to page through\t*/\n\n int showstr_page;\t\t/* which page are we currently showing?\t*/\n\n char\t**str;\t\t\t/* for the modify-str system\t\t*/\n\n size_t max_str;\t /*\t\t-\t\t\t*/\n\n long\tmail_to;\t\t/* name for mail system\t\t\t*/\n\n int\thas_prompt;\t\t/* is the user at a prompt? */\n\n char\tinbuf[MAX_RAW_INPUT_LENGTH]; /* buffer for raw input\t\t*/\n\n char\tlast_input[MAX_INPUT_LENGTH]; /* the last input\t\t\t*/\n\n char small_outbuf[SMALL_BUFSIZE]; /* standard output buffer\t\t*/\n\n char *output;\t\t/* ptr to the current output buffer\t*/\n\n char **history;\t\t/* History of commands, for ! mostly.\t*/\n\n int\thistory_pos;\t\t/* Circular array position.\t\t*/\n\n int bufptr;\t\t\t/* ptr to end of current output\t\t*/\n\n int\tbufspace;\t\t/* space left in the output buffer\t*/\n\n struct txt_block *large_outbuf; /* ptr to large buffer, if we need it */\n\n struct txt_q input;\t\t/* q of unprocessed input\t\t*/\n\n struct char_data *character;\t/* linked to char\t\t\t*/\n\n struct char_data *original;\t/* original char if switched\t\t*/\n\n struct descriptor_data *snooping; /* Who is this char snooping\t*/\n\n struct descriptor_data *snoop_by; /* And who is snooping this char\t*/\n\n struct descriptor_data *next; /* link to next descriptor\t\t*/\n", "file_path": "src/structs.h", "rank": 81, "score": 101943.24825543631 }, { "content": "int new_ffi_descriptor(struct DescriptorManager* manager, int ffi_id)\n\n{\n\n int sockets_connected = 0;\n\n static int last_desc = 0;\t/* last descriptor number */\n\n struct descriptor_data *newd;\n\n struct hostent *from;\n\n\n\n struct DescriptorId *descriptor_id = 
ffi_new_descriptor(manager, CLIENT_FFI);\n\n if (descriptor_id == NULL) {\n\n //log(\"Failed to get new FFI descriptor\");\n\n return (1);\n\n }\n\n\n\n /* make sure we have room for it */\n\n for (newd = descriptor_list; newd; newd = newd->next)\n\n sockets_connected++;\n\n if (sockets_connected >= max_players) {\n\n // TODO: this used to write and close socket before initializing all the buffers, come back to this after introducing a real ffi_new_client impl\n\n ffi_write_to_descriptor(manager, descriptor_id, \"Sorry, CircleMUD is full right now... please try again later!\\r\\n\");\n\n ffi_close_descriptor(manager, descriptor_id);\n\n return (0);\n\n }\n\n\n\n /* create a new descriptor */\n\n CREATE(newd, struct descriptor_data, 1);\n\n\n\n /* find the sitename */\n\n strncpy(newd->host, \"FfiClient:ffi_id\", 10);\n\n\n\n // TODO: this used to write and close socket before initializing all the buffers, come back to this after introducing a real ffi_new_client impl\n\n /* determine if the site is banned */\n\n if (isbanned(newd->host) == BAN_ALL) {\n\n ffi_close_descriptor(manager, descriptor_id);\n\n mudlog(CMP, LVL_GOD, TRUE, \"Connection attempt denied from [%s]\", newd->host);\n\n free(newd);\n\n return (0);\n\n }\n\n#if 0\n\n /*\n\n * Log new connections - probably unnecessary, but you may want it.\n\n * Note that your immortals may wonder if they see a connection from\n\n * your site, but you are wizinvis upon login.\n\n */\n\n mudlog(CMP, LVL_GOD, FALSE, \"New connection from [%s]\", newd->host);\n\n#endif\n\n\n\n /* initialize descriptor data */\n\n newd->client_type = CLIENT_FFI;\n\n newd->descriptor = NULL;\n\n newd->ffi_descriptor = descriptor_id;\n\n newd->idle_tics = 0;\n\n newd->output = newd->small_outbuf;\n\n newd->bufspace = SMALL_BUFSIZE - 1;\n\n newd->login_time = time(0);\n\n *newd->output = '\\0';\n\n newd->bufptr = 0;\n\n newd->has_prompt = 1; /* prompt is part of greetings */\n\n STATE(newd) = CON_GET_NAME;\n\n\n\n /*\n\n * This isn't 
exactly optimal but allows us to make a design choice.\n\n * Do we embed the history in descriptor_data or keep it dynamically\n\n * allocated and allow a user defined history size?\n\n */\n\n CREATE(newd->history, char *, HISTORY_SIZE);\n\n\n\n if (++last_desc == 1000)\n\n last_desc = 1;\n\n newd->desc_num = last_desc;\n\n\n\n /* prepend to list */\n\n newd->next = descriptor_list;\n\n descriptor_list = newd;\n\n\n\n write_to_output(newd, \"%s\", GREETINGS);\n\n\n\n return (0);\n", "file_path": "src/comm.c", "rank": 82, "score": 101401.75525554939 }, { "content": "void close_ffi_descriptor(struct DescriptorManager *manager, struct descriptor_data *d) {\n\n struct DescriptorId *identifier = d->ffi_descriptor;\n\n free_descriptor(d);\n\n ffi_close_descriptor(manager, identifier);\n", "file_path": "src/comm.c", "rank": 83, "score": 101389.97574130353 }, { "content": " sh_int exit_info;\t\t/* Exit info */\n", "file_path": "src/util/wld2html.c", "rank": 84, "score": 101368.70946020434 }, { "content": "struct board_info_type {\n\n obj_vnum vnum;\t/* vnum of this board */\n\n int\tread_lvl;\t/* min level to read messages on this board */\n\n int\twrite_lvl;\t/* min level to write messages on this board */\n\n int\tremove_lvl;\t/* min level to remove messages from this board */\n\n char\tfilename[50];\t/* file to save this board to */\n\n obj_rnum rnum;\t/* rnum of this board */\n", "file_path": "src/boards.h", "rank": 85, "score": 101363.81559208984 }, { "content": "struct spell_info_type {\n\n byte min_position;\t/* Position for caster\t */\n\n int mana_min;\t/* Min amount of mana used by a spell (highest lev) */\n\n int mana_max;\t/* Max amount of mana used by a spell (lowest lev) */\n\n int mana_change;\t/* Change in mana used by spell from lev to lev */\n\n\n\n int min_level[NUM_CLASSES];\n\n int routines;\n\n byte violent;\n\n int targets; /* See below for use with TAR_XXX */\n\n const char *name;\t/* Input size not limited. Originates from string constants. 
*/\n\n const char *wear_off_msg;\t/* Input size not limited. Originates from string constants. */\n", "file_path": "src/spells.h", "rank": 86, "score": 101363.81559208984 }, { "content": "struct spell_info_type spell_info[TOP_SPELL_DEFINE + 1];\n", "file_path": "src/spell_parser.c", "rank": 87, "score": 101363.81559208984 }, { "content": "extern int *cmd_sort_info;\n", "file_path": "src/comm.c", "rank": 88, "score": 101363.81559208984 }, { "content": "extern struct time_info_data time_info;\n", "file_path": "src/spec_procs.c", "rank": 89, "score": 101363.81559208984 }, { "content": "extern struct guild_info_type guild_info[];\n", "file_path": "src/spec_procs.c", "rank": 90, "score": 101363.81559208984 }, { "content": "extern struct spell_info_type spell_info[];\n", "file_path": "src/spec_procs.c", "rank": 91, "score": 101363.81559208984 }, { "content": "extern struct time_info_data time_info;\n", "file_path": "src/act.informative.c", "rank": 92, "score": 101363.81559208984 }, { "content": "char *get_text(char *fname)\n\n{\n\n static char t[MAX_FILESIZE];\n\n char tmp[LINEBUF_SIZE + 2];\n\n FILE *fl = NULL;\n\n\n\n *t = '\\0';\n\n\n\n if (!strcmp(fname, \"-\")) {\n\n fl = stdin;\n\n if (isatty(STDIN_FILENO))\n\n fprintf(stderr, \"Enter sign text; terminate with Ctrl-D.\\n\");\n\n } else {\n\n if (!(fl = fopen(fname, \"r\"))) {\n\n perror(fname);\n\n exit(1);\n\n }\n\n }\n\n\n\n while (fgets(tmp, LINEBUF_SIZE, fl)) {\n\n if (strlen(tmp) + strlen(t) < MAX_FILESIZE - 1)\n\n strcat(t, strcat(tmp, \"\\r\"));\n\n else {\n\n fprintf(stderr, \"String too long. 
Truncated.\\n\");\n\n break;\n\n }\n\n }\n\n\n\n return (t);\n", "file_path": "src/util/sign.c", "rank": 93, "score": 101346.43655565871 }, { "content": "void get_one_line(FILE *fl, char *buf);\n", "file_path": "src/db.c", "rank": 94, "score": 101340.82095326683 }, { "content": "size_t ffi_read_from_descriptor(struct DescriptorManager* manager, struct DescriptorId* identifier, char* read_point, size_t space_left);\n", "file_path": "src/clients.h", "rank": 95, "score": 101331.84246772627 }, { "content": "size_t ffi_write_to_descriptor(struct DescriptorManager* manager, struct DescriptorId* identifier, const char* content);\n", "file_path": "src/clients.h", "rank": 96, "score": 101331.84246772627 }, { "content": "}\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn ffi_new_descriptor(\n\n manager: *mut slack_descriptor::SlackDescriptorManager,\n\n descriptor_type: usize,\n\n) -> *const DescriptorId {\n\n unsafe {\n\n match manager\n\n .as_mut()\n\n .expect(\"manager was null\")\n\n .get_new_descriptor()\n\n {\n\n Some(descriptor) => {\n\n println!(\"Found new descriptor: {:?}\", descriptor);\n\n Box::into_raw(Box::new(descriptor.clone()))\n\n }\n\n None => std::ptr::null(),\n\n }\n\n }\n", "file_path": "circlemud_ffi_client/src/lib.rs", "rank": 97, "score": 22.46086220082428 }, { "content": "use std::ffi::CStr;\n\nuse std::os::raw::c_char;\n\nuse std::ptr::copy_nonoverlapping;\n\n\n\nmod descriptor;\n\nuse descriptor::{/*ByteStreamDescriptor,*/ DescriptorId, DescriptorManager, ErrorCode};\n\nmod slack_descriptor;\n\n\n\n// \"descriptor\" will be an identifier the ffi client can use to determine which user this request\n\n// is for, for now just handle it all globally on stdin/stdout\n\n\n\n#[no_mangle]\n\npub extern \"C\" fn ffi_create_descriptor_manager() -> *mut slack_descriptor::SlackDescriptorManager {\n\n Box::into_raw(Box::new(slack_descriptor::SlackDescriptorManager::new(\n\n &std::env::var(\"SLACK_SIGNING_SECRET\")\n\n .expect(\"SLACK_SIGNING_SECRET not in the 
environment\"),\n\n std::env::var(\"SLACK_BOT_TOKEN\")\n\n .expect(\"SLACK_BOT_TOKEN not in the environment\")\n\n .into(),\n\n )))\n", "file_path": "circlemud_ffi_client/src/lib.rs", "rank": 98, "score": 19.032151782376424 }, { "content": "# `circlemud_ffi_client`\n\n\n\nStatic library built into Circlemud to provide new clients.\n\n\n\n`ByteStreamDescriptor` supports any input/output streams that implement `Read` or `Write` (useful for stdin/stdout based clients or for testing with arbitrary buffers).\n\n\n\n`SlackBotDescriptor` supports integration with a Slack bot.\n\n\n\n## Setting up a Slack Bot\n\n\n\nSet up a new app in slack https://api.slack.com/bot-users#creating-bot-user\n\n\n\nAdd scopes? https://api.slack.com/apps/YOURAPPID/oauth\n\nI have\n\nchat:write\n\nim:history\n\nim:read\n\nim:write\n\n\n\nEnable Event Subscriptions https://api.slack.com/apps/YOURAPPID/event-subscriptions?\n\nadd message.im\n", "file_path": "circlemud_ffi_client/README.md", "rank": 99, "score": 17.06802113115731 } ]
Rust
src/io/sequences_reader.rs
Guilucand/biloki
7c1d710bd03e797584e7f4db151d7b4c7b4433d7
use crate::io::lines_reader::LinesReader; use bstr::ByteSlice; use nix::sys::signal::{self, Signal}; use nix::unistd::Pid; use rand::Rng; use serde::Serialize; use std::fs::File; use std::hint::unreachable_unchecked; use std::intrinsics::unlikely; use std::io::{BufRead, BufReader, Read}; use std::num::Wrapping; use std::path::{Path, PathBuf}; use std::process::Command; use std::process::Stdio; use std::ptr::null_mut; use std::slice::from_raw_parts; const IDENT_STATE: usize = 0; const SEQ_STATE: usize = 1; const QUAL_STATE: usize = 2; enum FileType { Fasta, Fastq, } #[derive(Copy, Clone)] pub struct FastaSequence<'a> { pub ident: &'a [u8], pub seq: &'a [u8], pub qual: Option<&'a [u8]>, } const SEQ_LETTERS_MAPPING: [u8; 256] = { let mut lookup = [b'N'; 256]; lookup[b'A' as usize] = b'A'; lookup[b'C' as usize] = b'C'; lookup[b'G' as usize] = b'G'; lookup[b'T' as usize] = b'T'; lookup[b'a' as usize] = b'A'; lookup[b'c' as usize] = b'C'; lookup[b'g' as usize] = b'G'; lookup[b't' as usize] = b'T'; lookup }; pub struct SequencesReader; impl SequencesReader { fn normalize_sequence(seq: &mut [u8]) { for el in seq.iter_mut() { *el = SEQ_LETTERS_MAPPING[*el as usize]; } } pub fn process_file_extended<F: FnMut(FastaSequence)>( source: impl AsRef<Path>, mut func: F, remove_file: bool, ) { const FASTQ_EXTS: &[&str] = &["fq", "fastq"]; const FASTA_EXTS: &[&str] = &["fa", "fasta", "fna", "ffn"]; let mut file_type = None; let mut tmp = source.as_ref().file_name().unwrap().to_str().unwrap(); let mut path: &Path = tmp.as_ref(); while let Some(ext) = path.extension() { if FASTQ_EXTS.contains(&ext.to_str().unwrap()) { file_type = Some(FileType::Fastq); break; } if FASTA_EXTS.contains(&ext.to_str().unwrap()) { file_type = Some(FileType::Fasta); break; } tmp = &tmp[0..tmp.len() - ext.len() - 1]; path = tmp.as_ref() } match file_type { None => panic!( "Cannot recognize file type of '{}'", source.as_ref().display() ), Some(ftype) => match ftype { FileType::Fasta => { 
Self::process_fasta(source, func, remove_file); } FileType::Fastq => { Self::process_fastq(source, func, remove_file); } }, } } fn process_fasta( source: impl AsRef<Path>, mut func: impl FnMut(FastaSequence), remove_file: bool, ) { let mut intermediate = [Vec::new(), Vec::new()]; LinesReader::process_lines( source, |line: &[u8], finished| { if finished || (line.len() > 0 && line[0] == b'>') { if intermediate[SEQ_STATE].len() > 0 { Self::normalize_sequence(&mut intermediate[SEQ_STATE]); func(FastaSequence { ident: &intermediate[IDENT_STATE], seq: &intermediate[SEQ_STATE], qual: None, }); intermediate[SEQ_STATE].clear(); } intermediate[IDENT_STATE].clear(); intermediate[IDENT_STATE].extend_from_slice(line); } else if line.len() > 0 && line[0] == b';' { return; } else { intermediate[SEQ_STATE].extend_from_slice(line); } }, remove_file, ); } fn process_fastq( source: impl AsRef<Path>, mut func: impl FnMut(FastaSequence), remove_file: bool, ) { let mut state = IDENT_STATE; let mut skipped_plus = false; let mut intermediate = [Vec::new(), Vec::new()]; LinesReader::process_lines( source, |line: &[u8], finished| { match state { QUAL_STATE => { if !skipped_plus { skipped_plus = true; return; } Self::normalize_sequence(&mut intermediate[SEQ_STATE]); func(FastaSequence { ident: &intermediate[IDENT_STATE], seq: &intermediate[SEQ_STATE], qual: Some(line), }); skipped_plus = false; } state => { intermediate[state].clear(); intermediate[state].extend_from_slice(line); } } state = (state + 1) % 3; }, remove_file, ); } }
use crate::io::lines_reader::LinesReader; use bstr::ByteSlice; use nix::sys::signal::{self, Signal}; use nix::unistd::Pid; use rand::Rng; use serde::Serialize; use std::fs::File; use std::hint::unreachable_unchecked; use std::intrinsics::unlikely; use std::io::{BufRead, BufReader, Read}; use std::num::Wrapping; use std::path::{Path, PathBuf}; use std::process::Command; use std::process::Stdio; use std::ptr::null_mut; use std::slice::from_raw_parts; const IDENT_STATE: usize = 0; const SEQ_STATE: usize = 1; const QUAL_STATE: usize = 2; enum FileType { Fasta, Fastq, } #[derive(Copy, Clone)] pub struct FastaSequence<'a> { pub ident: &'a [u8], pub seq: &'a [u8], pub qual: Option<&'a [u8]>, } const SEQ_LETTERS_MAPPING: [u8; 256] = { let mut lookup = [b'N'; 256]; lookup[b'A' as usize] = b'A'; lookup[b'C' as usize] = b'C'; lookup[b'G' as usize] = b'G'; lookup[b'T' as usize] = b'T'; lookup[b'a' as usize] = b'A'; lookup[b'c' as usize] = b'C'; lookup[b'g' as usize] = b'G'; lookup[b't' as usize] = b'T'; lookup }; pub struct SequencesReader; impl SequencesReader { fn normalize_sequence(seq: &mut [u8]) { for el in seq.iter_mut() { *el = SEQ_LETTERS_MAPPING[*el as usize]; } } pub fn process_file_extended<F: FnMut(FastaSequence)>( source: impl AsRef<Path>, mut func: F, remove_file: bool, ) { const FASTQ_EXTS: &[&str] = &["fq", "fastq"]; const FASTA_EXTS: &[&str] = &["fa", "fasta", "fna", "ffn"]; let mut file_type = None; let mut tmp = source.as_ref().file_name().unwrap().to_str().unwrap(); let mut path: &Path = tmp.as_ref(); while let Some(ext) = path.extension() { if FASTQ_EXTS.contains(&ext.to_str().unwrap()) { file_type = Some(FileType::Fastq); break; } if FASTA_EXTS.contains(&ext.to_str().unwrap()) { file_type = Some(FileType::Fasta); break; } tmp = &tmp[0..tmp.len() - ext.len() - 1]; path = tmp.as_ref() } match file_type { None => panic!( "Cannot recognize file type of '{}'", source.as_ref().display() ), Some(ftype) => match ftype { FileType::Fasta => { 
Self::process_fasta(source, func, remove_file); } FileType::Fastq => { Self::process_fastq(source, func, remove_file); } }, } } fn process_fasta( source: impl AsRef<Path>, mut func: impl FnMut(FastaSequence), remove_file: bool, ) { let mut intermediate = [Vec::new(), Vec::new()]; LinesReader::process_lines(
e[IDENT_STATE], seq: &intermediate[SEQ_STATE], qual: None, }); intermediate[SEQ_STATE].clear(); } intermediate[IDENT_STATE].clear(); intermediate[IDENT_STATE].extend_from_slice(line); } else if line.len() > 0 && line[0] == b';' { return; } else { intermediate[SEQ_STATE].extend_from_slice(line); } }, remove_file, ); } fn process_fastq( source: impl AsRef<Path>, mut func: impl FnMut(FastaSequence), remove_file: bool, ) { let mut state = IDENT_STATE; let mut skipped_plus = false; let mut intermediate = [Vec::new(), Vec::new()]; LinesReader::process_lines( source, |line: &[u8], finished| { match state { QUAL_STATE => { if !skipped_plus { skipped_plus = true; return; } Self::normalize_sequence(&mut intermediate[SEQ_STATE]); func(FastaSequence { ident: &intermediate[IDENT_STATE], seq: &intermediate[SEQ_STATE], qual: Some(line), }); skipped_plus = false; } state => { intermediate[state].clear(); intermediate[state].extend_from_slice(line); } } state = (state + 1) % 3; }, remove_file, ); } }
source, |line: &[u8], finished| { if finished || (line.len() > 0 && line[0] == b'>') { if intermediate[SEQ_STATE].len() > 0 { Self::normalize_sequence(&mut intermediate[SEQ_STATE]); func(FastaSequence { ident: &intermediat
random
[ { "content": "pub fn decompress_file_buffered(file: impl AsRef<Path>, func: impl FnMut(&[u8]) -> Result<(), ()>, buf_size: usize) -> Result<(), LibdeflateError> {\n\n let mut read_file = File::open(file).unwrap();\n\n\n\n let mut input_stream = DeflateChunkedBufferInput::new(|buf| {\n\n read_file.read(buf).unwrap_or(0)\n\n }, buf_size);\n\n\n\n let mut output_stream = DeflateChunkedBufferOutput::new(func, buf_size);\n\n\n\n let mut decompressor = libdeflate_alloc_decompressor();\n\n\n\n while input_stream.ensure_length(1) {\n\n libdeflate_gzip_decompress(&mut decompressor, &mut input_stream, &mut output_stream)?\n\n }\n\n Ok(())\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn it_works() {\n\n let result = 2 + 2;\n\n assert_eq!(result, 4);\n\n }\n\n}\n", "file_path": "libdeflate-rs/src/lib.rs", "rank": 0, "score": 433342.16041108186 }, { "content": "#[inline(always)]\n\npub fn decode_varint(mut read_byte: impl FnMut() -> Option<u8>) -> Option<u64> {\n\n let mut result = 0;\n\n let mut offset = 0u32;\n\n loop {\n\n let mut value = read_byte()?;\n\n let next = (value & 0b10000000) != 0;\n\n result |= ((value & 0b1111111) as u64) << offset;\n\n if !next {\n\n break;\n\n }\n\n offset += 7;\n\n }\n\n Some(result)\n\n}\n\n\n", "file_path": "src/io/varint.rs", "rank": 1, "score": 329241.4926953735 }, { "content": "#[inline(always)]\n\n#[allow(clippy::uninit_assumed_init)]\n\npub fn encode_varint<T>(mut write_bytes: impl FnOnce(&[u8]) -> T, mut value: u64) -> T {\n\n let mut bytes: [u8; 9] = unsafe { MaybeUninit::uninit().assume_init() };\n\n let mut index = 0;\n\n while index < bytes.len() {\n\n let rem = ((value > 127) as u8) << 7;\n\n bytes[index] = (((value as u8) & 0b1111111) | rem);\n\n value >>= 7;\n\n index += 1;\n\n if value == 0 {\n\n break;\n\n }\n\n }\n\n write_bytes(&bytes[..index])\n\n}\n\n\n", "file_path": "src/io/varint.rs", "rank": 2, "score": 285970.73927121965 }, { "content": "pub fn fast_smart_radix_sort<T: Sync + Send, F: SortKey<T>, 
const PARALLEL: bool>(data: &mut [T]) {\n\n smart_radix_sort_::<T, F, PARALLEL, false>(data, F::KEY_BITS as u8 - RADIX_SIZE_LOG);\n\n}\n\n\n", "file_path": "parallel-processor-rs/src/fast_smart_bucket_sort.rs", "rank": 3, "score": 278887.187875624 }, { "content": "#[inline(always)]\n\npub fn decode_varint_flags<F: FnMut() -> Option<u8>, FLAGS_COUNT: typenum::Unsigned>(\n\n mut read_byte: F,\n\n) -> Option<(u64, u8)> {\n\n let first_byte = read_byte()?;\n\n\n\n let useful_first_bits: usize = 8 - FLAGS_COUNT::to_usize();\n\n let first_byte_max_value: u8 = ((1u16 << (useful_first_bits - 1)) - 1) as u8;\n\n\n\n let flags = (((first_byte as u16) >> useful_first_bits) as u8);\n\n let mut result = (first_byte & first_byte_max_value) as u64;\n\n let mut offset = useful_first_bits - 1;\n\n let mut next = first_byte & (1 << (useful_first_bits - 1)) != 0;\n\n\n\n loop {\n\n if !next {\n\n break;\n\n }\n\n let mut value = read_byte()?;\n\n next = (value & 0b10000000) != 0;\n\n result |= ((value & 0b1111111) as u64) << offset;\n\n offset += 7;\n\n }\n\n Some((result, flags))\n\n}\n\n\n", "file_path": "src/io/varint.rs", "rank": 4, "score": 265600.82696074876 }, { "content": "pub fn build_precode_decode_table(d: &mut LibdeflateDecompressor) -> bool {\n\n /* When you change TABLEBITS, you must change ENOUGH, and vice versa! */\n\n const_assert!(PRECODE_TABLEBITS == 7 && PRECODE_ENOUGH == 128);\n\n\n\n return build_decode_table(\n\n &mut d.l.precode_decode_table,\n\n &mut d.precode_lens,\n\n DEFLATE_NUM_PRECODE_SYMS,\n\n &PRECODE_DECODE_RESULTS,\n\n PRECODE_TABLEBITS,\n\n DEFLATE_MAX_PRE_CODEWORD_LEN,\n\n d.sorted_syms.as_mut_ptr(),\n\n );\n\n}\n\n\n\n/* Build the decode table for the literal/length code. 
*/\n", "file_path": "libdeflate-rs/src/decompress_utils.rs", "rank": 5, "score": 236389.10451140098 }, { "content": "#[inline(always)]\n\n#[allow(clippy::uninit_assumed_init)]\n\npub fn encode_varint_flags<T, F: FnOnce(&[u8]) -> T, FLAGS_COUNT: typenum::Unsigned>(\n\n mut write_bytes: F,\n\n mut value: u64,\n\n flags: u8,\n\n) -> T {\n\n let mut bytes: [u8; 10] = unsafe { MaybeUninit::uninit().assume_init() };\n\n\n\n let useful_first_bits: usize = 8 - FLAGS_COUNT::to_usize();\n\n let first_byte_max_value: u8 = ((1u16 << (useful_first_bits - 1)) - 1) as u8;\n\n\n\n let fr_rem = ((value > first_byte_max_value as u64) as u8) << (useful_first_bits - 1);\n\n\n\n bytes[0] = (((flags as u16) << useful_first_bits) as u8)\n\n | (value as u8 & first_byte_max_value)\n\n | fr_rem;\n\n\n\n value >>= (useful_first_bits - 1);\n\n let mut index = 1;\n\n\n\n while index < bytes.len() {\n", "file_path": "src/io/varint.rs", "rank": 6, "score": 227273.85771953122 }, { "content": "pub fn debug_print_allocations(dir: impl AsRef<Path>, period: Duration) {\n\n let dir = dir.as_ref().to_path_buf();\n\n std::thread::spawn(move || {\n\n IS_NESTED.store(true, Ordering::Relaxed);\n\n let mut count = 1;\n\n loop {\n\n std::thread::sleep(period);\n\n\n\n let path = dir.join(format!(\"memory-log{}.json\", count));\n\n\n\n let mut allocations: Vec<_> =\n\n ALLOCATION_INFOS.iter().map(|x| x.as_writable()).collect();\n\n\n\n allocations.sort_by(|x, y| y.max_size.cmp(&x.max_size));\n\n\n\n File::create(path)\n\n .unwrap()\n\n .write_all(to_string_pretty(&allocations).unwrap().as_bytes());\n\n\n\n count += 1;\n\n }\n\n });\n\n}\n\n\n", "file_path": "parallel-processor-rs/src/debug_allocator.rs", "rank": 7, "score": 223414.67850617046 }, { "content": "#[inline(always)]\n\npub fn bkw_l(c: u8) -> HashIntegerType {\n\n unsafe { *BKW_LOOKUP.get_unchecked(c as usize) }\n\n}\n\n\n\npub struct CanonicalRabinKarpHashIterator<N: HashableSequence> {\n\n seq: N,\n\n rmmult: HashIntegerType,\n\n fh: 
HashIntegerType,\n\n rc: HashIntegerType,\n\n k_minus1: usize,\n\n}\n\n\n", "file_path": "src/hashes/base/cn_rkhash_base.rs", "rank": 8, "score": 220523.38493612214 }, { "content": "#[inline(always)]\n\npub fn fwd_l(c: u8) -> HashIntegerType {\n\n unsafe { *FWD_LOOKUP.get_unchecked(c as usize) }\n\n}\n\n\n", "file_path": "src/hashes/base/cn_rkhash_base.rs", "rank": 9, "score": 220523.38493612214 }, { "content": "#[inline(always)]\n\npub fn fwd_l(c: u8) -> HashIntegerType {\n\n unsafe { *FWD_LOOKUP.get_unchecked(c as usize) }\n\n}\n\n\n\npub struct ForwardRabinKarpHashIterator<N: HashableSequence> {\n\n seq: N,\n\n rmmult: HashIntegerType,\n\n fh: HashIntegerType,\n\n k_minus1: usize,\n\n}\n\n\n", "file_path": "src/hashes/base/fw_rkhash_base.rs", "rank": 10, "score": 220523.38493612214 }, { "content": "fn assert_reads<H: HashFunctionFactory>(read: &[u8], bucket: BucketIndexType) {\n\n // Test ***************************\n\n let K: usize = 32;\n\n\n\n if read.len() == 33 {\n\n let hashes = H::new(&read[0..K], M);\n\n let minimizer = hashes\n\n .iter()\n\n .min_by_key(|read| {\n\n H::get_full_minimizer::<{ DEFAULT_MINIMIZER_MASK }>(read.to_unextendable())\n\n })\n\n .unwrap();\n\n\n\n let hashes1 = H::new(&read[1..K + 1], M);\n\n let minimizer1 = hashes1\n\n .iter()\n\n .min_by_key(|read| {\n\n H::get_full_minimizer::<{ DEFAULT_MINIMIZER_MASK }>(read.to_unextendable())\n\n })\n\n .unwrap();\n", "file_path": "src/utils/debug_functions.rs", "rank": 11, "score": 218126.09567896748 }, { "content": "fn store_backtrace(addr: *mut u8, size: usize) {\n\n let bt: Backtrace = backtrace::Backtrace::force_capture();\n\n\n\n let bt_string = bt.to_string();\n\n\n\n let parts = bt_string.split(\" 5:\").collect::<Vec<_>>();\n\n\n\n let bt_string = parts.last().unwrap().to_string();\n\n\n\n ADDRESSES_BACKTRACE.insert(addr as usize, bt_string.clone());\n\n\n\n let info = ALLOCATION_INFOS\n\n .entry(bt_string.clone())\n\n .or_insert(AllocationInfo {\n\n bt: bt_string,\n\n current_count: 
AtomicUsize::new(0),\n\n current_size: AtomicUsize::new(0),\n\n max_size: AtomicUsize::new(0),\n\n total_count: AtomicUsize::new(0),\n\n });\n\n\n\n info.current_count.fetch_add(1, Ordering::Relaxed);\n\n info.current_size.fetch_add(size, Ordering::Relaxed);\n\n info.total_count.fetch_add(1, Ordering::Relaxed);\n\n info.max_size\n\n .fetch_max(info.current_size.load(Ordering::Relaxed), Ordering::Relaxed);\n\n}\n\n\n", "file_path": "parallel-processor-rs/src/debug_allocator.rs", "rank": 12, "score": 217931.98202135676 }, { "content": "fn dealloc_backtrace(ptr: *mut u8, size: usize) {\n\n let (_, bt) = ADDRESSES_BACKTRACE.remove(&(ptr as usize)).unwrap();\n\n\n\n let aref = ALLOCATION_INFOS.get(&bt).unwrap();\n\n aref.current_count.fetch_sub(1, Ordering::Relaxed);\n\n aref.current_size.fetch_sub(size, Ordering::Relaxed);\n\n}\n\n\n\nimpl DebugAllocator {\n\n pub const fn new() -> Self {\n\n Self {\n\n default_allocator: System,\n\n }\n\n }\n\n}\n\n\n", "file_path": "parallel-processor-rs/src/debug_allocator.rs", "rank": 13, "score": 217931.98202135676 }, { "content": "fn update_backtrace(ptr: *mut u8, new_ptr: *mut u8, diff: isize) {\n\n let (_, bt) = ADDRESSES_BACKTRACE.remove(&(ptr as usize)).unwrap();\n\n\n\n let aref = ALLOCATION_INFOS.get(&bt).unwrap();\n\n if diff > 0 {\n\n aref.current_size\n\n .fetch_add(diff as usize, Ordering::Relaxed);\n\n aref.max_size\n\n .fetch_max(aref.current_size.load(Ordering::Relaxed), Ordering::Relaxed);\n\n } else {\n\n aref.current_size\n\n .fetch_sub((-diff) as usize, Ordering::Relaxed);\n\n }\n\n\n\n ADDRESSES_BACKTRACE.insert(new_ptr as usize, bt);\n\n}\n\n\n", "file_path": "parallel-processor-rs/src/debug_allocator.rs", "rank": 14, "score": 202931.33912528306 }, { "content": "#[inline(always)]\n\npub fn h(c: u8) -> u64 {\n\n unsafe { *H_LOOKUP.get_unchecked(c as usize) }\n\n}\n\n\n", "file_path": "src/hashes/nthash_base.rs", "rank": 15, "score": 195197.00755638065 }, { "content": "#[inline(always)]\n\npub fn rc(c: u8) -> u64 
{\n\n unsafe { *RC_LOOKUP.get_unchecked(c as usize) }\n\n}\n\n\n\nconst H_LOOKUP: [u64; 256] = {\n\n let mut lookup = [1; 256];\n\n\n\n // Support compressed reads transparently\n\n lookup[0 /*b'A'*/] = HASH_A;\n\n lookup[1 /*b'C'*/] = HASH_C;\n\n lookup[2 /*b'T'*/] = HASH_T;\n\n lookup[3 /*b'G'*/] = HASH_G;\n\n lookup[4 /*b'N'*/] = 0;\n\n\n\n lookup[b'A' as usize] = HASH_A;\n\n lookup[b'C' as usize] = HASH_C;\n\n lookup[b'G' as usize] = HASH_G;\n\n lookup[b'T' as usize] = HASH_T;\n\n lookup[b'N' as usize] = 0;\n\n lookup\n", "file_path": "src/hashes/nthash_base.rs", "rank": 16, "score": 192457.11709889257 }, { "content": "fn fastexp(base: HashIntegerType, mut exp: usize) -> HashIntegerType {\n\n let mut result: HashIntegerType = 1;\n\n let mut sqv = base;\n\n\n\n while exp > 0 {\n\n if exp & 0x1 == 1 {\n\n result = result.wrapping_mul(sqv);\n\n }\n\n exp /= 2;\n\n sqv = sqv.wrapping_mul(sqv);\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "src/hashes/base/fw_rkhash_base.rs", "rank": 17, "score": 192210.21629922686 }, { "content": "fn fastexp(base: HashIntegerType, mut exp: usize) -> HashIntegerType {\n\n let mut result: HashIntegerType = 1;\n\n let mut sqv = base;\n\n\n\n while exp > 0 {\n\n if exp & 0x1 == 1 {\n\n result = result.wrapping_mul(sqv);\n\n }\n\n exp /= 2;\n\n sqv = sqv.wrapping_mul(sqv);\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "src/hashes/base/cn_rkhash_base.rs", "rank": 18, "score": 192210.21629922686 }, { "content": "pub fn get_memory_mode(swap_priority: usize) -> MemoryFileMode {\n\n if PREFER_MEMORY.load(Ordering::Relaxed) {\n\n MemoryFileMode::PreferMemory { swap_priority }\n\n } else {\n\n MemoryFileMode::DiskOnly\n\n }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 19, "score": 191921.65869239732 }, { "content": "pub fn cast_static_mut<T: ?Sized>(val: &mut T) -> &'static mut T {\n\n unsafe { std::mem::transmute(val) }\n\n}\n", "file_path": "src/utils/mod.rs", "rank": 20, "score": 187214.1865645406 }, { "content": "pub fn 
compute_best_m(k: usize) -> usize {\n\n if k < 27 {\n\n min(k - 1, ((k + 5) as f64 / 3.0).round() as usize)\n\n } else {\n\n ((k + 2) as f64 / 3.0).round() as usize\n\n }\n\n}\n\n\n\nimpl Utils {\n\n #[inline(always)]\n\n pub fn compress_base(base: u8) -> u8 {\n\n (base >> 1) & 0x3\n\n }\n\n\n\n #[inline(always)]\n\n pub fn decompress_base(cbase: u8) -> u8 {\n\n C_INV_LETTERS[cbase as usize]\n\n }\n\n\n\n #[inline]\n", "file_path": "src/utils/mod.rs", "rank": 21, "score": 177290.8913302006 }, { "content": "pub fn process_subbucket<'a, F: KmersTransformExecutorFactory>(\n\n global_data: &F::GlobalExtraData<'a>,\n\n mut bucket_stream: FileReader,\n\n tmp_ref_vec: &mut Vec<ReadRef>,\n\n tmp_data_vec: &mut Vec<u8>,\n\n executor: &mut F::ExecutorType<'a>,\n\n file_path: &Path,\n\n vecs_process_queue: &Arc<SegQueue<(PathBuf, bool)>>,\n\n can_resplit: bool,\n\n is_outlier: bool,\n\n) {\n\n while let Ok(hash) = bucket_stream.read_u16::<LittleEndian>() {\n\n let size = decode_varint(|| bucket_stream.read_u8().ok()).unwrap() as usize;\n\n\n\n tmp_data_vec.reserve(size);\n\n let len = tmp_data_vec.len();\n\n unsafe {\n\n tmp_data_vec.set_len(len + size);\n\n }\n\n bucket_stream\n", "file_path": "src/pipeline_common/kmers_transform/process_subbucket.rs", "rank": 22, "score": 176682.15501441847 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct IntermediateReadsIndex {\n\n index: Vec<u64>,\n\n}\n\n\n\npub struct IntermediateReadsWriter<T> {\n\n writer: lz4::Encoder<FileWriter>,\n\n chunk_size: u64,\n\n index: IntermediateReadsIndex,\n\n path: PathBuf,\n\n _phantom: PhantomData<T>,\n\n}\n\n\n", "file_path": "src/io/concurrent/intermediate_storage.rs", "rank": 23, "score": 164568.7136879457 }, { "content": "#[derive(Debug, Desse, DesseSized, Default)]\n\nstruct IntermediateReadsHeader {\n\n magic: [u8; 16],\n\n index_offset: u64,\n\n}\n\n\n", "file_path": "src/io/concurrent/intermediate_storage.rs", "rank": 24, "score": 164568.7136879457 }, { "content": "struct 
RefThreadWrapper<'a, T: ?Sized>(&'a mut T);\n\nunsafe impl<'a, T: ?Sized> Sync for RefThreadWrapper<'a, T> {}\n\nunsafe impl<'a, T: ?Sized> Send for RefThreadWrapper<'a, T> {}\n\n\n\nimpl ReadsReader {\n\n pub fn from_file(name: String) -> ReadsReader {\n\n let is_compressed = name.ends_with(\".lz4\");\n\n let file = File::open(name).unwrap();\n\n\n\n let reader: Box<dyn Read> = if is_compressed {\n\n let decoder = lz4::Decoder::new(file).unwrap();\n\n Box::new(decoder)\n\n } else {\n\n Box::new(file)\n\n };\n\n ReadsReader {\n\n reader: UnsafeCell::new(reader),\n\n }\n\n }\n\n\n\n pub fn for_each<F: FnMut(&[u8])>(&self, mut func: F) {\n\n let mut reader_cell = self.reader.uget();\n\n let reader = BufReader::new(reader_cell);\n\n for line in reader.lines() {\n\n func(line.unwrap().as_bytes());\n\n }\n\n }\n\n}\n", "file_path": "src/io/reads_reader.rs", "rank": 26, "score": 161547.98891881778 }, { "content": "pub trait BucketWriter<DataType = u8> {\n\n type ExtraData;\n\n fn write_to(&self, bucket: &mut Vec<DataType>, extra_data: &Self::ExtraData);\n\n fn get_size(&self) -> usize;\n\n}\n\n\n\nimpl<T: Copy> BucketWriter<T> for T {\n\n type ExtraData = ();\n\n\n\n #[inline(always)]\n\n fn write_to(&self, bucket: &mut Vec<T>, _extra_data: &Self::ExtraData) {\n\n bucket.push(*self);\n\n }\n\n\n\n #[inline(always)]\n\n fn get_size(&self) -> usize {\n\n 1\n\n }\n\n}\n\n\n", "file_path": "parallel-processor-rs/src/multi_thread_buckets.rs", "rank": 27, "score": 161258.986523643 }, { "content": "pub fn debug_minimizers<H: HashFunctionFactory, R: MinimizerInputSequence, const MASK: u32>(\n\n read: R,\n\n m: usize,\n\n k: usize,\n\n) {\n\n println!(\"Debugging sequence: {}\", read.debug_to_string());\n\n\n\n let mut queue = RollingMinQueue::<H>::new(k - m);\n\n\n\n let hashes = H::new(read, m);\n\n\n\n let mut rolling_iter = queue.make_iter::<_, MASK>(hashes.iter().map(|x| x.to_unextendable()));\n\n\n\n for (idx, hash) in rolling_iter.enumerate() {\n\n println!(\n\n 
\"Minimizer info for kmer: {}\\nHASH: {} UNMASKED_HASH: {} FB: {} SB: {} SH: {}\",\n\n read.get_subslice(idx..(idx + k - 1)).debug_to_string(),\n\n H::get_full_minimizer::<MASK>(hash),\n\n H::get_full_minimizer::<DEFAULT_MINIMIZER_MASK>(hash),\n\n H::get_first_bucket(hash),\n\n H::get_second_bucket(hash),\n\n H::get_sorting_hash(hash),\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/utils/debug_functions.rs", "rank": 28, "score": 159371.67719373896 }, { "content": "#[inline(always)]\n\nfn fwd_nt_manual_roll(hash: ExtForwardNtHash, k: usize, out_b: u8, in_b: u8) -> ExtForwardNtHash {\n\n let res = hash.0.rotate_left(1) ^ h(in_b);\n\n ExtForwardNtHash(res ^ h(out_b).rotate_left(k as u32))\n\n}\n\n\n", "file_path": "src/hashes/fw_nthash.rs", "rank": 29, "score": 155369.28807436256 }, { "content": "#[inline(always)]\n\npub fn fill_bits_bytewise<I: DeflateInput, O: DeflateOutput>(data: &mut DecompressTempData<I, O>) {\n\n loop {\n\n if unsafe { likely(data.input_stream.ensure_length(1)) } {\n\n let mut byte = [0];\n\n unsafe {\n\n data.input_stream.read_unchecked(&mut byte);\n\n }\n\n data.bitbuf |= (byte[0] as BitBufType) << data.bitsleft;\n\n } else {\n\n data.overrun_count += 1;\n\n }\n\n data.bitsleft += 8;\n\n if data.bitsleft > BITBUF_NBITS - 8 {\n\n break;\n\n }\n\n }\n\n}\n\n\n\n/*\n\n * Fill the bitbuffer variable by reading the next word from the input buffer\n", "file_path": "libdeflate-rs/src/decompress_utils.rs", "rank": 30, "score": 150924.48457409162 }, { "content": "pub fn striped_parallel_smart_radix_sort<T: Ord + Send + Sync + Debug, F: SortKey<T>>(\n\n striped_file: &[&mut [T]],\n\n dest_buffer: &mut [T],\n\n) {\n\n let num_threads = rayon::current_num_threads();\n\n let queue = crossbeam::queue::ArrayQueue::new(num_threads);\n\n\n\n let first_shift = F::KEY_BITS as u8 - RADIX_SIZE_LOG;\n\n\n\n for i in 0..num_threads {\n\n queue.push([0; RADIX_SIZE + 1]);\n\n }\n\n\n\n striped_file.par_iter().for_each(|chunk| {\n\n let mut counts = 
queue.pop().unwrap();\n\n for el in chunk.iter() {\n\n counts[((F::get_shifted(el, first_shift)) as usize + 1)] += 1usize;\n\n }\n\n queue.push(counts).unwrap();\n\n });\n", "file_path": "parallel-processor-rs/src/fast_smart_bucket_sort.rs", "rank": 31, "score": 148563.82282392922 }, { "content": "pub trait ExtendableHashTraitType: Copy + Clone + Debug + Eq + Ord + Send + Sync {\n\n type HashTypeUnextendable: UnextendableHashTraitType;\n\n fn to_unextendable(self) -> Self::HashTypeUnextendable;\n\n}\n\n\n", "file_path": "src/hashes/mod.rs", "rank": 32, "score": 147100.5382471647 }, { "content": "pub fn run_query<\n\n BucketingHash: HashFunctionFactory,\n\n MergingHash: HashFunctionFactory,\n\n AssemblerColorsManager: ColorsManager,\n\n const BUCKETS_COUNT: usize,\n\n>(\n\n k: usize,\n\n m: usize,\n\n step: QuerierStartingStep,\n\n graph_input: PathBuf,\n\n query_input: PathBuf,\n\n output_file: PathBuf,\n\n temp_dir: PathBuf,\n\n threads_count: usize,\n\n) {\n\n PHASES_TIMES_MONITOR.write().init();\n\n\n\n // let global_colors_table = AssemblerColorsManager::create_colors_table(\n\n // output_file.with_extension(\"colors.dat\"),\n\n // color_names,\n", "file_path": "src/querier.rs", "rank": 33, "score": 139345.45089645902 }, { "content": "pub fn run_assembler<\n\n BucketingHash: HashFunctionFactory,\n\n MergingHash: HashFunctionFactory,\n\n AssemblerColorsManager: ColorsManager,\n\n const BUCKETS_COUNT: usize,\n\n>(\n\n k: usize,\n\n m: usize,\n\n step: AssemblerStartingStep,\n\n last_step: AssemblerStartingStep,\n\n input: Vec<PathBuf>,\n\n output_file: PathBuf,\n\n temp_dir: PathBuf,\n\n threads_count: usize,\n\n min_multiplicity: usize,\n\n loopit_number: Option<usize>,\n\n) {\n\n PHASES_TIMES_MONITOR.write().init();\n\n\n\n let color_names: Vec<_> = input\n", "file_path": "src/assembler.rs", "rank": 34, "score": 139345.45089645902 }, { "content": "pub trait HashableSequence: Clone {\n\n unsafe fn get_unchecked_cbase(&self, index: usize) -> u8;\n\n fn 
bases_count(&self) -> usize;\n\n}\n\n\n\nimpl HashableSequence for &[u8] {\n\n #[inline(always)]\n\n unsafe fn get_unchecked_cbase(&self, index: usize) -> u8 {\n\n *self.get_unchecked(index)\n\n }\n\n\n\n #[inline(always)]\n\n fn bases_count(&self) -> usize {\n\n self.len()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use super::ExtendableHashTraitType;\n", "file_path": "src/hashes/mod.rs", "rank": 35, "score": 137511.6401466623 }, { "content": "fn get_rmmult(k: usize) -> HashIntegerType {\n\n unsafe {\n\n static mut LAST_K: usize = 0;\n\n static mut LAST_RMMULT: HashIntegerType = 0;\n\n static CHANGE_LOCK: Mutex<()> = Mutex::const_new(parking_lot::RawMutex::INIT, ());\n\n\n\n if unsafe { unlikely(LAST_K != k) } {\n\n let rmmult = fastexp(MULTIPLIER, k - 1);\n\n let _lock = CHANGE_LOCK.lock();\n\n LAST_RMMULT = rmmult;\n\n LAST_K = k;\n\n rmmult\n\n } else {\n\n LAST_RMMULT\n\n }\n\n }\n\n}\n\n\n\nimpl<N: HashableSequence> ForwardRabinKarpHashIterator<N> {\n\n pub fn new(seq: N, k: usize) -> Result<ForwardRabinKarpHashIterator<N>, &'static str> {\n", "file_path": "src/hashes/base/fw_rkhash_base.rs", "rank": 36, "score": 136606.93882510526 }, { "content": "fn get_rmmult(k: usize) -> HashIntegerType {\n\n unsafe {\n\n static mut LAST_K: usize = 0;\n\n static mut LAST_RMMULT: HashIntegerType = 0;\n\n static CHANGE_LOCK: Mutex<()> = Mutex::const_new(parking_lot::RawMutex::INIT, ());\n\n\n\n if unsafe { unlikely(LAST_K != k) } {\n\n let rmmult = fastexp(MULTIPLIER, k - 1);\n\n let _lock = CHANGE_LOCK.lock();\n\n LAST_RMMULT = rmmult;\n\n LAST_K = k;\n\n rmmult\n\n } else {\n\n LAST_RMMULT\n\n }\n\n }\n\n}\n\n\n\nimpl<N: HashableSequence> CanonicalRabinKarpHashIterator<N> {\n\n pub fn new(seq: N, k: usize) -> Result<CanonicalRabinKarpHashIterator<N>, &'static str> {\n", "file_path": "src/hashes/base/cn_rkhash_base.rs", "rank": 37, "score": 136606.93882510526 }, { "content": "#[inline(always)]\n\nfn get_mask(k: usize) -> HashIntegerType {\n\n HashIntegerType::MAX 
>> ((((size_of::<HashIntegerType>() * 4) - k) * 2) as HashIntegerType)\n\n}\n\n\n\nimpl<N: HashableSequence> CanonicalSeqHashIterator<N> {\n\n pub fn new(seq: N, k: usize) -> Result<CanonicalSeqHashIterator<N>, &'static str> {\n\n if k > seq.bases_count() || k > (size_of::<HashIntegerType>() * 4) {\n\n return Err(\"K out of range!\");\n\n }\n\n\n\n let mut fh = 0;\n\n let mut bw = 0;\n\n for i in 0..(k - 1) {\n\n fh = (fh << 2) | unsafe { seq.get_unchecked_cbase(i) as HashIntegerType };\n\n bw |= unsafe { xrc(seq.get_unchecked_cbase(i) as HashIntegerType) } << (i * 2);\n\n }\n\n\n\n let mask = get_mask(k);\n\n\n\n Ok(CanonicalSeqHashIterator {\n", "file_path": "src/hashes/base/cn_seqhash_base.rs", "rank": 38, "score": 136606.93882510526 }, { "content": "#[inline(always)]\n\nfn get_mask(k: usize) -> HashIntegerType {\n\n HashIntegerType::MAX >> ((((size_of::<HashIntegerType>() * 4) - k) * 2) as HashIntegerType)\n\n}\n\n\n\nimpl<N: HashableSequence> ForwardSeqHashIterator<N> {\n\n pub fn new(seq: N, k: usize) -> Result<ForwardSeqHashIterator<N>, &'static str> {\n\n if k > seq.bases_count() || k > (size_of::<HashIntegerType>() * 4) {\n\n return Err(\"K out of range!\");\n\n }\n\n\n\n let mut fh = 0;\n\n for i in 0..(k - 1) {\n\n fh = (fh << 2) | unsafe { seq.get_unchecked_cbase(i) as HashIntegerType };\n\n }\n\n\n\n let mask = get_mask(k);\n\n\n\n Ok(ForwardSeqHashIterator {\n\n seq,\n\n mask,\n", "file_path": "src/hashes/base/fw_seqhash_base.rs", "rank": 39, "score": 136606.93882510526 }, { "content": "pub fn debug_increase() {\n\n KCOUNTER.fetch_add(1, Ordering::Relaxed);\n\n}\n\n\n", "file_path": "src/utils/debug_utils.rs", "rank": 40, "score": 135448.11396569994 }, { "content": "pub fn debug_print() {\n\n println!(\"COUNTER: {:?}\", KCOUNTER.load(Ordering::Relaxed));\n\n}\n\n\n\n#[thread_local]\n\npub static BIGGEST_BUCKET: AtomicBool = AtomicBool::new(false);\n", "file_path": "src/utils/debug_utils.rs", "rank": 41, "score": 135448.11396569994 }, { "content": "pub 
fn build_decode_table(\n\n decode_table: &mut [u32],\n\n lens: &[LenType],\n\n num_syms: usize,\n\n decode_results: &[u32],\n\n table_bits: usize,\n\n max_codeword_len: usize,\n\n mut sorted_syms: *mut u16,\n\n) -> bool {\n\n let mut len_counts: [u32; DEFLATE_MAX_CODEWORD_LEN + 1] = [0; DEFLATE_MAX_CODEWORD_LEN + 1];\n\n let mut offsets: [u32; DEFLATE_MAX_CODEWORD_LEN + 1] = [0; DEFLATE_MAX_CODEWORD_LEN + 1];\n\n let mut count: u32 = 0; /* num codewords remaining with this length */\n\n let mut codespace_used: u32 = 0; /* codespace used out of '2^max_codeword_len' */\n\n let mut cur_table_end: usize = 0; /* end index of current table */\n\n let mut subtable_prefix: usize = 0; /* codeword prefix of current subtable */\n\n let mut subtable_start: usize = 0; /* start index of current subtable */\n\n let mut subtable_bits: usize = 0; /* log2 of current subtable length */\n\n\n\n /* Count how many codewords have each length, including 0. */\n\n for len in 0..=max_codeword_len {\n", "file_path": "libdeflate-rs/src/decompress_utils.rs", "rank": 42, "score": 131883.1630331446 }, { "content": "pub fn minb_reader<\n\n ReadAssociatedData: SequenceExtraData,\n\n GlobalData,\n\n FileInfo: Clone + Sync + Send + Default,\n\n>(\n\n context: &MinimizerBucketingExecutionContext<ReadAssociatedData, GlobalData>,\n\n manager: ObjectsPoolManager<MinimizerBucketingQueueData<FileInfo>, (PathBuf, FileInfo)>,\n\n) {\n\n while let Some((input, file_info)) = manager.recv_obj() {\n\n let mut data = manager.allocate();\n\n data.file_info = file_info.clone();\n\n data.start_read_index = 0;\n\n\n\n let mut read_index = 0;\n\n\n\n context.current_file.fetch_add(1, Ordering::Relaxed);\n\n\n\n SequencesReader::process_file_extended(\n\n input,\n\n |x| {\n", "file_path": "src/pipeline_common/minimizer_bucketing/reader.rs", "rank": 43, "score": 131883.1630331446 }, { "content": "pub fn build_litlen_decode_table(\n\n d: &mut LibdeflateDecompressor,\n\n num_litlen_syms: usize,\n\n _num_offset_syms: 
usize,\n\n) -> bool {\n\n /* When you change TABLEBITS, you must change ENOUGH, and vice versa! */\n\n const_assert!(LITLEN_TABLEBITS == 10 && LITLEN_ENOUGH == 1334);\n\n\n\n return build_decode_table(\n\n &mut d.litlen_decode_table,\n\n &mut d.l.lens[..],\n\n num_litlen_syms,\n\n &LITLEN_DECODE_RESULTS,\n\n LITLEN_TABLEBITS,\n\n DEFLATE_MAX_LITLEN_CODEWORD_LEN,\n\n d.sorted_syms.as_mut_ptr(),\n\n );\n\n}\n\n\n\n/* Build the decode table for the offset code. */\n", "file_path": "libdeflate-rs/src/decompress_utils.rs", "rank": 44, "score": 130212.3217451867 }, { "content": "pub fn build_offset_decode_table(\n\n d: &mut LibdeflateDecompressor,\n\n num_litlen_syms: usize,\n\n num_offset_syms: usize,\n\n) -> bool {\n\n /* When you change TABLEBITS, you must change ENOUGH, and vice versa! */\n\n const_assert!(OFFSET_TABLEBITS == 8 && OFFSET_ENOUGH == 402);\n\n\n\n return build_decode_table(\n\n &mut d.offset_decode_table,\n\n &d.l.lens[num_litlen_syms..],\n\n num_offset_syms,\n\n &OFFSET_DECODE_RESULTS,\n\n OFFSET_TABLEBITS,\n\n DEFLATE_MAX_OFFSET_CODEWORD_LEN,\n\n d.sorted_syms.as_mut_ptr(),\n\n );\n\n}\n\n\n\n/*****************************************************************************\n", "file_path": "libdeflate-rs/src/decompress_utils.rs", "rank": 45, "score": 130212.3217451867 }, { "content": "pub fn start_info_logging() {\n\n thread::spawn(|| loop {\n\n print_memory_info();\n\n thread::sleep(Duration::from_millis(3000));\n\n });\n\n}\n\n\n", "file_path": "parallel-processor-rs/src/mem_tracker/mod.rs", "rank": 46, "score": 128609.81565580293 }, { "content": "pub fn print_memory_info() {\n\n let mut info_map = MEMORY_INFO.lock();\n\n\n\n let mut tot_reserved = 0;\n\n let mut tot_resident = 0;\n\n let mut tot_objects_count = 0;\n\n\n\n for (pos, info) in info_map.as_mut().unwrap().iter_mut() {\n\n let mut reserved = 0;\n\n let mut resident = 0;\n\n let mut objects_count = 0;\n\n\n\n info.drain_filter(|x| match x.upgrade() {\n\n None => true,\n\n Some(val) => {\n\n 
reserved += val.bytes.load(Ordering::Relaxed);\n\n resident += val.resident_bytes.load(Ordering::Relaxed);\n\n objects_count += 1;\n\n false\n\n }\n", "file_path": "parallel-processor-rs/src/mem_tracker/mod.rs", "rank": 47, "score": 128609.81565580293 }, { "content": "#[cfg(feature = \"track-usage\")]\n\npub fn create_hashmap_entry(\n\n location: &'static Location,\n\n type_name: &'static str,\n\n) -> Arc<MemoryInfo> {\n\n let mut mem_info_map = MEMORY_INFO.lock();\n\n\n\n let new_mem_info = Arc::new(MemoryInfo {\n\n bytes: AtomicUsize::new(0),\n\n resident_bytes: AtomicUsize::new(0),\n\n });\n\n\n\n mem_info_map\n\n .as_mut()\n\n .unwrap()\n\n .entry((location, type_name))\n\n .or_insert(Vec::new())\n\n .push(Arc::downgrade(&new_mem_info));\n\n\n\n new_mem_info\n\n}\n\n\n", "file_path": "parallel-processor-rs/src/mem_tracker/mod.rs", "rank": 48, "score": 128609.81565580293 }, { "content": "pub fn init_memory_info() {\n\n *MEMORY_INFO.lock() = Some(HashMap::new());\n\n}\n\n\n", "file_path": "parallel-processor-rs/src/mem_tracker/mod.rs", "rank": 49, "score": 128609.81565580293 }, { "content": "pub fn add_two(a: i32) -> i32 {\n\n a + 2\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::hashes::fw_nthash::{ForwardNtHashIterator, ForwardNtHashIteratorFactory};\n\n use crate::hashes::fw_seqhash::u64::{ForwardSeqHashFactory, ForwardSeqHashIterator};\n\n use crate::hashes::{HashFunction, HashFunctionFactory};\n\n use crate::io::varint::{encode_varint, encode_varint_flags};\n\n use crate::rolling::minqueue::RollingMinQueue;\n\n use crate::utils::compressed_read::CompressedRead;\n\n use crate::utils::Utils;\n\n use bincode::DefaultOptions;\n\n use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};\n\n use parallel_processor::fast_smart_bucket_sort::fast_smart_radix_sort;\n\n use rand::{thread_rng, RngCore};\n\n use serde::{Deserialize, Serialize};\n\n use std::fs::File;\n", "file_path": "src/benchmarks.rs", "rank": 50, "score": 127592.41224010085 
}, { "content": "pub fn libdeflate_alloc_decompressor() -> LibdeflateDecompressor {\n\n /*\n\n * Note that only certain parts of the decompressor actually must be\n\n * initialized here:\n\n *\n\n * - 'static_codes_loaded' must be initialized to false.\n\n *\n\n * - The first half of the main portion of each decode table must be\n\n * initialized to any value, to avoid reading from uninitialized\n\n * memory during table expansion in build_decode_table(). (Although,\n\n * this is really just to avoid warnings with dynamic tools like\n\n * valgrind, since build_decode_table() is guaranteed to initialize\n\n * all entries eventually anyway.)\n\n *\n\n * But for simplicity, we currently just zero the whole decompressor.\n\n */\n\n unsafe { MaybeUninit::<LibdeflateDecompressor>::zeroed().assume_init() }\n\n}\n\n\n", "file_path": "libdeflate-rs/src/lib.rs", "rank": 51, "score": 127299.53346490837 }, { "content": "pub fn process_cmdutils(args: CmdUtilsArgs) {\n\n match args {\n\n CmdUtilsArgs::Rewrite(args) => {\n\n cmd_rewrite(args);\n\n }\n\n }\n\n}\n", "file_path": "src/cmd_utils/mod.rs", "rank": 52, "score": 123094.11018889293 }, { "content": "type BitBufType = usize;\n\n\n\npub struct DecompressTempData<'a, I: DeflateInput, O: DeflateOutput> {\n\n pub bitbuf: BitBufType,\n\n pub bitsleft: usize,\n\n pub overrun_count: usize,\n\n pub is_final_block: bool,\n\n pub block_type: u32,\n\n // len: u16,\n\n // nlen: u16,\n\n pub num_litlen_syms: usize,\n\n pub num_offset_syms: usize,\n\n // u16 tmp16;\n\n // u32 tmp32;\n\n pub input_stream: &'a mut I,\n\n pub output_stream: &'a mut O,\n\n}\n\n\n\n/*\n\n * Number of bits the bitbuffer variable can hold.\n", "file_path": "libdeflate-rs/src/decompress_utils.rs", "rank": 53, "score": 121893.08797969467 }, { "content": "pub fn cmd_rewrite(args: CmdRewriteArgs) {\n\n let mut files_list = Vec::new();\n\n\n\n files_list.extend(args.input.into_iter());\n\n\n\n for list in args.input_lists {\n\n let file_list = 
BufReader::new(File::open(list).unwrap());\n\n\n\n files_list.extend(file_list.lines().map(|l| PathBuf::from(l.unwrap())));\n\n }\n\n\n\n files_list.par_iter().for_each(|x| {\n\n let new_file_path = args.output_path.join(x.file_name().unwrap());\n\n\n\n let mut writer = ReadsWriter::new_compressed_gzip(new_file_path, 9);\n\n\n\n SequencesReader::process_file_extended(\n\n x,\n\n |f| {\n\n writer.add_read(f);\n\n },\n\n false,\n\n )\n\n });\n\n}\n", "file_path": "src/cmd_utils/cmd_rewrite.rs", "rank": 54, "score": 121555.83107958737 }, { "content": "#[derive(StructOpt, Debug)]\n\nstruct MatchesArgs {\n\n /// Input fasta file with associated colors file (in the same folder)\n\n input_file: PathBuf,\n\n\n\n /// Debug print matches of a color index\n\n match_color: ColorIndexType,\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 55, "score": 121266.89550814597 }, { "content": "pub trait HashFunctionFactory: Ord + Sized + Clone + Debug + 'static {\n\n type HashTypeUnextendable: UnextendableHashTraitType;\n\n type HashTypeExtendable: ExtendableHashTraitType<\n\n HashTypeUnextendable = Self::HashTypeUnextendable,\n\n >;\n\n type HashIterator<N: HashableSequence>: HashFunction<Self>;\n\n\n\n type PreferredRandomState: BuildHasher;\n\n fn get_random_state() -> Self::PreferredRandomState;\n\n\n\n const NULL_BASE: u8;\n\n\n\n fn new<N: HashableSequence>(seq: N, k: usize) -> Self::HashIterator<N>;\n\n\n\n /// Gets the first buckets count, used in MinimizerBucketing phase\n\n fn get_first_bucket(hash: Self::HashTypeUnextendable) -> BucketIndexType;\n\n\n\n /// Gets the second buckets count, used in KmersMerge phase to further split reads\n\n fn get_second_bucket(hash: Self::HashTypeUnextendable) -> BucketIndexType;\n\n\n", "file_path": "src/hashes/mod.rs", "rank": 56, "score": 119280.73260498293 }, { "content": "pub trait KmersTransformExecutor<'x, F: KmersTransformExecutorFactory> {\n\n fn preprocess_bucket(\n\n &mut self,\n\n global_data: &F::GlobalExtraData<'x>,\n\n flags: 
u8,\n\n input_extra_data: F::AssociatedExtraData,\n\n read: CompressedRead,\n\n ) -> ReadDispatchInfo<F::AssociatedExtraData>;\n\n\n\n fn maybe_swap_bucket(&mut self, global_data: &F::GlobalExtraData<'x>);\n\n\n\n fn process_group(\n\n &mut self,\n\n global_data: &F::GlobalExtraData<'x>,\n\n reads: &[ReadRef],\n\n memory: &[u8],\n\n );\n\n\n\n fn finalize(self, global_data: &F::GlobalExtraData<'x>);\n\n}\n", "file_path": "src/pipeline_common/kmers_transform/mod.rs", "rank": 57, "score": 119254.41353168187 }, { "content": "type IndexType = usize;\n\n\n\n#[repr(packed)]\n\n#[derive(Eq, PartialOrd, PartialEq, Ord, Copy, Clone, Debug)]\n\npub struct SortedData<const LEN: usize> {\n\n pub data: [u8; LEN],\n\n}\n\n\n\nimpl<const LEN: usize> SortedData<LEN> {\n\n #[inline(always)]\n\n pub fn new(data: [u8; LEN]) -> Self {\n\n Self { data }\n\n }\n\n}\n\n\n\nimpl<const LEN: usize> BucketWriter for SortedData<LEN> {\n\n type ExtraData = ();\n\n\n\n #[inline(always)]\n\n fn write_to(&self, bucket: &mut Vec<u8>, _: &Self::ExtraData) {\n\n bucket.write(&self.data[..]);\n\n }\n\n #[inline(always)]\n\n fn get_size(&self) -> usize {\n\n LEN\n\n }\n\n}\n\n\n", "file_path": "parallel-processor-rs/src/fast_smart_bucket_sort.rs", "rank": 58, "score": 118913.93641901638 }, { "content": "enum WriterChannels {\n\n None,\n\n File(BufWriter<File>),\n\n CompressedFileGzip(BufWriter<GzEncoder<BufWriter<File>>>),\n\n CompressedFileLZ4(BufWriter<lz4::Encoder<BufWriter<File>>>),\n\n}\n\n\n\nimpl WriterChannels {\n\n fn get_writer(&mut self) -> &mut dyn Write {\n\n match self {\n\n WriterChannels::File(x) => x,\n\n WriterChannels::CompressedFileGzip(x) => x,\n\n WriterChannels::CompressedFileLZ4(x) => x,\n\n WriterChannels::None => unreachable!(),\n\n }\n\n }\n\n}\n\n\n\npub struct ReadsWriter {\n\n writer: WriterChannels,\n", "file_path": "src/io/reads_writer.rs", "rank": 59, "score": 118814.49258571168 }, { "content": "#[inline(always)]\n\npub fn have_bits<I: DeflateInput, O: 
DeflateOutput>(\n\n data: &mut DecompressTempData<I, O>,\n\n n: usize,\n\n) -> bool {\n\n data.bitsleft >= n\n\n}\n\n\n\n/*\n\n * Load more bits from the input buffer until the specified number of bits is\n\n * present in the bitbuffer variable. 'n' cannot be too large; see MAX_ENSURE\n\n * and CAN_ENSURE().\n\n */\n", "file_path": "libdeflate-rs/src/decompress_utils.rs", "rank": 60, "score": 117100.83820246776 }, { "content": "#[inline(always)]\n\npub fn bits<I: DeflateInput, O: DeflateOutput>(\n\n data: &mut DecompressTempData<I, O>,\n\n n: usize,\n\n) -> u32 {\n\n (data.bitbuf as u32) & ((1u32 << (n)) - 1)\n\n}\n\n\n\n/*\n\n * Remove the next 'n' bits from the bitbuffer variable.\n\n */\n", "file_path": "libdeflate-rs/src/decompress_utils.rs", "rank": 61, "score": 117100.83820246776 }, { "content": "#[inline(always)]\n\npub fn ensure_bits<I: DeflateInput, O: DeflateOutput>(\n\n data: &mut DecompressTempData<I, O>,\n\n n: usize,\n\n) {\n\n if !have_bits(data, n) {\n\n if cfg!(target_endian = \"little\")\n\n && unsafe {\n\n likely(\n\n data.input_stream\n\n .ensure_length(std::mem::size_of::<BitBufType>()),\n\n )\n\n }\n\n {\n\n unsafe { fill_bits_wordwise(data); }\n\n } else {\n\n fill_bits_bytewise(data);\n\n }\n\n }\n\n}\n\n\n\n/*\n\n * Return the next 'n' bits from the bitbuffer variable without removing them.\n\n */\n", "file_path": "libdeflate-rs/src/decompress_utils.rs", "rank": 62, "score": 115623.00069251159 }, { "content": "#[inline(always)]\n\npub fn pop_bits<I: DeflateInput, O: DeflateOutput>(\n\n data: &mut DecompressTempData<I, O>,\n\n n: usize,\n\n) -> u32 {\n\n let tmp = bits(data, n);\n\n remove_bits(data, n);\n\n tmp\n\n}\n\n\n\n/*\n\n * Verify that the input buffer hasn't been overread, then align the input to\n\n * the next byte boundary, discarding any remaining bits in the current byte.\n\n *\n\n * Note that if the bitbuffer variable currently contains more than 7 bits, then\n\n * we must rewind 'in_next', effectively putting those bits back. 
Only the bits\n\n * in what would be the \"current\" byte if we were reading one byte at a time can\n\n * be actually discarded.\n\n */\n", "file_path": "libdeflate-rs/src/decompress_utils.rs", "rank": 63, "score": 115623.00069251159 }, { "content": "#[inline(always)]\n\npub fn remove_bits<I: DeflateInput, O: DeflateOutput>(\n\n data: &mut DecompressTempData<I, O>,\n\n n: usize,\n\n) {\n\n data.bitbuf >>= n;\n\n data.bitsleft -= n\n\n}\n\n\n\n/*\n\n * Remove and return the next 'n' bits from the bitbuffer variable.\n\n */\n", "file_path": "libdeflate-rs/src/decompress_utils.rs", "rank": 64, "score": 115623.00069251159 }, { "content": "#[inline(always)]\n\npub fn align_input<I: DeflateInput, O: DeflateOutput>(\n\n data: &mut DecompressTempData<I, O>,\n\n) -> Result<(), LibdeflateError> {\n\n safety_check!(data.overrun_count <= (data.bitsleft >> 3));\n\n data.input_stream\n\n .move_stream_pos(-(((data.bitsleft >> 3) - data.overrun_count) as isize));\n\n data.overrun_count = 0;\n\n data.bitbuf = 0;\n\n data.bitsleft = 0;\n\n Ok(())\n\n}\n\n\n\n/*\n\n * Read a 16-bit value from the input. 
This must have been preceded by a call\n\n * to ALIGN_INPUT(), and the caller must have already checked for overrun.\n\n */\n\n#[inline(always)]\n\npub unsafe fn read_u16<I: DeflateInput, O: DeflateOutput>(\n\n data: &mut DecompressTempData<I, O>,\n\n) -> u16 {\n", "file_path": "libdeflate-rs/src/decompress_utils.rs", "rank": 65, "score": 115623.00069251159 }, { "content": "struct SequentialReader {\n\n reader: lz4::Decoder<FileReader>,\n\n index: IntermediateReadsIndex,\n\n index_position: u64,\n\n}\n\n\n\nimpl Read for SequentialReader {\n\n fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {\n\n loop {\n\n match self.reader.read(buf) {\n\n Ok(read) => {\n\n if read != 0 {\n\n return Ok(read);\n\n }\n\n }\n\n Err(err) => {\n\n return Err(err);\n\n }\n\n }\n\n self.index_position += 1;\n", "file_path": "src/io/concurrent/intermediate_storage.rs", "rank": 66, "score": 115039.67254019096 }, { "content": "struct PointerDecoder {\n\n ptr: *const u8,\n\n}\n\n\n\nimpl Read for PointerDecoder {\n\n #[inline(always)]\n\n fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {\n\n unsafe {\n\n std::ptr::copy_nonoverlapping(self.ptr, buf.as_mut_ptr(), buf.len());\n\n self.ptr = self.ptr.add(buf.len());\n\n }\n\n Ok(buf.len())\n\n }\n\n\n\n #[inline(always)]\n\n fn read_exact(&mut self, buf: &mut [u8]) -> std::io::Result<()> {\n\n unsafe {\n\n std::ptr::copy_nonoverlapping(self.ptr, buf.as_mut_ptr(), buf.len());\n\n self.ptr = self.ptr.add(buf.len());\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/io/concurrent/intermediate_storage.rs", "rank": 67, "score": 115039.67254019096 }, { "content": "#[derive(Debug, Desse, DesseSized, Default)]\n\nstruct ColorsFileHeader {\n\n magic: [u8; 16],\n\n version: u64,\n\n index_offset: u64,\n\n colors_count: u64,\n\n subsets_count: u64,\n\n total_size: u64,\n\n total_uncompressed_size: u64,\n\n}\n\n\n\n#[derive(Clone, Copy, Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq)]\n\npub struct ColorsIndexEntry 
{\n\n pub start_index: ColorIndexType,\n\n pub stride: ColorIndexType,\n\n file_offset: u64,\n\n}\n\n\n", "file_path": "src/colors/storage/serializer.rs", "rank": 68, "score": 114893.02578749631 }, { "content": "pub fn libdeflate_gzip_decompress<I: DeflateInput, O: DeflateOutput>(\n\n\td: &mut LibdeflateDecompressor,\n\n \tin_stream: &mut I,\n\n out_stream: &mut O,\n\n) -> Result<(), LibdeflateError> {\n\n\n\n\t/* ID1 */\n\n\tif in_stream.read_byte() != GZIP_ID1 {\n\n\t\treturn Err(LibdeflateError::BadData);\n\n\t}\n\n\t/* ID2 */\n\n\tif in_stream.read_byte() != GZIP_ID2 {\n\n\t\treturn Err(LibdeflateError::BadData);\n\n\t}\n\n\t/* CM */\n\n\tif in_stream.read_byte() != GZIP_CM_DEFLATE {\n\n\t\treturn Err(LibdeflateError::BadData);\n\n\t}\n\n\tlet flg = in_stream.read_byte();\n\n\n", "file_path": "libdeflate-rs/src/decompress_gzip.rs", "rank": 69, "score": 114202.11113783985 }, { "content": "pub fn libdeflate_deflate_decompress<I: DeflateInput, O: DeflateOutput>(\n\n d: &mut LibdeflateDecompressor,\n\n in_stream: &mut I,\n\n out_stream: &mut O,\n\n) -> Result<(), LibdeflateError> {\n\n deflate_decompress_template(d, in_stream, out_stream)\n\n}\n", "file_path": "libdeflate-rs/src/decompress_utils.rs", "rank": 70, "score": 114202.11113783985 }, { "content": "pub fn cast_static<T: ?Sized>(val: &T) -> &'static T {\n\n unsafe { std::mem::transmute(val) }\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 71, "score": 113427.4230230884 }, { "content": "pub fn deserialize<'de, D>(d: D) -> Result<u64, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n todo!()\n\n}\n\n\n\nmod tests {\n\n use crate::io::concurrent::intermediate_storage::{\n\n IntermediateReadsReader, IntermediateReadsWriter, VecReader,\n\n };\n\n use crate::io::varint::{\n\n decode_varint, decode_varint_flags, encode_varint, encode_varint_flags,\n\n };\n\n use crate::utils::compressed_read::{CompressedRead, CompressedReadIndipendent};\n\n use byteorder::WriteBytesExt;\n\n use 
parallel_processor::multi_thread_buckets::BucketType;\n\n use rand::RngCore;\n\n use std::io::{Cursor, Write};\n\n use std::iter::FromIterator;\n", "file_path": "src/io/varint.rs", "rank": 72, "score": 110346.4941629638 }, { "content": "pub trait ColorsSerializerImpl {\n\n fn decode_color(\n\n reader: impl Read,\n\n entry_info: ColorsIndexEntry,\n\n color: ColorIndexType,\n\n ) -> Vec<ColorIndexType>;\n\n // fn decode_colors(reader: impl Read) -> ;\n\n\n\n fn new(writer: ColorsFlushProcessing, checkpoint_distance: usize, colors_count: u64) -> Self;\n\n fn serialize_colors(&self, colors: &[ColorIndexType]) -> ColorIndexType;\n\n fn get_subsets_count(&self) -> u64;\n\n fn print_stats(&self);\n\n fn finalize(self) -> ColorsFlushProcessing;\n\n}\n", "file_path": "src/colors/storage/mod.rs", "rank": 73, "score": 108177.91427975608 }, { "content": "fn bincode_serialize_ref<S: Write, D: Serialize>(ser: &mut S, data: &D) {\n\n bincode::serialize_into(ser, data);\n\n}\n\n\n\nimpl<SI: ColorsSerializerImpl> Drop for ColorsSerializer<SI> {\n\n fn drop(&mut self) {\n\n let subsets_count = self.serializer_impl.get_subsets_count();\n\n\n\n let chunks_writer =\n\n unsafe { std::ptr::read(self.serializer_impl.deref() as *const SI).finalize() };\n\n\n\n let mut colors_lock = chunks_writer.colormap_file.lock();\n\n let (colors_file, index_map) = colors_lock.deref_mut();\n\n index_map.pairs.sort();\n\n\n\n colors_file.flush();\n\n\n\n let index_position = colors_file.stream_position().unwrap();\n\n\n\n bincode_serialize_ref(colors_file, index_map);\n", "file_path": "src/colors/storage/serializer.rs", "rank": 74, "score": 107894.94198607639 }, { "content": "#[inline(always)]\n\nfn xrc(base: HashIntegerType) -> HashIntegerType {\n\n base ^ 2\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::CanonicalSeqHashFactory;\n\n use super::HashIntegerType;\n\n use crate::hashes::tests::test_hash_function;\n\n use crate::hashes::{HashFunction, HashFunctionFactory};\n\n use 
std::mem::size_of;\n\n\n\n #[test]\n\n fn cn_seqhash_test() {\n\n test_hash_function::<CanonicalSeqHashFactory>(\n\n &(2..(size_of::<HashIntegerType>() * 4)).collect::<Vec<_>>(),\n\n true,\n\n );\n\n }\n\n}\n", "file_path": "src/hashes/base/cn_seqhash_base.rs", "rank": 75, "score": 106881.95082631758 }, { "content": "pub trait UnextendableHashTraitType = Copy\n\n + Clone\n\n + Debug\n\n + Default\n\n + Display\n\n + Eq\n\n + Ord\n\n + Hash\n\n + Send\n\n + Sync\n\n + Serialize\n\n + DeserializeOwned\n\n + 'static;\n\n\n", "file_path": "src/hashes/mod.rs", "rank": 76, "score": 104891.48280901188 }, { "content": "#[inline(always)]\n\npub fn serialize<S>(t: &u64, serializer: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n encode_varint(move |b| serializer.serialize_bytes(b), *t)\n\n}\n\n\n", "file_path": "src/io/varint.rs", "rank": 77, "score": 104826.67836658811 }, { "content": "/// Encoded color(s) of a minimizer bucketing step sequence\n\npub trait MinimizerBucketingSeqColorData:\n\n Default + Copy + Clone + SequenceExtraData + Send + Sync\n\n{\n\n fn create(file_index: u64) -> Self;\n\n}\n\n\n", "file_path": "src/colors/colors_manager.rs", "rank": 78, "score": 104777.29278704253 }, { "content": "pub trait BucketType: Send {\n\n type InitType: ?Sized;\n\n type DataType = u8;\n\n\n\n const SUPPORTS_LOCK_FREE: bool;\n\n\n\n fn new(init_data: &Self::InitType, index: usize) -> Self;\n\n fn write_data(&mut self, data: &[Self::DataType]);\n\n fn write_data_lock_free(&self, _data: &[Self::DataType]) {}\n\n fn get_path(&self) -> PathBuf;\n\n fn finalize(self);\n\n}\n\n\n\npub struct MultiThreadBuckets<B: BucketType> {\n\n buckets: Vec<RwLock<B>>,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct DecimationFactor {\n\n pub numerator: usize,\n", "file_path": "parallel-processor-rs/src/multi_thread_buckets.rs", "rank": 79, "score": 101604.26085944028 }, { "content": "pub trait SequenceExtraData: Sized + Send + Debug {\n\n fn decode_from_slice(slice: &[u8]) 
-> Option<Self> {\n\n let mut cursor = Cursor::new(slice);\n\n Self::decode(&mut cursor)\n\n }\n\n\n\n unsafe fn decode_from_pointer(ptr: *const u8) -> Option<Self> {\n\n let mut stream = PointerDecoder { ptr };\n\n Self::decode(&mut stream)\n\n }\n\n\n\n fn decode<'a>(reader: &'a mut impl Read) -> Option<Self>;\n\n fn encode<'a>(&self, writer: &'a mut impl Write);\n\n\n\n fn max_size(&self) -> usize;\n\n}\n\n\n\nimpl SequenceExtraData for () {\n\n #[inline(always)]\n\n fn decode<'a>(reader: &'a mut impl Read) -> Option<Self> {\n", "file_path": "src/io/concurrent/intermediate_storage.rs", "rank": 80, "score": 97506.4650424122 }, { "content": "pub trait RollingKseqImpl<T: Copy, U: Copy> {\n\n fn clear(&mut self, ksize: usize);\n\n fn init(&mut self, index: usize, base: T);\n\n fn iter(&mut self, index: usize, out_base: T, in_base: T) -> U;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct RollingKseqIterator {}\n\n\n\nimpl RollingKseqIterator {\n\n pub fn iter_seq<'a, T: Copy, U: Copy>(\n\n seq: &'a [T],\n\n k: usize,\n\n iter_impl: &'a mut (impl RollingKseqImpl<T, U> + 'a),\n\n ) -> impl Iterator<Item = U> + 'a {\n\n let k_minus1 = k - 1;\n\n\n\n let maxv = if seq.len() > k_minus1 {\n\n iter_impl.clear(k);\n\n for (i, v) in seq[0..k_minus1].iter().enumerate() {\n", "file_path": "src/rolling/kseq_iterator.rs", "rank": 81, "score": 96593.37031586817 }, { "content": "struct RwLockIterator<'a, A, B, I: Iterator<Item = B>> {\n\n lock: RwLockReadGuard<'a, A>,\n\n iterator: I,\n\n}\n\n\n\nimpl<'a, A, B, I: Iterator<Item = B>> Iterator for RwLockIterator<'a, A, B, I> {\n\n type Item = B;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.iterator.next()\n\n }\n\n}\n\n\n\npub struct MemoryFileChunksIterator<'a, I: Iterator<Item = &'a mut [u8]>> {\n\n iter: I,\n\n}\n\n\n\nimpl<'a, I: Iterator<Item = &'a mut [u8]>> Iterator for MemoryFileChunksIterator<'a, I> {\n\n type Item = &'a mut [u8];\n\n\n", "file_path": "parallel-processor-rs/src/memory_fs/file/internal.rs", "rank": 
82, "score": 92677.16797412606 }, { "content": "struct UnitigLinkSerializer {\n\n link: UnitigLink,\n\n indexes: Vec<UnitigIndex>,\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct UnitigPointer {\n\n pub entry: u64,\n\n pub link_index: u64,\n\n}\n\n\n\nimpl UnitigLink {\n\n pub fn read_from(mut reader: impl Read, out_vec: &mut Vec<UnitigIndex>) -> Option<Self> {\n\n let entry = decode_varint(|| reader.read_u8().ok())?;\n\n let flags = reader.read_u8().ok()?;\n\n\n\n let len = decode_varint(|| reader.read_u8().ok())? as usize;\n\n\n\n let start = out_vec.len();\n\n for _i in 0..len {\n", "file_path": "src/io/structs/unitig_link.rs", "rank": 83, "score": 82254.68885728891 }, { "content": "#[derive(StructOpt, Debug)]\n\nenum CliArgs {\n\n Build(AssemblerArgs),\n\n Matches(MatchesArgs),\n\n Query(QueryArgs),\n\n Utils(CmdUtilsArgs),\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 84, "score": 77884.7950164054 }, { "content": "#[derive(StructOpt, Debug)]\n\nstruct AssemblerArgs {\n\n /// The input files\n\n pub input: Vec<PathBuf>,\n\n\n\n /// The lists of input files\n\n #[structopt(short = \"l\", long = \"input-lists\")]\n\n pub input_lists: Vec<PathBuf>,\n\n\n\n /// Enable colors\n\n #[structopt(short, long)]\n\n pub colors: bool,\n\n\n\n /// Minimum multiplicity required to keep a kmer\n\n #[structopt(short = \"s\", long = \"min-multiplicity\", default_value = \"2\")]\n\n pub min_multiplicity: usize,\n\n\n\n // /// Minimum correctness probability for each kmer (using fastq quality checks)\n\n // #[structopt(short = \"q\", long = \"quality-threshold\")]\n\n // pub quality_threshold: Option<f64>,\n\n #[structopt(short = \"n\", long, default_value = \"0\")]\n", "file_path": "src/main.rs", "rank": 85, "score": 75978.49740981724 }, { "content": "#[derive(StructOpt, Debug)]\n\nstruct QueryArgs {\n\n /// The input graph\n\n pub input_graph: PathBuf,\n\n\n\n /// The input query as a .fasta file\n\n pub input_query: PathBuf,\n\n\n\n /// Enable colors\n\n #[structopt(short, 
long)]\n\n pub colors: bool,\n\n\n\n #[structopt(short = \"o\", long = \"output-file\", default_value = \"output.csv\")]\n\n pub output_file: PathBuf,\n\n\n\n #[structopt(short = \"x\", long, default_value = \"MinimizerBucketing\")]\n\n pub step: QuerierStartingStep,\n\n\n\n #[structopt(flatten)]\n\n pub common_args: CommonArgs,\n\n}\n", "file_path": "src/main.rs", "rank": 86, "score": 75978.49740981724 }, { "content": "#[derive(StructOpt, Debug)]\n\nstruct CommonArgs {\n\n /// Specifies the k-mers length\n\n #[structopt(short, default_value = \"32\")]\n\n pub klen: usize,\n\n\n\n /// Specifies the m-mers (minimizers) length, defaults to min(3, ceil((K + 2) / 3))\n\n #[structopt(long)]\n\n pub mlen: Option<usize>,\n\n\n\n /// Directory for temporary files (default .temp_files)\n\n #[structopt(short = \"t\", long = \"temp-dir\", default_value = \".temp_files\")]\n\n pub temp_dir: PathBuf,\n\n\n\n /// Keep intermediate temporary files for debugging purposes\n\n #[structopt(long = \"keep-temp-files\")]\n\n pub keep_temp_files: bool,\n\n\n\n #[structopt(short = \"j\", long, default_value = \"16\")]\n\n pub threads_count: usize,\n\n\n", "file_path": "src/main.rs", "rank": 87, "score": 75978.49740981724 }, { "content": "fn main() {\n\n let args: CliArgs = CliArgs::from_args();\n\n\n\n #[cfg(feature = \"mem-analysis\")]\n\n {\n\n parallel_processor::mem_tracker::init_memory_info();\n\n parallel_processor::mem_tracker::start_info_logging();\n\n }\n\n\n\n panic::set_hook(Box::new(move |info| {\n\n let stdout = std::io::stdout();\n\n let mut _lock = stdout.lock();\n\n\n\n let stderr = std::io::stderr();\n\n let mut err_lock = stderr.lock();\n\n\n\n writeln!(\n\n err_lock,\n\n \"Thread panicked at location: {:?}\",\n\n info.location()\n", "file_path": "src/main.rs", "rank": 88, "score": 73393.4139276927 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct ColorsIndexMap {\n\n pairs: Vec<ColorsIndexEntry>,\n\n}\n\n\n\npub struct ColorsSerializer<SI: 
ColorsSerializerImpl> {\n\n colors_count: u64,\n\n serializer_impl: ManuallyDrop<SI>,\n\n}\n\n\n\nimpl<SI: ColorsSerializerImpl> ColorsSerializer<SI> {\n\n pub fn read_color(file: impl AsRef<Path>, color_index: ColorIndexType) -> Vec<String> {\n\n let mut result = Vec::new();\n\n\n\n let mut file = File::open(file).unwrap();\n\n\n\n let mut header_buffer = [0; ColorsFileHeader::SIZE];\n\n file.read_exact(&mut header_buffer);\n\n\n\n let header: ColorsFileHeader = ColorsFileHeader::deserialize_from(&header_buffer);\n\n assert_eq!(header.magic, MAGIC_STRING);\n", "file_path": "src/colors/storage/serializer.rs", "rank": 89, "score": 72973.6031270407 }, { "content": "struct RoaringBitmapInstance {\n\n bitmap: RoaringBitmap,\n\n offset: ColorIndexType,\n\n colors_count: u64,\n\n checkpoint_distance: u64,\n\n stride: ColorIndexType,\n\n last_color: ColorIndexType,\n\n}\n\n\n\nimpl RoaringBitmapInstance {\n\n fn new(\n\n colors_count: u64,\n\n checkpoint_distance: u64,\n\n offset: ColorIndexType,\n\n stride: ColorIndexType,\n\n ) -> Self {\n\n Self {\n\n bitmap: RoaringBitmap::new(),\n\n offset,\n\n colors_count,\n", "file_path": "src/colors/storage/roaring.rs", "rank": 90, "score": 72973.6031270407 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nstruct FinalUnitigInfo {\n\n is_start: bool,\n\n is_circular: bool,\n\n flags: UnitigFlags,\n\n}\n\n\n", "file_path": "src/assemble_pipeline/build_unitigs.rs", "rank": 91, "score": 72065.7791418128 }, { "content": "struct AllocLock {}\n\n\n\n#[thread_local]\n\nstatic IS_NESTED: AtomicBool = AtomicBool::new(false);\n\n\n\n// impl AllocLock {\n\n// fn lock() -> Self {\n\n// if unsafe { LOCK_COUNT } == 0 {\n\n// let id = std::thread::current().id();\n\n// while IS_LOCKED.swap(true, Ordering::Relaxed) {\n\n// std::thread::yield_now();\n\n// }\n\n// }\n\n// unsafe {\n\n// LOCK_COUNT += 1;\n\n// }\n\n// Self {}\n\n// }\n\n// }\n\n//\n", "file_path": "parallel-processor-rs/src/debug_allocator.rs", "rank": 92, "score": 
72060.79410914528 }, { "content": "struct AllocationInfo {\n\n bt: String,\n\n current_count: AtomicUsize,\n\n current_size: AtomicUsize,\n\n max_size: AtomicUsize,\n\n total_count: AtomicUsize,\n\n}\n\n\n\nimpl AllocationInfo {\n\n pub fn as_writable(&self) -> AllocationInfoWritable {\n\n AllocationInfoWritable {\n\n bt: self.bt.clone(),\n\n current_count: self.current_count.load(Ordering::Relaxed),\n\n current_size: self.current_size.load(Ordering::Relaxed),\n\n max_size: self.max_size.load(Ordering::Relaxed),\n\n total_count: self.total_count.load(Ordering::Relaxed),\n\n }\n\n }\n\n}\n\n\n", "file_path": "parallel-processor-rs/src/debug_allocator.rs", "rank": 93, "score": 72060.79410914528 }, { "content": "#[derive(Serialize, Deserialize, Clone)]\n\nstruct AllocationInfoWritable {\n\n bt: String,\n\n current_count: usize,\n\n current_size: usize,\n\n max_size: usize,\n\n total_count: usize,\n\n}\n\n\n\nlazy_static! {\n\n static ref ALLOCATION_INFOS: DashMap<String, AllocationInfo> = DashMap::new();\n\n static ref ADDRESSES_BACKTRACE: DashMap<usize, String> = DashMap::new();\n\n}\n\n\n", "file_path": "parallel-processor-rs/src/debug_allocator.rs", "rank": 94, "score": 71191.89477194923 }, { "content": "struct ContextExtraData {}\n\n\n\n#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]\n\npub struct KmersQueryData(pub u64);\n\n\n\n#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]\n\npub enum FileType {\n\n Graph,\n\n Query,\n\n}\n\n\n\nimpl Default for FileType {\n\n fn default() -> Self {\n\n Self::Graph\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]\n\npub enum ReadType {\n\n Graph,\n", "file_path": "src/query_pipeline/querier_minimizer_bucketing.rs", "rank": 95, "score": 71186.90973928169 }, { "content": "fn run_querier_from_args<\n\n BucketingHash: HashFunctionFactory,\n\n MergingHash: HashFunctionFactory,\n\n ColorsImpl: ColorsManager,\n\n const BUCKETS_COUNT: usize,\n\n>(\n\n args: QueryArgs,\n\n) 
{\n\n run_query::<BucketingHash, MergingHash, ColorsImpl, BUCKETS_COUNT>(\n\n args.common_args.klen,\n\n args.common_args\n\n .mlen\n\n .unwrap_or(compute_best_m(args.common_args.klen)),\n\n args.step,\n\n args.input_graph,\n\n args.input_query,\n\n args.output_file,\n\n args.common_args.temp_dir,\n\n args.common_args.threads_count,\n\n );\n", "file_path": "src/querier_generic_dispatcher.rs", "rank": 96, "score": 69305.84065183971 }, { "content": "fn run_assembler_from_args<\n\n BucketingHash: HashFunctionFactory,\n\n MergingHash: HashFunctionFactory,\n\n ColorsImpl: ColorsManager,\n\n const BUCKETS_COUNT: usize,\n\n>(\n\n args: AssemblerArgs,\n\n) {\n\n let mut inputs = args.input.clone();\n\n\n\n for list in args.input_lists {\n\n for input in BufReader::new(File::open(list).unwrap()).lines() {\n\n if let Ok(input) = input {\n\n inputs.push(PathBuf::from(input));\n\n }\n\n }\n\n }\n\n\n\n if inputs.is_empty() {\n\n println!(\"ERROR: No input files specified!\");\n", "file_path": "src/assembler_generic_dispatcher.rs", "rank": 97, "score": 69305.84065183971 }, { "content": "struct GlobalQueryMergeData<'a> {\n\n k: usize,\n\n m: usize,\n\n buckets_count: usize,\n\n counters_buckets: &'a MultiThreadBuckets<BinaryWriter>,\n\n global_resplit_data: MinimizerBucketingCommonData<()>,\n\n}\n\n\n", "file_path": "src/query_pipeline/parallel_kmers_query.rs", "rank": 98, "score": 68499.84264703936 }, { "content": "#[inline(always)]\n\nfn cnc_nt_manual_roll(\n\n hash: ExtCanonicalNtHash,\n\n k: usize,\n\n out_b: u8,\n\n in_b: u8,\n\n) -> ExtCanonicalNtHash {\n\n let res = hash.0.rotate_left(1) ^ h(in_b);\n\n let res_rc = hash.1 ^ rc(in_b).rotate_left(k as u32);\n\n\n\n ExtCanonicalNtHash(\n\n res ^ h(out_b).rotate_left(k as u32),\n\n (res_rc ^ rc(out_b)).rotate_right(1),\n\n )\n\n}\n\n\n", "file_path": "src/hashes/cn_nthash.rs", "rank": 99, "score": 68395.26144541318 } ]
Rust
riddle-image/src/image.rs
LaudateCorpus1/riddle-2
eec60152f2c934a0ebdb627a50286ce05fb6914d
use crate::*; use riddle_common::{Color, ColorElementConversion}; use riddle_math::*; use futures::{AsyncRead, AsyncReadExt}; use std::io::{BufReader, Cursor, Read, Write}; const ERR_PNG_ENCODE_FAILURE: &str = "Failed to encode Png"; const ERR_BMP_ENCODE_FAILURE: &str = "Failed to encode Bmp"; const ERR_JPEG_ENCODE_FAILURE: &str = "Failed to encode Jpeg"; const ERR_PNG_DECODE_FAILURE: &str = "Failed to decode Png"; const ERR_BMP_DECODE_FAILURE: &str = "Failed to decode Bmp"; const ERR_JPEG_DECODE_FAILURE: &str = "Failed to decode Jpeg"; #[derive(Clone, Debug)] pub struct Image { img: ::image::RgbaImage, } impl Image { pub fn load<R: Read>(mut r: R, format: ImageFormat) -> Result<Self> { let mut buf = vec![]; r.read_to_end(&mut buf)?; Self::from_bytes(&buf, format) } pub async fn load_async<R>(mut data: R, format: ImageFormat) -> Result<Self> where R: AsyncRead + Unpin, { let mut buf = vec![]; data.read_to_end(&mut buf).await?; Self::from_bytes(&buf, format) } pub fn save<W: Write>(&self, mut w: W, format: ImageFormat) -> Result<()> { match format { ImageFormat::Png => { ::image::png::PngEncoder::new(w) .encode( self.as_rgba8(), self.width(), self.height(), ::image::ColorType::Rgba8, ) .map_err(|_| ImageError::Save(ERR_PNG_ENCODE_FAILURE))?; } ImageFormat::Bmp => { ::image::bmp::BmpEncoder::new(&mut w) .encode( self.as_rgba8(), self.width(), self.height(), ::image::ColorType::Rgba8, ) .map_err(|_| ImageError::Save(ERR_BMP_ENCODE_FAILURE))?; } ImageFormat::Jpeg => { ::image::jpeg::JpegEncoder::new(&mut w) .encode( self.as_rgba8(), self.width(), self.height(), ::image::ColorType::Rgba8, ) .map_err(|_| ImageError::Save(ERR_JPEG_ENCODE_FAILURE))?; } } Ok(()) } pub fn from_bytes(bytes: &[u8], format: ImageFormat) -> Result<Self> { let buf_reader = BufReader::new(Cursor::new(bytes)); let img = match format { ImageFormat::Png => ::image::png::PngDecoder::new(buf_reader) .and_then(::image::DynamicImage::from_decoder) .map_err(|_| ImageError::Load(ERR_PNG_DECODE_FAILURE))?, 
ImageFormat::Bmp => ::image::bmp::BmpDecoder::new(buf_reader) .and_then(::image::DynamicImage::from_decoder) .map_err(|_| ImageError::Load(ERR_BMP_DECODE_FAILURE))?, ImageFormat::Jpeg => ::image::jpeg::JpegDecoder::new(buf_reader) .and_then(::image::DynamicImage::from_decoder) .map_err(|_| ImageError::Load(ERR_JPEG_DECODE_FAILURE))?, }; Ok(Image { img: img.into_rgba8(), }) } pub fn new(width: u32, height: u32) -> Self { let img = ::image::RgbaImage::from_raw(width, height, vec![0u8; (width * height * 4) as usize]) .unwrap(); Image { img } } pub fn get_pixel<L: Into<Vector2<u32>>>(&self, location: L) -> Color<u8> { let location = location.into(); let c: ::image::Rgba<u8> = *self.img.get_pixel(location.x, location.y); Color::rgba(c[0], c[1], c[2], c[3]) } pub fn set_pixel<L: Into<Vector2<u32>>, C: ColorElementConversion<Color<u8>>>( &mut self, location: L, color: C, ) { let color: Color<u8> = color.convert(); let color: [u8; 4] = color.into(); let location = location.into(); self.img.put_pixel(location.x, location.y, color.into()); } pub fn as_rgba8(&self) -> &[u8] { self.img.as_ref() } pub fn as_rgba8_mut(&mut self) -> &mut [u8] { self.img.as_mut() } pub fn byte_count(&self) -> usize { self.img.as_ref().len() } pub fn width(&self) -> u32 { self.img.width() } pub fn height(&self) -> u32 { self.img.height() } pub fn dimensions(&self) -> Vector2<u32> { let (w, h) = self.img.dimensions(); Vector2 { x: w, y: h } } pub fn rect(&self) -> Rect<u32> { Rect { location: Vector2 { x: 0, y: 0 }, dimensions: self.dimensions(), } } pub fn copy_rect(&self, source: &Rect<u32>) -> Image { let source_rect = self.rect().intersect(&source).unwrap_or_default(); let mut dest_img = Image::new(source_rect.dimensions.x, source_rect.dimensions.y); dest_img.blit_rect(self, &source_rect, Vector2::default()); dest_img } pub fn blit(&mut self, source: &Image, location: Vector2<i32>) { self.blit_rect(source, &source.rect(), location) } pub fn blit_rect(&mut self, source: &Image, source_rect: 
&Rect<u32>, location: Vector2<i32>) { let source_rect = if let Some(rect) = source.rect().intersect(source_rect) { rect } else { return; }; if let Some((rel_dest_rect, rel_src_rect)) = Rect::intersect_relative_to_both(self.dimensions(), source_rect.dimensions, location) { let abs_src_rec = Rect::new( rel_src_rect.location + source_rect.location.convert(), rel_src_rect.dimensions, ); let mut dest_view = self.create_view_mut(rel_dest_rect.clone().convert()); let src_view = source.create_view(abs_src_rec.convert()); for row in 0..(rel_dest_rect.dimensions.y as u32) { let dest = dest_view.get_row_rgba8_mut(row); let src = src_view.get_row_rgba8(row); dest.clone_from_slice(src); } } } pub fn fill_rect<C: ColorElementConversion<Color<u8>>>(&mut self, rect: Rect<u32>, color: C) { if let Some(dest_rect) = self.rect().intersect(&rect) { let color_bytes: [u8; 4] = color.convert().into(); let mut row_vec = Vec::with_capacity(dest_rect.dimensions.x as usize * 4); for _ in 0..dest_rect.dimensions.x { row_vec.extend_from_slice(&color_bytes[..]); } let mut dest_view = self.create_view_mut(dest_rect.clone().convert()); for row_idx in 0..(dest_rect.dimensions.y as u32) { let dest = dest_view.get_row_rgba8_mut(row_idx); dest.clone_from_slice(bytemuck::cast_slice(&row_vec[..])); } } } pub fn fill<C: ColorElementConversion<Color<u8>>>(&mut self, color: C) { self.fill_rect(self.rect(), color) } pub(crate) fn create_view(&self, rect: Rect<u32>) -> ImageView { ImageView::new(self, rect) } pub(crate) fn create_view_mut(&mut self, rect: Rect<u32>) -> ImageViewMut { ImageViewMut::new(self, rect) } } impl image_ext::ImageImageExt for Image { fn image_rgbaimage(&self) -> &::image::RgbaImage { &self.img } fn image_from_dynimage(img: ::image::DynamicImage) -> Self { Self { img: img.into_rgba8(), } } } #[derive(Debug, Eq, PartialEq, Copy, Clone)] pub enum ImageFormat { Png, Bmp, Jpeg, } impl ImageFormat { pub fn derive_from_path(path: &str) -> Option<Self> { let p = std::path::Path::new(path); 
let extension_str = p.extension()?.to_str()?; let extension = String::from(extension_str).to_lowercase(); match extension.as_str() { "png" => Some(ImageFormat::Png), "bmp" => Some(ImageFormat::Bmp), "jpeg" | "jpg" => Some(ImageFormat::Jpeg), _ => None, } } }
use crate::*; use riddle_common::{Color, ColorElementConversion}; use riddle_math::*; use futures::{AsyncRead, AsyncReadExt}; use std::io::{BufReader, Cursor, Read, Write}; const ERR_PNG_ENCODE_FAILURE: &str = "Failed to encode Png"; const ERR_BMP_ENCODE_FAILURE: &str = "Failed to encode Bmp"; const ERR_JPEG_ENCODE_FAILURE: &str = "Failed to encode Jpeg"; const ERR_PNG_DECODE_FAILURE: &str = "Failed to decode Png"; const ERR_BMP_DECODE_FAILURE: &str = "Failed to decode Bmp"; const ERR_JPEG_DECODE_FAILURE: &str = "Failed to decode Jpeg"; #[derive(Clone, Debug)] pub struct Image { img: ::image::RgbaImage, } impl Image { pub fn load<R: Read>(mut r: R, format: ImageFormat) -> Result<Self> { let mut buf = vec![]; r.read_to_end(&mut buf)?; Self::from_bytes(&buf, format) } pub async fn load_async<R>(mut data: R, format: ImageFormat) -> Result<Self> where R: AsyncRead + Unpin, { let mut buf = vec![]; data.read_to_end(&mut buf).await?; Self::from_bytes(&buf, format) } pub fn save<W: Write>(&self, mut w: W, format: ImageFormat) -> Result<()> { match format { ImageFormat::Png => { ::image::png::PngEncoder::new(w) .encode( self.as_rgba8(), self.width(), self.height(), ::image::ColorType::Rgba8, ) .map_err(|_| ImageError::Save(ERR_PNG_ENCODE_FAILURE))?; } ImageFormat::Bmp => { ::image::bmp::BmpEncoder::new(&mut w) .encode( self.as_rgba8(), self.width(), self.height(), ::image::ColorType::Rgba8, ) .map_err(|_| ImageError::Save(ERR_BMP_ENCODE_FAILURE))?; } ImageFormat::Jpeg => { ::image::jpeg::JpegEncoder::new(&mut w) .encode( self.as_rgba8(), self.width(), self.height(), ::image::ColorType::Rgba8, ) .map_err(|_| ImageError::Save(ERR_JPEG_ENCODE_FAILURE))?; } } Ok(()) } pub fn from_bytes(bytes: &[u8], format: ImageFormat) -> Result<Self> { let buf_reader = BufReader::new(Cursor::new(bytes)); let img = match format { ImageFormat::Png => ::image::png::PngDecoder::new(buf_reader) .and_then(::image::DynamicImage::from_decoder) .map_err(|_| ImageError::Load(ERR_PNG_DECODE_FAILURE))?, 
ImageFormat::Bmp => ::image::bmp::BmpDecoder::new(buf_reader) .and_then(::image::DynamicImage::from_decoder) .map_err(|_| ImageError::Load(ERR_BMP_DECODE_FAILURE))?, ImageFormat::Jpeg => ::image::jpeg::JpegDecoder::new(buf_reader) .and_then(::image::DynamicImage::from_decoder) .map_err(|_| ImageError::Load(ERR_JPEG_DECODE_FAILURE))?, }; Ok(Image { img: img.into_rgba8(), }) } pub fn new(width: u32, height: u32) -> Self { let img = ::image::RgbaImage::from_raw(width, height, vec![0u8; (width * height * 4) as usize]) .unwrap(); Image { img } } pub fn get_pixel<L: Into<Vector2<u32>>>(&self, location: L) -> Color<u8> { let location = location.into(); let c: ::image::Rgba<u8> = *self.img.get_pixel(location.x, location.y); Color::rgba(c[0], c[1], c[2], c[3]) } pub fn set_pixel<L: Into<Vector2<u32>>, C: ColorElementConversion<Color<u8>>>( &mut self, location: L, color: C, ) { let color: Color<u8> = color
rel_src_rect.dimensions, ); let mut dest_view = self.create_view_mut(rel_dest_rect.clone().convert()); let src_view = source.create_view(abs_src_rec.convert()); for row in 0..(rel_dest_rect.dimensions.y as u32) { let dest = dest_view.get_row_rgba8_mut(row); let src = src_view.get_row_rgba8(row); dest.clone_from_slice(src); } } } pub fn fill_rect<C: ColorElementConversion<Color<u8>>>(&mut self, rect: Rect<u32>, color: C) { if let Some(dest_rect) = self.rect().intersect(&rect) { let color_bytes: [u8; 4] = color.convert().into(); let mut row_vec = Vec::with_capacity(dest_rect.dimensions.x as usize * 4); for _ in 0..dest_rect.dimensions.x { row_vec.extend_from_slice(&color_bytes[..]); } let mut dest_view = self.create_view_mut(dest_rect.clone().convert()); for row_idx in 0..(dest_rect.dimensions.y as u32) { let dest = dest_view.get_row_rgba8_mut(row_idx); dest.clone_from_slice(bytemuck::cast_slice(&row_vec[..])); } } } pub fn fill<C: ColorElementConversion<Color<u8>>>(&mut self, color: C) { self.fill_rect(self.rect(), color) } pub(crate) fn create_view(&self, rect: Rect<u32>) -> ImageView { ImageView::new(self, rect) } pub(crate) fn create_view_mut(&mut self, rect: Rect<u32>) -> ImageViewMut { ImageViewMut::new(self, rect) } } impl image_ext::ImageImageExt for Image { fn image_rgbaimage(&self) -> &::image::RgbaImage { &self.img } fn image_from_dynimage(img: ::image::DynamicImage) -> Self { Self { img: img.into_rgba8(), } } } #[derive(Debug, Eq, PartialEq, Copy, Clone)] pub enum ImageFormat { Png, Bmp, Jpeg, } impl ImageFormat { pub fn derive_from_path(path: &str) -> Option<Self> { let p = std::path::Path::new(path); let extension_str = p.extension()?.to_str()?; let extension = String::from(extension_str).to_lowercase(); match extension.as_str() { "png" => Some(ImageFormat::Png), "bmp" => Some(ImageFormat::Bmp), "jpeg" | "jpg" => Some(ImageFormat::Jpeg), _ => None, } } }
.convert(); let color: [u8; 4] = color.into(); let location = location.into(); self.img.put_pixel(location.x, location.y, color.into()); } pub fn as_rgba8(&self) -> &[u8] { self.img.as_ref() } pub fn as_rgba8_mut(&mut self) -> &mut [u8] { self.img.as_mut() } pub fn byte_count(&self) -> usize { self.img.as_ref().len() } pub fn width(&self) -> u32 { self.img.width() } pub fn height(&self) -> u32 { self.img.height() } pub fn dimensions(&self) -> Vector2<u32> { let (w, h) = self.img.dimensions(); Vector2 { x: w, y: h } } pub fn rect(&self) -> Rect<u32> { Rect { location: Vector2 { x: 0, y: 0 }, dimensions: self.dimensions(), } } pub fn copy_rect(&self, source: &Rect<u32>) -> Image { let source_rect = self.rect().intersect(&source).unwrap_or_default(); let mut dest_img = Image::new(source_rect.dimensions.x, source_rect.dimensions.y); dest_img.blit_rect(self, &source_rect, Vector2::default()); dest_img } pub fn blit(&mut self, source: &Image, location: Vector2<i32>) { self.blit_rect(source, &source.rect(), location) } pub fn blit_rect(&mut self, source: &Image, source_rect: &Rect<u32>, location: Vector2<i32>) { let source_rect = if let Some(rect) = source.rect().intersect(source_rect) { rect } else { return; }; if let Some((rel_dest_rect, rel_src_rect)) = Rect::intersect_relative_to_both(self.dimensions(), source_rect.dimensions, location) { let abs_src_rec = Rect::new( rel_src_rect.location + source_rect.location.convert(),
random
[ { "content": "pub fn simple<R, F: FnOnce(&AudioSystem) -> Result<R>>(f: F) {\n\n\tlet (audio_system, _main_thread_state) = AudioSystem::new_system_pair().unwrap();\n\n\tlet _r = f(&audio_system).unwrap();\n\n\tlet start_time = Instant::now();\n\n\twhile Instant::now() - start_time < Duration::from_secs(2) {\n\n\t\taudio_system.process_frame();\n\n\t}\n\n}\n\n\n", "file_path": "riddle-audio/src/doctest.rs", "rank": 0, "score": 201845.07001721353 }, { "content": "type Result<R> = std::result::Result<R, ImageError>;\n", "file_path": "riddle-image/src/lib.rs", "rank": 1, "score": 182823.29519469113 }, { "content": "pub fn simple<R, F: FnOnce(&TimeSystem) -> R>(f: F) {\n\n\tlet time_system = TimeSystem::new();\n\n\tlet _r = f(&time_system);\n\n\tlet start_time = Instant::now();\n\n\twhile Instant::now() - start_time < Duration::from_secs(2) {\n\n\t\tstd::thread::sleep(std::time::Duration::from_millis(100));\n\n\t\ttime_system.process_frame();\n\n\t}\n\n}\n\n\n", "file_path": "riddle-time/src/doctest.rs", "rank": 2, "score": 162397.45066497772 }, { "content": "fn do_grid_passes(grid: &mut Vec<Vector2<i64>>, bounds: &Rect<i64>) {\n\n\tdo_pass(\n\n\t\tgrid,\n\n\t\tbounds,\n\n\t\t0_i64..bounds.dimensions.y,\n\n\t\t0_i64..bounds.dimensions.x,\n\n\t\t&[\n\n\t\t\tVector2 { x: -1, y: -1 },\n\n\t\t\tVector2 { x: 0, y: -1 },\n\n\t\t\tVector2 { x: 1, y: -1 },\n\n\t\t\tVector2 { x: -1, y: 0 },\n\n\t\t\tVector2 { x: 0, y: 0 },\n\n\t\t],\n\n\t);\n\n\n\n\tdo_pass(\n\n\t\tgrid,\n\n\t\tbounds,\n\n\t\t0_i64..bounds.dimensions.y,\n\n\t\t(0_i64..bounds.dimensions.x).rev(),\n", "file_path": "riddle-image/src/filters/distance_field.rs", "rank": 3, "score": 160558.81676028646 }, { "content": "fn main() -> Result<(), ImageError> {\n\n\tlet png_bytes = include_bytes!(\"sample.png\");\n\n\tlet png_img = Image::load(&png_bytes[..], ImageFormat::Png)?;\n\n\tprintln!(\"+ Image Loaded...\");\n\n\n\n\tlet processed_img = filters::distance_field(png_img, FIELD_SCALE);\n\n\tprintln!(\"+ Distance 
Field Calculated...\");\n\n\n\n\tlet mut out_path = env::temp_dir();\n\n\tout_path.push(OUT_FILE);\n\n\n\n\tlet out_file = File::create(out_path.clone())?;\n\n\tprocessed_img.save(out_file, ImageFormat::Png)?;\n\n\tprintln!(\"+ Image Saved ({:?})...\", out_path);\n\n\tprintln!(\"+ Done.\");\n\n\n\n\tOk(())\n\n}\n", "file_path": "riddle-image/examples/image-distance-field/main.rs", "rank": 4, "score": 156349.66499791108 }, { "content": "type Result<R> = std::result::Result<R, RiddleError>;\n", "file_path": "riddle/src/lib.rs", "rank": 5, "score": 140806.46345016334 }, { "content": "type Result<R> = std::result::Result<R, FontError>;\n", "file_path": "riddle-font/src/lib.rs", "rank": 6, "score": 139627.68564894158 }, { "content": "type Result<R> = std::result::Result<R, AudioError>;\n", "file_path": "riddle-audio/src/lib.rs", "rank": 7, "score": 139627.68564894158 }, { "content": "type Result<R> = std::result::Result<R, InputError>;\n", "file_path": "riddle-input/src/lib.rs", "rank": 8, "score": 139627.68564894158 }, { "content": "/// Generate a distance field for the given image using 8SSEDT. 
The scale argument roughly maps to\n\n/// how many pixels deep the boundary region is between min and max values.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # use riddle_image::*; fn main() -> Result<(), ImageError> {\n\n/// let png_bytes = include_bytes!(\"../../examples/image-distance-field/sample.png\");\n\n/// let png_img = Image::load(&png_bytes[..], ImageFormat::Png)?;\n\n///\n\n/// let processed_img = filters::distance_field(png_img, 10.0);\n\n/// # Ok(()) }\n\n/// ```\n\n///\n\n/// # Resources:\n\n///\n\n/// * <http://www.codersnotes.com/notes/signed-distance-fields/>\n\n/// * <https://github.com/Lisapple/8SSEDT>\n\npub fn distance_field(source: Image, scale: f64) -> Image {\n\n\tlet source_bounds: Rect<i64> = source.rect().convert();\n\n\tlet source_img = source.image_rgbaimage();\n\n\tlet mut inside_grid = vec![Vector2::new(0, 0); (source.width() * source.height()) as usize];\n\n\tlet mut outside_grid = vec![Vector2::new(0, 0); (source.width() * source.height()) as usize];\n\n\n\n\t// Generate initial grids\n\n\tfor y in 0..source.height() {\n\n\t\tfor x in 0..source.width() {\n\n\t\t\tlet grid_offset = (y * source.width() + x) as usize;\n\n\t\t\tlet source_pixel = source_img.get_pixel(x, y).to_luma();\n\n\t\t\tif source_pixel.0[0] > 127 {\n\n\t\t\t\toutside_grid[grid_offset] = GRID_INF;\n\n\t\t\t} else {\n\n\t\t\t\tinside_grid[grid_offset] = GRID_INF;\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n\n\tdo_grid_passes(&mut inside_grid, &source_bounds);\n", "file_path": "riddle-image/src/filters/distance_field.rs", "rank": 9, "score": 138637.6119720888 }, { "content": "type Result<R> = std::result::Result<R, PlatformError>;\n", "file_path": "riddle-platform-winit/src/lib.rs", "rank": 10, "score": 138478.72420174937 }, { "content": "type Result<R> = std::result::Result<R, WgpuRendererError>;\n\n\n\nuse buffered_renderer::*;\n\npub use device::*;\n\npub use error::*;\n\npub use renderer::*;\n\nuse shader::*;\n\npub use sprite::*;\n\npub use sprite_atlas::*;\n\npub 
use sprite_render_target::*;\n\nuse swap_chain_target::*;\n\nuse target::*;\n\nuse texture::*;\n\npub use window_device::*;\n\n\n\npub use riddle_renderer_common::*;\n\n\n\nuse riddle_renderer_common::vertex::*;\n\n\n\npub type DefaultRenderer = Renderer<WindowWgpuDevice>;\n", "file_path": "riddle-renderer-wgpu/src/lib.rs", "rank": 11, "score": 137358.43962327577 }, { "content": "fn main() -> Result<()> {\n\n\tlet rdl = RiddleLib::new()?;\n\n\tlet mut demo = WGPURendererDemo::new(&rdl)?;\n\n\n\n\trdl.run(move |rdl| match rdl.event() {\n\n\t\tEvent::Platform(PlatformEvent::WindowClose(_)) => {\n\n\t\t\trdl.quit();\n\n\t\t}\n\n\t\tEvent::ProcessFrame => {\n\n\t\t\tdemo.update(rdl).unwrap();\n\n\t\t\tdemo.render().unwrap();\n\n\t\t}\n\n\t\t_ => (),\n\n\t});\n\n}\n\n\n", "file_path": "riddle/examples/wgpu-renderer-overlay/main.rs", "rank": 12, "score": 117128.69014661509 }, { "content": "fn main() -> Result<(), RiddleError> {\n\n\tlet rdl = RiddleLib::new()?;\n\n\tlet mut state = DemoState::new(&rdl)?;\n\n\n\n\trdl.run(move |rdl| match rdl.event() {\n\n\t\tEvent::Platform(PlatformEvent::WindowClose(_)) => rdl.quit(),\n\n\t\tEvent::Input(InputEvent::MouseButtonDown {\n\n\t\t\tbutton: MouseButton::Left,\n\n\t\t\t..\n\n\t\t}) => state.on_mouse_down().unwrap(),\n\n\t\tEvent::Input(InputEvent::MouseButtonDown {\n\n\t\t\tbutton: MouseButton::Right,\n\n\t\t\t..\n\n\t\t}) => {\n\n\t\t\trdl.time()\n\n\t\t\t\t.register_timer(std::time::Duration::from_secs(2), || {\n\n\t\t\t\t\tprintln!(\"Timer Done\");\n\n\t\t\t\t});\n\n\t\t}\n\n\t\tEvent::Input(InputEvent::KeyDown {\n", "file_path": "riddle/examples/sandbox/main.rs", "rank": 13, "score": 112722.77537953862 }, { "content": "fn main() -> Result<(), RiddleError> {\n\n\tlet rdl = RiddleLib::new()?;\n\n\tlet mut pong = Pong::new(&rdl)?;\n\n\n\n\trdl.run(move |rdl| match rdl.event() {\n\n\t\tEvent::Platform(PlatformEvent::WindowClose(_)) => {\n\n\t\t\trdl.quit();\n\n\t\t}\n\n\t\tEvent::ProcessFrame => 
{\n\n\t\t\tpong.update().unwrap();\n\n\t\t\tpong.render().unwrap();\n\n\t\t}\n\n\t\t_ => (),\n\n\t});\n\n}\n\n\n", "file_path": "riddle/examples/pong/main.rs", "rank": 14, "score": 112722.77537953862 }, { "content": "pub trait ImageImageExt {\n\n\tfn image_from_dynimage(img: DynamicImage) -> Self;\n\n\tfn image_rgbaimage(&self) -> &::image::RgbaImage;\n\n}\n", "file_path": "riddle-image/src/image_ext/image.rs", "rank": 15, "score": 109927.27310797921 }, { "content": "struct ImageViewDetails {\n\n\tstart_offset: usize,\n\n\tend_offset: usize,\n\n\tstride: usize,\n\n\trow_len: usize,\n\n}\n\n\n\npub(crate) struct ImageView<'a> {\n\n\timage: &'a Image,\n\n\tdetails: ImageViewDetails,\n\n}\n\n\n\npub(crate) struct ImageViewMut<'a> {\n\n\timage: &'a mut Image,\n\n\tdetails: ImageViewDetails,\n\n}\n\n\n\nimpl ImageViewDetails {\n\n\tfn new(image: &Image, bounds: Rect<u32>) -> Self {\n\n\t\tif let Some(bounds) = image.rect().intersect(&bounds) {\n", "file_path": "riddle-image/src/imageview.rs", "rank": 16, "score": 109306.27252533876 }, { "content": "fn main() -> Result<(), FontError> {\n\n\tlet font_bytes = include_bytes!(\"../../../example_assets/Roboto-Regular.ttf\");\n\n\tlet ttf_font = TtFont::load(&font_bytes[..])?;\n\n\tprintln!(\"+ TTF Loaded...\");\n\n\n\n\tlet img_font = ImgFontGenerator::new(CHAR_SET, 32).generate(&ttf_font)?;\n\n\tprintln!(\"+ ImgFont Generated...\");\n\n\n\n\tlet mut out_path = env::temp_dir();\n\n\tout_path.push(FONT_OUT_FILE);\n\n\tlet out_file = File::create(out_path.clone())?;\n\n\timg_font.image().save(out_file, ImageFormat::Png)?;\n\n\tprintln!(\"+ ImgFont Image Saved ({:?})...\", out_path);\n\n\n\n\tlet rendered_img = img_font.render_simple(TEST_STRING)?;\n\n\tprintln!(\"+ ImgFont String Rendered...\");\n\n\n\n\tlet mut out_path = env::temp_dir();\n\n\tout_path.push(IMG_OUT_FILE);\n\n\tlet out_file = File::create(out_path.clone())?;\n\n\trendered_img.save(out_file, ImageFormat::Png)?;\n\n\tprintln!(\"+ Output Image Saved ({:?})...\", 
out_path);\n\n\n\n\tOk(())\n\n}\n", "file_path": "riddle-font/examples/font-imgfont-rendersimple/main.rs", "rank": 17, "score": 107707.41516170945 }, { "content": "fn main() -> Result<(), FontError> {\n\n\tlet font_bytes = include_bytes!(\"../../../example_assets/Roboto-Regular.ttf\");\n\n\tlet ttf_font = TtFont::load(&font_bytes[..])?;\n\n\tprintln!(\"+ TTF Loaded...\");\n\n\n\n\tlet img_font = ImgFontGenerator::new(CHAR_SET, 32).generate(&ttf_font)?;\n\n\tprintln!(\"+ ImgFont Generated...\");\n\n\n\n\tlet mut out_path = env::temp_dir();\n\n\tout_path.push(IMG_OUT_FILE);\n\n\tlet out_file = File::create(out_path.clone())?;\n\n\timg_font.image().save(out_file, ImageFormat::Png)?;\n\n\tprintln!(\"+ ImgFont Image Saved ({:?})...\", out_path);\n\n\n\n\tprintln!(\"+ Glyph Data \");\n\n\tprintln!(\"+ +++++++++++++++++++++++++++++\");\n\n\tfor (character, glyph) in img_font.glyphs() {\n\n\t\tprintln!(\"+ '{}': {:?}\", character, glyph);\n\n\t}\n\n\tprintln!(\"+ +++++++++++++++++++++++++++++\");\n\n\n\n\tprintln!(\"+ Done.\");\n\n\tOk(())\n\n}\n", "file_path": "riddle-font/examples/font-imgfont-generator/main.rs", "rank": 18, "score": 107707.41516170945 }, { "content": "/// The context provided to render callbacks\n\npub trait RenderContext<R: CommonRenderer> {\n\n\t/// Replace the current world transform.\n\n\tfn set_transform(&mut self, transform: mint::ColumnMatrix4<f32>) -> Result<(), R::Error>;\n\n\n\n\t/// Fill the target with a flat color.\n\n\tfn clear(&mut self, color: Color<f32>) -> Result<(), R::Error>;\n\n\n\n\t/// Draw a `Renderable` to the target with the current world transform.\n\n\tfn draw(&mut self, renderable: &Renderable<'_, R>) -> Result<(), R::Error>;\n\n\n\n\t/// Draw a solid rect with the given color.\n\n\tfn fill_rect(&mut self, rect: &Rect<f32>, color: Color<f32>) -> Result<(), R::Error>;\n\n\n\n\t/// Consume the context and present any outstanding draw calls.\n\n\tfn present(self) -> Result<(), R::Error>;\n\n}\n\n\n\npub struct Renderable<'a, R: 
CommonRenderer> {\n\n\tpub texture: R::Texture,\n\n\tpub shader: R::Shader,\n\n\tpub verts: &'a [Vertex],\n\n\tpub indices: &'a [u16],\n\n}\n", "file_path": "riddle-renderer-common/src/renderer.rs", "rank": 19, "score": 103945.8228413684 }, { "content": "/// Define the mapping between two ColorElement types.\n\n///\n\n/// Implemented by both individual color channel types, and compound types like\n\n/// [`Color`].\n\npub trait ColorElementConversion<T> {\n\n\t/// Given a value that implements this trait, produce an equivalent color\n\n\t/// element of the destination type.\n\n\t///\n\n\t/// # Example\n\n\t///\n\n\t/// ```\n\n\t/// # use riddle_common::*;\n\n\t/// // Convert a float color channel value in to a byte color channel value.\n\n\t/// let byte_val: u8 = 255;\n\n\t/// let float_val: f32 = 1.0;\n\n\t/// assert_eq!(byte_val, float_val.convert());\n\n\t/// ```\n\n\tfn convert(&self) -> T;\n\n}\n\n\n\nimpl ColorElementConversion<f32> for u8 {\n\n\t#[inline]\n\n\tfn convert(&self) -> f32 {\n\n\t\t(*self as f32) / 255.0\n", "file_path": "riddle-common/src/color.rs", "rank": 20, "score": 100813.72906277841 }, { "content": "/// A type that represents a single channel in a color value.\n\npub trait ColorElement: Copy + Default {\n\n\tconst ZERO: Self;\n\n\tconst SATURATED: Self;\n\n}\n\n\n\n/// A u8 represents a color channel value in the range 0-255\n\nimpl ColorElement for u8 {\n\n\tconst ZERO: u8 = 0;\n\n\tconst SATURATED: u8 = 255;\n\n}\n\n\n\n/// An f32 represents a color channel value in the range 0.0 - 1.0\n\nimpl ColorElement for f32 {\n\n\tconst ZERO: f32 = 0.0;\n\n\tconst SATURATED: f32 = 1.0;\n\n}\n\n\n\n/////////////////////////////////////////////////////////////////////////////\n\n// trait ColorElementConversion\n\n/////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "riddle-common/src/color.rs", "rank": 21, "score": 99062.07545239272 }, { "content": "fn get_grid_point(\n\n\tgrid: 
&[Vector2<i64>],\n\n\tbounds: &Rect<i64>,\n\n\tlocation: Vector2<i64>,\n\n) -> Vector2<i64> {\n\n\tif bounds.contains_point(location) {\n\n\t\tgrid[((location.y * bounds.dimensions.x) + location.x) as usize]\n\n\t} else {\n\n\t\tGRID_INF\n\n\t}\n\n}\n\n\n", "file_path": "riddle-image/src/filters/distance_field.rs", "rank": 22, "score": 95633.90288483075 }, { "content": "fn set_grid_point(\n\n\tgrid: &mut Vec<Vector2<i64>>,\n\n\tbounds: &Rect<i64>,\n\n\tlocation: Vector2<i64>,\n\n\tvalue: Vector2<i64>,\n\n) {\n\n\tgrid[((location.y * bounds.dimensions.x) + location.x) as usize] = value;\n\n}\n\n\n", "file_path": "riddle-image/src/filters/distance_field.rs", "rank": 23, "score": 95633.90288483075 }, { "content": "/// Sprites are conceptually both a reference to an image, and the sub region of the image\n\n/// which represents the logical sprite.\n\npub trait CommonSprite<R: CommonRenderer>: Sized + Clone {\n\n\t/// Construct a new sprite from an image. The image contents are copied to a texture\n\n\t/// in RGBA8 format. The entire image will be used\n\n\tfn new_from_image(\n\n\t\trenderer: &R,\n\n\t\timg: &Image,\n\n\t\tinit_args: &SpriteInitArgs,\n\n\t) -> Result<Self, R::Error>;\n\n\n\n\t/// Build a sprite that shares the same underlying texture but represents a different portion\n\n\t/// of the texture.\n\n\t///\n\n\t/// # Arguments\n\n\t///\n\n\t/// * **source_rect** - The portion of the texture that the new sprite will render, relative to\n\n\t/// the current sprite's bounds. 
The bounds of the output sprite will be\n\n\t/// the intersection of the sprite's rect and the source_rect, so the dimensions\n\n\t/// of the output sprite may not match the `source_rect` dimensions.\n\n\t///\n\n\t/// # Example\n", "file_path": "riddle-renderer-common/src/sprite.rs", "rank": 24, "score": 95481.62163613772 }, { "content": "pub fn pump_for_secs(time_system: &TimeSystem, secs: u64) {\n\n\tlet start_time = Instant::now();\n\n\twhile Instant::now() - start_time < Duration::from_secs(secs) {\n\n\t\tstd::thread::sleep(std::time::Duration::from_millis(100));\n\n\t\ttime_system.process_frame();\n\n\t}\n\n}\n", "file_path": "riddle-time/src/doctest.rs", "rank": 25, "score": 95017.58086942494 }, { "content": "pub fn pump_for_secs(audio_system: &AudioSystem, secs: u64) {\n\n\tlet start_time = Instant::now();\n\n\twhile Instant::now() - start_time < Duration::from_secs(secs) {\n\n\t\taudio_system.process_frame();\n\n\t}\n\n}\n", "file_path": "riddle-audio/src/doctest.rs", "rank": 26, "score": 95017.58086942494 }, { "content": "#[inline]\n\npub fn vec2<T>(x: T, y: T) -> Vector2<T> {\n\n\tVector2::new(x, y)\n\n}\n\n\n\nimpl<T> Vector2<T> {\n\n\t/// Create a vector with the given coordinatates.\n\n\t#[inline]\n\n\tpub fn new(x: T, y: T) -> Self {\n\n\t\tSelf { x, y }\n\n\t}\n\n}\n\n\n\nimpl<T: SpacialNumeric> Vector2<T> {\n\n\t/// Calculate the square of the magnitude of the vector\n\n\t///\n\n\t/// # Example\n\n\t///\n\n\t/// ```\n\n\t/// # use riddle_math::*;\n\n\t/// let v = Vector2::new(2, 2);\n", "file_path": "riddle-math/src/vector.rs", "rank": 27, "score": 92428.72163783989 }, { "content": "/// Types which as well as being defined as SpacialNumeric, may be negated.\n\npub trait SignedSpacialNumeric: SpacialNumeric + std::ops::Neg<Output = Self> {}\n\n\n", "file_path": "riddle-math/src/spacial_numeric.rs", "rank": 28, "score": 87918.44800743938 }, { "content": "fn do_pass<YIter: Iterator<Item = i64>, XIter: Iterator<Item = i64> + Clone>(\n\n\tgrid: &mut 
Vec<Vector2<i64>>,\n\n\tbounds: &Rect<i64>,\n\n\ty_range: YIter,\n\n\tx_range: XIter,\n\n\tneighbours: &[Vector2<i64>],\n\n) {\n\n\tfor y in y_range {\n\n\t\tfor x in x_range.clone() {\n\n\t\t\tlet location = Vector2::new(x, y);\n\n\t\t\tlet new_val = neighbours\n\n\t\t\t\t.iter()\n\n\t\t\t\t.map(|offset| {\n\n\t\t\t\t\tlet other = get_grid_point(grid, bounds, location + *offset);\n\n\t\t\t\t\tother + *offset\n\n\t\t\t\t})\n\n\t\t\t\t.min_by_key(|p| p.magnitude_squared())\n\n\t\t\t\t.unwrap();\n\n\t\t\tset_grid_point(grid, bounds, location, new_val);\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "riddle-image/src/filters/distance_field.rs", "rank": 29, "score": 70098.16145849718 }, { "content": "struct Timer {\n\n\ttime_remaining: std::time::Duration,\n\n\tcallback: Option<Box<dyn FnOnce() + Send>>,\n\n\tshared_state: Arc<SharedTimerState>,\n\n}\n\n\n\nimpl Timer {\n\n\tpub fn new(duration: std::time::Duration, callback: Box<dyn FnOnce() + Send>) -> Self {\n\n\t\tSelf {\n\n\t\t\ttime_remaining: duration,\n\n\t\t\tcallback: Some(callback),\n\n\t\t\tshared_state: Arc::new(SharedTimerState::default()),\n\n\t\t}\n\n\t}\n\n\n\n\tpub fn pending(&self) -> bool {\n\n\t\t!self.shared_state.cancelled() && self.time_remaining > std::time::Duration::from_secs(0)\n\n\t}\n\n\n\n\tfn update(&mut self, dt: std::time::Duration) {\n", "file_path": "riddle-time/src/timer.rs", "rank": 30, "score": 64721.42114156721 }, { "content": "struct Pong {\n\n\twindow: Window,\n\n\trenderer: Renderer<WindowWgpuDevice>,\n\n\tstate: RiddleState,\n\n\n\n\tclip: audio::Clip,\n\n\t_clip_player: Option<audio::ClipPlayer>,\n\n\n\n\tleft_paddle: Rect<f32>,\n\n\tright_paddle: Rect<f32>,\n\n\n\n\tball: Rect<f32>,\n\n\tvelocity: Vector2<f32>,\n\n}\n\n\n\nimpl Pong {\n\n\tfn new(rdl: &RiddleLib) -> Result<Self, RiddleError> {\n\n\t\tlet window = WindowBuilder::new()\n\n\t\t\t.title(\"Riddle Pong\")\n\n\t\t\t.dimensions(800, 600)\n", "file_path": "riddle/examples/pong/main.rs", "rank": 31, "score": 64721.42114156721 
}, { "content": "struct DemoState {\n\n\tstate: RiddleState,\n\n\n\n\tmouse_location: Arc<Mutex<input::LogicalPosition>>,\n\n\n\n\tclip: audio::Clip,\n\n\tmusic_player: audio::ClipPlayer,\n\n\t_blip_player: Option<audio::ClipPlayer>,\n\n}\n\n\n\nimpl DemoState {\n\n\tfn new(rdl: &RiddleLib) -> Result<Self, RiddleError> {\n\n\t\tlet window = WindowBuilder::new().build(rdl.context())?;\n\n\n\n\t\tlet renderer = renderer::Renderer::new_from_window(&window)?;\n\n\n\n\t\tlet img = {\n\n\t\t\tlet img_bytes = include_bytes!(\"../../../example_assets/image.png\");\n\n\t\t\timage::Image::load(&img_bytes[..], image::ImageFormat::Png)?\n\n\t\t};\n", "file_path": "riddle/examples/sandbox/main.rs", "rank": 32, "score": 63702.70373637204 }, { "content": "struct RendererState {\n\n\trenderer: AppRenderer,\n\n\tsprite: <AppRenderer as CommonRenderer>::Sprite,\n\n\tsubsprite: <AppRenderer as CommonRenderer>::Sprite,\n\n\tlabel_sprite: <AppRenderer as CommonRenderer>::Sprite,\n\n\tsprite_font: <AppRenderer as CommonRenderer>::SpriteFont,\n\n\n\n\ttarget: renderer::SpriteRenderTarget<WindowWgpuDevice>,\n\n\n\n\tmouse_location: Arc<Mutex<input::LogicalPosition>>,\n\n\tprev_frame_time: std::time::Instant,\n\n}\n\n\n\nimpl RendererState {\n\n\tfn run(&mut self) {\n\n\t\tloop {\n\n\t\t\tself.render_frame().unwrap();\n\n\t\t}\n\n\t}\n\n\n", "file_path": "riddle/examples/sandbox/main.rs", "rank": 33, "score": 63702.70373637204 }, { "content": "#[derive(Debug)]\n\nstruct SharedTimerState {\n\n\tpending: AtomicBool,\n\n\tcancelled: AtomicBool,\n\n}\n\n\n\nimpl SharedTimerState {\n\n\tfn cancelled(&self) -> bool {\n\n\t\tself.cancelled.load(Ordering::Relaxed)\n\n\t}\n\n\n\n\tfn pending(&self) -> bool {\n\n\t\tself.pending.load(Ordering::Relaxed)\n\n\t}\n\n}\n\n\n\nimpl Default for SharedTimerState {\n\n\tfn default() -> Self {\n\n\t\tSelf {\n\n\t\t\tpending: AtomicBool::new(true),\n\n\t\t\tcancelled: AtomicBool::new(false),\n", "file_path": "riddle-time/src/timer.rs", "rank": 34, "score": 
62739.488672163956 }, { "content": "struct FrameTime {\n\n\tframe_instant: instant::Instant,\n\n\tframe_delta: instant::Duration,\n\n\tfps: f32,\n\n}\n\n\n\nimpl FrameTime {\n\n\tfn new() -> Self {\n\n\t\tSelf {\n\n\t\t\tframe_instant: instant::Instant::now(),\n\n\t\t\tframe_delta: Default::default(),\n\n\t\t\tfps: 0.0,\n\n\t\t}\n\n\t}\n\n\n\n\tfn update(&mut self) {\n\n\t\tlet now = instant::Instant::now();\n\n\t\tself.frame_delta = now.duration_since(self.frame_instant);\n\n\t\tself.fps = 1.0 / self.frame_delta.as_secs_f32().max(0.0001);\n\n\t\tself.frame_instant = now;\n\n\t}\n\n}\n", "file_path": "riddle-time/src/time_system.rs", "rank": 35, "score": 62734.82772436282 }, { "content": "struct WindowInputState {\n\n\tmouse: MouseState,\n\n\tkeyboard: KeyboardState,\n\n}\n\n\n\n/// The Riddle input system core state, along with [`InputMainThreadState`].\n\n///\n\n/// This stores the thread safe input state which can be queried to inspect the\n\n/// status of input devices. It is updated by [`InputMainThreadState::process_input`].\n\n#[derive(Clone)]\n\npub struct InputSystem {\n\n\tinternal: std::sync::Arc<InputSystemInternal>,\n\n}\n\n\n\npub struct InputSystemInternal {\n\n\twindow_states: Mutex<HashMap<WindowId, WindowInputState>>,\n\n\tgamepad_states: Mutex<GamePadStateMap>,\n\n\toutgoing_input_events: Mutex<Vec<InputEvent>>,\n\n}\n\n\n", "file_path": "riddle-input/src/input_system.rs", "rank": 36, "score": 61814.07973574202 }, { "content": "struct WGPURendererDemo {\n\n\t_window: Window,\n\n\n\n\tcustom_renderer: CustomRendererHandle,\n\n\trdl_renderer: Renderer<CustomRendererHandle>,\n\n\n\n\tlabel_sprite: Sprite<CustomRendererHandle>,\n\n\n\n\tpoint_cloud: Vec<Vertex>,\n\n\trotation: f32,\n\n}\n\n\n\nimpl WGPURendererDemo {\n\n\tfn new(rdl: &RiddleLib) -> Result<Self> {\n\n\t\tlet window = WindowBuilder::new()\n\n\t\t\t.title(\"Riddle WGPU Custom Renderer Overlay\")\n\n\t\t\t.dimensions(800, 
600)\n\n\t\t\t.resizeable(false)\n\n\t\t\t.build(&rdl.context())?;\n\n\n", "file_path": "riddle/examples/wgpu-renderer-overlay/main.rs", "rank": 37, "score": 60937.09943019344 }, { "content": "#[derive(Clone)]\n\nstruct CustomRendererHandle {\n\n\twindow: WindowId,\n\n\trenderer: std::rc::Rc<std::cell::RefCell<CustomRenderer>>,\n\n}\n\n\n\nimpl WgpuDevice for CustomRendererHandle {\n\n\tfn begin_frame(&self) -> Result<(), WgpuRendererError> {\n\n\t\tself.renderer.borrow_mut().commit();\n\n\t\tOk(())\n\n\t}\n\n\n\n\tfn end_frame(&self) {\n\n\t\tself.renderer.borrow_mut().commit();\n\n\t}\n\n\n\n\tfn viewport_dimensions(&self) -> Vector2<f32> {\n\n\t\tvec2(800.0, 600.0)\n\n\t}\n\n\n\n\tfn with_device_info<R, F>(&self, f: F) -> Result<R, WgpuRendererError>\n", "file_path": "riddle/examples/wgpu-renderer-overlay/main.rs", "rank": 38, "score": 60937.09943019344 }, { "content": "fn convert_winit_window_event(\n\n\twindow: &Window,\n\n\tevent: winit::event::WindowEvent,\n\n) -> Option<PlatformEvent> {\n\n\tmatch event {\n\n\t\twinit::event::WindowEvent::CloseRequested => Some(PlatformEvent::WindowClose(window.id())),\n\n\t\twinit::event::WindowEvent::Resized(_) => Some(PlatformEvent::WindowResize(window.id())),\n\n\t\twinit::event::WindowEvent::CursorMoved { position, .. } => {\n\n\t\t\tSome(PlatformEvent::CursorMove {\n\n\t\t\t\twindow: window.id(),\n\n\t\t\t\tposition: dimensions::logical_pos_from_winit(\n\n\t\t\t\t\tposition.to_logical(window.scale_factor()),\n\n\t\t\t\t),\n\n\t\t\t})\n\n\t\t}\n\n\t\twinit::event::WindowEvent::MouseInput { state, button, .. 
} => match state {\n\n\t\t\twinit::event::ElementState::Pressed => Some(PlatformEvent::MouseButtonDown {\n\n\t\t\t\twindow: window.id(),\n\n\t\t\t\tbutton: winit_mousebutton_to_mousebutton(button),\n\n\t\t\t}),\n", "file_path": "riddle-platform-winit/src/event.rs", "rank": 39, "score": 60225.94622074914 }, { "content": "struct EventQueue<T> {\n\n\tqueue: Mutex<Vec<T>>,\n\n\tfilter: Box<dyn Fn(&T) -> bool + Sync + Send>,\n\n}\n\n\n\nimpl<T> EventQueue<T> {\n\n\tfn new() -> Self {\n\n\t\tSelf::new_with_filter(|_| true)\n\n\t}\n\n\n\n\tfn new_with_filter<F>(filter: F) -> Self\n\n\twhere\n\n\t\tF: Fn(&T) -> bool + Send + Sync + 'static,\n\n\t{\n\n\t\tSelf {\n\n\t\t\tqueue: Mutex::new(vec![]),\n\n\t\t\tfilter: Box::new(filter),\n\n\t\t}\n\n\t}\n\n\n", "file_path": "riddle-common/src/eventpub.rs", "rank": 40, "score": 59922.920987681544 }, { "content": "/// A [`Renderer`] compatible WGPU device.\n\n///\n\n/// A default implementation exists for `riddle_platform_winit::Window`\n\n/// in [`WindowWgpuDevice`].\n\n///\n\n/// The application may implement this trait to layer the renderer on\n\n/// top of custom WGPU renderer.\n\n///\n\n/// # Example\n\n///\n\n/// ```no_run\n\n/// use std::sync::Arc;\n\n/// use riddle::{common::Color, platform::{common::WindowId, *}, renderer::*, *};\n\n///\n\n/// #[derive(Clone)]\n\n/// struct ACustomRendererHandle {\n\n/// // [..]\n\n/// }\n\n///\n\n/// impl ACustomRendererHandle {\n\n/// // [..]\n\n/// # fn new() -> Self { todo!() }\n\n/// # fn start_render(&self) { todo!() }\n\n/// # fn end_render(&self) { todo!() }\n\n/// # fn render_3d_scene(&self) { todo!() }\n\n/// }\n\n///\n\n/// impl WgpuDevice for ACustomRendererHandle {\n\n/// // [..]\n\n/// # fn begin_frame(&self) -> Result<(), WgpuRendererError> { todo!() }\n\n/// # fn end_frame(&self) { todo!() }\n\n/// # fn viewport_dimensions(&self) -> math::Vector2<f32> { todo!() }\n\n/// # fn with_device_info<R, F: FnOnce(&WgpuDeviceInfo) -> Result<R, WgpuRendererError>>(&self, f: F) -> 
Result<R, WgpuRendererError> { todo!() }\n\n/// # fn with_frame<R, F: FnOnce(&wgpu::SwapChainFrame) -> Result<R, WgpuRendererError>>(&self, f: F) -> Result<R, WgpuRendererError> { todo!() }\n\n/// # fn window_id(&self) -> WindowId { todo!() }\n\n/// }\n\n///\n\n/// fn main() -> Result<(), RiddleError> {\n\n/// let rdl = RiddleLib::new()?;\n\n/// let window = WindowBuilder::new().build(rdl.context())?;\n\n///\n\n/// let custom_renderer = ACustomRendererHandle::new();\n\n///\n\n/// let renderer = Renderer::new_from_device(custom_renderer.clone())?;\n\n///\n\n/// rdl.run(move |rdl| match rdl.event() {\n\n/// Event::Platform(PlatformEvent::WindowClose(_)) => rdl.quit(),\n\n/// Event::ProcessFrame => {\n\n/// custom_renderer.start_render();\n\n/// custom_renderer.render_3d_scene();\n\n///\n\n/// renderer.render(|render_ctx| {\n\n/// render_ctx.clear(Color::RED)\n\n/// }).unwrap();\n\n///\n\n/// custom_renderer.end_render();\n\n/// }\n\n/// _ => (),\n\n/// })\n\n/// }\n\n/// ```\n\npub trait WgpuDevice {\n\n\t/// Called when the [`Renderer`] begins rendering to the swap chain frame.\n\n\t///\n\n\t/// Invoked through [`Renderer::render`]\n\n\tfn begin_frame(&self) -> Result<()>;\n\n\n\n\t/// When the renderer is done renderering to the swap chain frame.\n\n\t///\n\n\t/// Invoked by a [`RenderContext::present`] call on the context returned from\n\n\t/// [`Renderer::render`].\n\n\tfn end_frame(&self);\n\n\n\n\t/// The viewport dimensions of the swapchain frame.\n\n\t///\n\n\t/// This controls the projection matrix used by the sprite renderer.\n\n\tfn viewport_dimensions(&self) -> Vector2<f32>;\n\n\n\n\t/// Provides a reference to the set of wgpu device state for use by the renderer.\n\n\tfn with_device_info<R, F: FnOnce(&WgpuDeviceInfo) -> Result<R>>(&self, f: F) -> Result<R>;\n\n\n", "file_path": "riddle-renderer-wgpu/src/device.rs", "rank": 41, "score": 57955.20143855087 }, { "content": "/// Numeric types over which [`crate::Rect`] and [`crate::Vector2`] are 
defined\n\n///\n\n/// Types which implement this have basic operations and comparisons defined\n\n/// that can work for all signed, unsigned numbers, integer, and floating point\n\n/// numbers.\n\npub trait SpacialNumeric:\n\n\tstd::cmp::PartialOrd\n\n\t+ Copy\n\n\t+ Clone\n\n\t+ std::fmt::Debug\n\n\t+ std::ops::Add<Self, Output = Self>\n\n\t+ std::ops::Sub<Self, Output = Self>\n\n\t+ std::ops::Mul<Self, Output = Self>\n\n\t+ std::ops::Div<Self, Output = Self>\n\n\t+ std::default::Default\n\n{\n\n}\n\n\n", "file_path": "riddle-math/src/spacial_numeric.rs", "rank": 42, "score": 57929.412628758924 }, { "content": "/// Functionality Window types need in order to be able to interact with platform common.\n\npub trait WindowCommon {\n\n\t/// Convert a logical vec2 in to a physical pixel unit (x,y) pair\n\n\tfn logical_to_physical<L: Into<LogicalVec2>>(&self, vec2: L) -> (u32, u32);\n\n}\n", "file_path": "riddle-platform-common/src/traits/window.rs", "rank": 43, "score": 57051.07245306576 }, { "content": "pub trait TimeSystemExt {\n\n\t/// Create a new time system. 
The time the system is created is used as the time\n\n\t/// of the 0th frame.\n\n\tfn new() -> Self;\n\n\n\n\t/// Update the time system state, marking the beginning of a the next frame.\n\n\t///\n\n\t/// The instant that this method is called is taken as the reference time for\n\n\t/// the frame that is about to be executed.\n\n\t///\n\n\t/// Timers will also be triggered during this function call if they are due\n\n\t/// to trigger.\n\n\t///\n\n\t/// **Do not** call this function directly if you are using this through the\n\n\t/// `riddle` crate.\n\n\t///\n\n\t/// # Example\n\n\t///\n\n\t/// ```\n\n\t/// # use riddle_time::*; doctest::simple(|time_system| {\n", "file_path": "riddle-time/src/ext/time_system.rs", "rank": 44, "score": 56217.876009593885 }, { "content": "pub trait InputSystemExt {\n\n\t/// Create the input system, initializing any input device libraries needed.\n\n\t///\n\n\t/// This will be instantiated automatically if `riddle` is being used.\n\n\t///\n\n\t/// Returns a pair of objects, one of which is the system state which is like\n\n\t/// most other riddle system states - it is thread safe, stored in `RiddleState`\n\n\t/// and is the main means by which client code interacts with the system.\n\n\t///\n\n\t/// The other return value stores the portion of this object's state that should\n\n\t/// stay on the main thread - [`InputMainThreadState`].\n\n\tfn new_system_pair(\n\n\t\tsys_events: &EventPub<PlatformEvent>,\n\n\t) -> Result<(InputSystem, InputMainThreadState)>;\n\n\n\n\t/// Collect any buffered [`InputEvent`]s emitted by the input system.\n\n\t///\n\n\t/// This clears the system's buffer, so should only be called from a single location.\n\n\t///\n\n\t/// **Do not** call this if you are using `riddle`. 
`riddle` manages taking these\n", "file_path": "riddle-input/src/ext/input_system.rs", "rank": 45, "score": 56217.876009593885 }, { "content": "pub trait AudioSystemExt {\n\n\t/// Create the audio system, connected to the default audio output device.\n\n\tfn new_system_pair() -> Result<(AudioSystem, AudioMainThreadState)>;\n\n\n\n\t/// Update the system's state.\n\n\t///\n\n\t/// Updates all [`ClipPlayer`] fades. This must be called periodically for the [`AudioSystem`]\n\n\t/// to function. **Do not** call this if the `riddle` crate is being used.\n\n\t///\n\n\t/// # Example\n\n\t/// ```no_run\n\n\t/// # use riddle_audio::{ext::*, *}; fn main() -> Result<(), AudioError> {\n\n\t/// let (audio_system, _audio_main_thread_state) = AudioSystem::new_system_pair()?;\n\n\t///\n\n\t/// // Tick the audio system every 100ms\n\n\t/// let start_time = std::time::Instant::now();\n\n\t/// while std::time::Instant::now() - start_time < std::time::Duration::from_secs(2) {\n\n\t/// audio_system.process_frame();\n\n\t/// std::thread::sleep(std::time::Duration::from_millis(100));\n\n\t/// }\n\n\t/// # Ok(()) }\n\n\t/// ```\n\n\tfn process_frame(&self);\n\n}\n", "file_path": "riddle-audio/src/ext/audio_system.rs", "rank": 46, "score": 56217.876009593885 }, { "content": "pub trait WinitWindowExt {\n\n\tfn winit_window(&self) -> &winit::window::Window;\n\n}\n", "file_path": "riddle-platform-winit/src/winit_ext/window.rs", "rank": 47, "score": 55422.488260107006 }, { "content": "pub trait PlatformSystemExt {\n\n\t/// Create a new platform system and its main thread state pair.\n\n\t///\n\n\t/// **Do not** call if using the `riddle` crate as recommended, as ` RiddleLib` manages\n\n\t/// the creation and platform lifetime automatically.\n\n\t///\n\n\t/// # Example\n\n\t///\n\n\t/// This example is only relevant if you're not using `riddle`\n\n\t///\n\n\t/// ```no_run\n\n\t/// # use riddle_platform_winit::{ext::*, *};\n\n\t/// # fn main() -> Result<(), PlatformError> {\n\n\t/// let 
(platform_system, main_thread_state) = PlatformSystem::new_system_pair();\n\n\t/// let window = WindowBuilder::new().build(main_thread_state.borrow_context())?;\n\n\t///\n\n\t/// main_thread_state.run::<PlatformError, _>(move |ctx| {\n\n\t/// match ctx.event() {\n\n\t/// PlatformEvent::WindowClose(_) => { ctx.quit(); }\n\n\t/// _ => ()\n\n\t/// };\n\n\t/// Ok(())\n\n\t/// })\n\n\t/// # }\n\n\t/// ```\n\n\tfn new_system_pair() -> (PlatformSystem, PlatformMainThreadState);\n\n}\n", "file_path": "riddle-platform-winit/src/ext/platform_system.rs", "rank": 48, "score": 55422.488260107006 }, { "content": "/// The root object of a renderer implementation, associated with a single display.\n\npub trait CommonRenderer: Sized {\n\n\ttype RenderContext: RenderContext<Self>;\n\n\ttype Sprite: CommonSprite<Self>;\n\n\ttype Texture;\n\n\ttype Shader;\n\n\ttype SpriteFont;\n\n\ttype Error: std::error::Error + std::convert::From<ImageError>;\n\n\n\n\tfn dimensions(&self) -> Vector2<f32>;\n\n\tfn window_id(&self) -> WindowId;\n\n\n\n\tfn render<R, F>(&self, f: F) -> Result<R, Self::Error>\n\n\twhere\n\n\t\tF: FnOnce(&mut Self::RenderContext) -> Result<R, Self::Error>;\n\n}\n\n\n", "file_path": "riddle-renderer-common/src/renderer.rs", "rank": 49, "score": 54905.02844632092 }, { "content": "pub trait RustTypeTtFontExt {\n\n\tfn rustype_font(&self) -> &rusttype::Font<'static>;\n\n}\n", "file_path": "riddle-font/src/rusttype_ext/ttfont.rs", "rank": 50, "score": 54662.3927816241 }, { "content": "/// Define the conversion between two SpacialNumeric types.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// # use riddle_math::*;\n\n/// let a: u32 = 1;\n\n/// let b: f32 = a.convert();\n\n/// ```\n\npub trait SpacialNumericConversion<T> {\n\n\t/// Convert one SpacialNumeric value to another. 
This conversion can not fail.\n\n\tfn convert(self) -> T;\n\n}\n\n\n\nimpl<T: SpacialNumeric> SpacialNumericConversion<T> for T {\n\n\t#[inline]\n\n\tfn convert(self) -> T {\n\n\t\tself\n\n\t}\n\n}\n\n\n\nmacro_rules! define_spacial_numeric {\n\n (Conv, $f:ty, As($t:ty)) => {\n\n impl SpacialNumericConversion<$t> for $f {\n\n #[inline]\n\n fn convert(self) -> $t {\n\n self as $t\n\n }\n\n }\n", "file_path": "riddle-math/src/spacial_numeric.rs", "rank": 51, "score": 54084.55845646817 }, { "content": "use ::image::DynamicImage;\n\n\n", "file_path": "riddle-image/src/image_ext/image.rs", "rank": 52, "score": 51450.958037674565 }, { "content": "#[doc(hidden)]\n\npub trait WgpuRenderTargetDesc<Device: WgpuDevice> {\n\n\tfn begin_render(&self) -> Result<()>;\n\n\tfn end_render(&self);\n\n\tfn renderer(&self) -> &Renderer<Device>;\n\n\tfn dimensions(&self) -> Vector2<f32>;\n\n\tfn standard_resources(&self) -> &StandardResources;\n\n\tfn with_view<F: FnOnce(&wgpu::TextureView) -> Result<()>>(&self, f: F) -> Result<()>;\n\n}\n", "file_path": "riddle-renderer-wgpu/src/target.rs", "rank": 53, "score": 50038.39468054776 }, { "content": "fn filter_to_wgpu(filter: FilterMode) -> wgpu::FilterMode {\n\n\tmatch filter {\n\n\t\tFilterMode::Nearest => wgpu::FilterMode::Nearest,\n\n\t\tFilterMode::Linear => wgpu::FilterMode::Linear,\n\n\t}\n\n}\n\n\n\npub(crate) enum TextureType {\n\n\tPlain,\n\n\tRenderTarget,\n\n}\n\n\n\npub(crate) struct TextureInternal {\n\n\tpub texture: wgpu::Texture,\n\n\tpub sampler: wgpu::Sampler,\n\n\tpub dimensions: Vector2<u32>,\n\n}\n\n\n\nimpl TextureInternal {\n\n\tpub(crate) fn new(\n", "file_path": "riddle-renderer-wgpu/src/texture.rs", "rank": 58, "score": 48915.56085241398 }, { "content": "//! 
Traits which expose underlying image crate's types\n\n\n\nmod image;\n\n\n\npub use self::image::*;\n", "file_path": "riddle-image/src/image_ext/mod.rs", "rank": 79, "score": 47328.16130633909 }, { "content": "use riddle_image::*;\n\nuse std::env;\n\nuse std::fs::File;\n\n\n\nconst OUT_FILE: &str = \"image-distance-field.out.png\";\n\nconst FIELD_SCALE: f64 = 20.0;\n\n\n", "file_path": "riddle-image/examples/image-distance-field/main.rs", "rank": 80, "score": 46588.771887860545 }, { "content": "fn winit_mousebutton_to_mousebutton(button: winit::event::MouseButton) -> MouseButton {\n\n\tmatch button {\n\n\t\twinit::event::MouseButton::Left => MouseButton::Left,\n\n\t\twinit::event::MouseButton::Right => MouseButton::Right,\n\n\t\twinit::event::MouseButton::Middle => MouseButton::Middle,\n\n\t\twinit::event::MouseButton::Other(b) => MouseButton::Other(b as u32),\n\n\t}\n\n}\n", "file_path": "riddle-platform-winit/src/event.rs", "rank": 81, "score": 46298.662807912464 }, { "content": "\t\t}\n\n\t}\n\n}\n\n\n\nimpl Color<u8> {\n\n\t/// Convert the color in to an RGBA32.\n\n\t///\n\n\t/// Note that casting this value to a &[u8] will result in platform dependent component\n\n\t/// ordering.\n\n\t///\n\n\t///\n\n\t/// # Example\n\n\t///\n\n\t/// ```\n\n\t/// # use riddle_common::*;\n\n\t/// let c = Color::rgb(0, 255, 0);\n\n\t/// assert_eq!(0x00FF00FF, c.into_rgba8());\n\n\t/// ```\n\n\tpub fn into_rgba8(self) -> u32 {\n\n\t\t(self.r as u32) << 24 | (self.g as u32) << 16 | (self.b as u32) << 8 | (self.a as u32)\n", "file_path": "riddle-common/src/color.rs", "rank": 82, "score": 40872.34226745838 }, { "content": "\t\t*self\n\n\t}\n\n}\n\n\n\n/////////////////////////////////////////////////////////////////////////////\n\n// struct Color\n\n/////////////////////////////////////////////////////////////////////////////\n\n\n\n/// An RGBA color, parameterized over color channel type.\n\n///\n\n/// The two supported channel types are [`u8`] and [`f32`].\n\n///\n\n/// # 
Example\n\n///\n\n/// ```\n\n/// # use riddle_common::*;\n\n/// let c = Color{ r: 1.0, g: 0.0, b: 0.0, a: 1.0 };\n\n/// ```\n\n#[repr(C)]\n\n#[derive(Clone, Debug, Default)]\n", "file_path": "riddle-common/src/color.rs", "rank": 83, "score": 40865.22226526635 }, { "content": "\t}\n\n}\n\n\n\nimpl ColorElementConversion<u8> for u8 {\n\n\t#[inline]\n\n\tfn convert(&self) -> Self {\n\n\t\t*self\n\n\t}\n\n}\n\n\n\nimpl ColorElementConversion<u8> for f32 {\n\n\t#[inline]\n\n\tfn convert(&self) -> u8 {\n\n\t\t(self * 255.0) as u8\n\n\t}\n\n}\n\n\n\nimpl ColorElementConversion<f32> for f32 {\n\n\t#[inline]\n\n\tfn convert(&self) -> Self {\n", "file_path": "riddle-common/src/color.rs", "rank": 84, "score": 40864.640965132516 }, { "content": "\tpub fn rgba(r: E, g: E, b: E, a: E) -> Self {\n\n\t\tSelf { r, g, b, a }\n\n\t}\n\n\n\n\t/// Build an opaque color using rgb channels.\n\n\t///\n\n\t/// # Example\n\n\t///\n\n\t/// ```\n\n\t/// # use riddle_common::*;\n\n\t/// let c = Color::rgb(1.0, 0.0, 0.0);\n\n\t/// assert_eq!(Color::rgba(1.0, 0.0, 0.0, 1.0), c);\n\n\t/// ```\n\n\t#[inline]\n\n\tpub fn rgb(r: E, g: E, b: E) -> Self {\n\n\t\tSelf {\n\n\t\t\tr,\n\n\t\t\tg,\n\n\t\t\tb,\n\n\t\t\ta: E::SATURATED,\n", "file_path": "riddle-common/src/color.rs", "rank": 85, "score": 40862.88425358661 }, { "content": "pub struct Color<E: ColorElement> {\n\n\t/// Red\n\n\tpub r: E,\n\n\n\n\t/// Green\n\n\tpub g: E,\n\n\n\n\t/// Blue\n\n\tpub b: E,\n\n\n\n\t/// Alpha\n\n\tpub a: E,\n\n}\n\n\n\nimpl<E: ColorElement> Color<E> {\n\n\t/// Opaque primary red\n\n\tpub const RED: Self = Self {\n\n\t\tr: E::SATURATED,\n\n\t\tg: E::ZERO,\n\n\t\tb: E::ZERO,\n", "file_path": "riddle-common/src/color.rs", "rank": 86, "score": 40862.47056161472 }, { "content": "\t\ta: E::ZERO,\n\n\t};\n\n\n\n\t/// Transparent black\n\n\tpub const ZERO: Self = Self {\n\n\t\tr: E::ZERO,\n\n\t\tg: E::ZERO,\n\n\t\tb: E::ZERO,\n\n\t\ta: E::ZERO,\n\n\t};\n\n\n\n\t/// Build a color using all 4 channels.\n\n\t///\n\n\t/// # 
Example\n\n\t///\n\n\t/// ```\n\n\t/// # use riddle_common::*;\n\n\t/// let c = Color::rgba(1.0, 0.0, 0.0, 1.0);\n\n\t/// ```\n\n\t#[inline]\n", "file_path": "riddle-common/src/color.rs", "rank": 87, "score": 40861.489292772065 }, { "content": "\t}\n\n}\n\n\n\n/// Support converting colors between element types\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// # use riddle_common::*;\n\n/// let a: Color<f32> = Color::RED;\n\n/// let b: Color<u8> = Color::RED;\n\n/// let a_converted: Color<u8> = a.convert();\n\n/// assert_eq!(b, a_converted);\n\n/// ```\n\nimpl<T: ColorElement, F: ColorElementConversion<T> + ColorElement> ColorElementConversion<Color<T>>\n\n\tfor Color<F>\n\n{\n\n\t#[inline]\n\n\tfn convert(&self) -> Color<T> {\n\n\t\tColor::rgba(\n", "file_path": "riddle-common/src/color.rs", "rank": 88, "score": 40860.675697428895 }, { "content": "\t}\n\n}\n\n\n\nimpl<E: ColorElement> From<[E; 3]> for Color<E> {\n\n\t#[inline]\n\n\tfn from(c: [E; 3]) -> Self {\n\n\t\tSelf::rgb(c[0], c[1], c[2])\n\n\t}\n\n}\n\n\n\nimpl<E: Copy + ColorElement> From<Color<E>> for [E; 4] {\n\n\t#[inline]\n\n\tfn from(c: Color<E>) -> Self {\n\n\t\t[c.r, c.g, c.b, c.a]\n\n\t}\n\n}\n\n\n\nimpl<E: Copy + ColorElement> From<Color<E>> for [E; 3] {\n\n\t#[inline]\n\n\tfn from(c: Color<E>) -> Self {\n\n\t\t[c.r, c.g, c.b]\n\n\t}\n\n}\n", "file_path": "riddle-common/src/color.rs", "rank": 89, "score": 40860.217432659825 }, { "content": "\t\t\tself.r.convert(),\n\n\t\t\tself.g.convert(),\n\n\t\t\tself.b.convert(),\n\n\t\t\tself.a.convert(),\n\n\t\t)\n\n\t}\n\n}\n\n\n\nimpl<E: PartialEq + ColorElement> PartialEq for Color<E> {\n\n\tfn eq(&self, other: &Self) -> bool {\n\n\t\tself.r == other.r && self.g == other.g && self.b == other.b && self.a == other.a\n\n\t}\n\n}\n\n\n\nimpl<E: PartialEq + ColorElement> Eq for Color<E> {}\n\n\n\nimpl<E: ColorElement> From<[E; 4]> for Color<E> {\n\n\t#[inline]\n\n\tfn from(c: [E; 4]) -> Self {\n\n\t\tSelf::rgba(c[0], c[1], c[2], c[3])\n", "file_path": 
"riddle-common/src/color.rs", "rank": 90, "score": 40859.67364234076 }, { "content": "\tpub const BLACK: Self = Self {\n\n\t\tr: E::ZERO,\n\n\t\tg: E::ZERO,\n\n\t\tb: E::ZERO,\n\n\t\ta: E::SATURATED,\n\n\t};\n\n\n\n\t/// Opaque white\n\n\tpub const WHITE: Self = Self {\n\n\t\tr: E::SATURATED,\n\n\t\tg: E::SATURATED,\n\n\t\tb: E::SATURATED,\n\n\t\ta: E::SATURATED,\n\n\t};\n\n\n\n\t/// Transparent black\n\n\tpub const TRANSPARENT_BLACK: Self = Self {\n\n\t\tr: E::ZERO,\n\n\t\tg: E::ZERO,\n\n\t\tb: E::ZERO,\n", "file_path": "riddle-common/src/color.rs", "rank": 91, "score": 40854.62030852635 }, { "content": "\t\ta: E::SATURATED,\n\n\t};\n\n\n\n\t/// Opaque primary green\n\n\tpub const GREEN: Self = Self {\n\n\t\tr: E::ZERO,\n\n\t\tg: E::SATURATED,\n\n\t\tb: E::ZERO,\n\n\t\ta: E::SATURATED,\n\n\t};\n\n\n\n\t/// Opaque primary blue\n\n\tpub const BLUE: Self = Self {\n\n\t\tr: E::ZERO,\n\n\t\tg: E::ZERO,\n\n\t\tb: E::SATURATED,\n\n\t\ta: E::SATURATED,\n\n\t};\n\n\n\n\t/// Opaque black\n", "file_path": "riddle-common/src/color.rs", "rank": 92, "score": 40854.0371302497 }, { "content": "/////////////////////////////////////////////////////////////////////////////\n\n// trait ColorElement\n\n/////////////////////////////////////////////////////////////////////////////\n\n\n\n/// A type that represents a single channel in a color value.\n", "file_path": "riddle-common/src/color.rs", "rank": 93, "score": 40850.09760773927 }, { "content": "#![deny(clippy::all)]\n\n\n\n//! Riddle crate for loading and manipulating image data in main memory.\n\n//!\n\n//! Built largely on the back of `::image` and its dependencies.\n\n//!\n\n//! # Example\n\n//!\n\n//! ```\n\n//! # use riddle_image::*;\n\n//! # fn main() -> Result<(), ImageError> {\n\n//! // Load an image from a png\n\n//! let png_bytes = include_bytes!(\"../../example_assets/image.png\");\n\n//! let png_img = Image::load(&png_bytes[..], ImageFormat::Png)?;\n\n//!\n\n//! // Make a blank image and blit the png on to it\n\n//! 
let mut blank_img = Image::new(256, 256);\n\n//! blank_img.blit(&png_img, [0, 0].into());\n\n//! # Ok (()) }\n\n//! ```\n", "file_path": "riddle-image/src/lib.rs", "rank": 94, "score": 40217.93296810341 }, { "content": "\t}\n\n}\n\n\n\nimpl<'a> ImageViewMut<'a> {\n\n\tpub(crate) fn new(image: &'a mut Image, bounds: Rect<u32>) -> Self {\n\n\t\tlet details = ImageViewDetails::new(image, bounds);\n\n\t\tSelf { image, details }\n\n\t}\n\n\n\n\tpub fn get_row_rgba8_mut<'b>(&'b mut self, row: u32) -> &'b mut [u8]\n\n\twhere\n\n\t\t'a: 'b,\n\n\t{\n\n\t\tlet offset = self.details.start_offset + (self.details.stride * (row as usize));\n\n\t\tif offset < self.details.end_offset {\n\n\t\t\t&mut self.image.as_rgba8_mut()[offset..offset + self.details.row_len]\n\n\t\t} else {\n\n\t\t\t&mut self.image.as_rgba8_mut()[0..0]\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "riddle-image/src/imageview.rs", "rank": 95, "score": 40216.09283714641 }, { "content": "use crate::*;\n\n\n\nuse thiserror::Error;\n\n\n\n#[derive(Debug, Error)]\n\npub enum ImageError {\n\n\t#[error(\"Image decoding error: {0}\")]\n\n\tLoad(&'static str),\n\n\n\n\t#[error(\"Image encoding error: {0}\")]\n\n\tSave(&'static str),\n\n\n\n\t#[error(\"Image packing error: {0}\")]\n\n\tPacking(&'static str),\n\n\n\n\t#[error(transparent)]\n\n\tCommon(#[from] CommonError),\n\n}\n\n\n\nimpl From<ImageError> for CommonError {\n", "file_path": "riddle-image/src/error.rs", "rank": 96, "score": 40215.67475570592 }, { "content": "\n\n\t\tlet mut sorted_images: Vec<(usize, &Image)> = images\n\n\t\t\t.iter()\n\n\t\t\t.enumerate()\n\n\t\t\t.map(|(i, img)| (i, *img))\n\n\t\t\t.collect();\n\n\t\tsorted_images\n\n\t\t\t.sort_by(|(_, a), (_, b)| (b.height() * b.width()).cmp(&(a.height() * a.width())));\n\n\n\n\t\tlet mut current_size = self.size_policy.initial_size();\n\n\n\n\t\t'PACK_WITH_SIZE: loop {\n\n\t\t\tlet mut rects = vec![Rect::<u32>::default(); images.len()];\n\n\n\n\t\t\tlet mut output_image = Image::new(current_size.x, 
current_size.y);\n\n\t\t\tlet mut occupancy_image = Image::new(current_size.x, current_size.y);\n\n\n\n\t\t\tlet mut current_y = 0;\n\n\t\t\tlet mut current_x = 0;\n\n\n", "file_path": "riddle-image/src/packer.rs", "rank": 97, "score": 40214.47979280794 }, { "content": "\tfn pow2square_pack_single_pixel_image() {\n\n\t\tlet mut img = Image::new(1, 1);\n\n\t\timg.fill(Color::<u8>::RED);\n\n\n\n\t\tlet packed = ImagePacker::new()\n\n\t\t\t.size_policy(ImagePackerSizePolicy::Pow2Square)\n\n\t\t\t.pack(&[&img])\n\n\t\t\t.unwrap();\n\n\n\n\t\tassert_valid_result(&packed);\n\n\t\tassert_eq!(\n\n\t\t\tColor::<u8>::RED,\n\n\t\t\tpacked.image().get_pixel(packed.rects()[0].location)\n\n\t\t);\n\n\t}\n\n\n\n\t#[test]\n\n\tfn pow2square_pack_3x3_in_pow2_square() {\n\n\t\tlet mut img = Image::new(3, 3);\n\n\t\timg.fill(Color::<u8>::RED);\n", "file_path": "riddle-image/src/packer.rs", "rank": 98, "score": 40213.89637880959 }, { "content": "\t\t}\n\n\t}\n\n}\n\n\n\nimpl<'a> ImageView<'a> {\n\n\tpub(crate) fn new(image: &'a Image, bounds: Rect<u32>) -> Self {\n\n\t\tlet details = ImageViewDetails::new(image, bounds);\n\n\t\tSelf { image, details }\n\n\t}\n\n\n\n\tpub fn get_row_rgba8<'b>(&'b self, row: u32) -> &'b [u8]\n\n\twhere\n\n\t\t'a: 'b,\n\n\t{\n\n\t\tlet offset = self.details.start_offset + (self.details.stride * (row as usize));\n\n\t\tif offset < self.details.end_offset {\n\n\t\t\t&self.image.as_rgba8()[offset..offset + self.details.row_len]\n\n\t\t} else {\n\n\t\t\t&self.image.as_rgba8()[0..0]\n\n\t\t}\n", "file_path": "riddle-image/src/imageview.rs", "rank": 99, "score": 40212.17114069828 } ]
Rust
src/lang.rs
alamminsalo/ram
22ecebf41d1919e8f996779dc8fccb6a80740c88
use super::assets::Assets; use super::util; use super::Model; use failure::Fallible; use handlebars::Handlebars; use handlebars::*; use itertools::Itertools; use maplit::hashmap; use regex::Regex; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::path::{Path, PathBuf}; #[derive(Debug, Deserialize, Serialize, Clone)] pub struct Lang { #[serde(skip)] pub path: PathBuf, #[serde(default)] pub types: HashMap<String, Type>, #[serde(default)] pub helpers: HashMap<String, String>, #[serde(default)] pub files: Vec<AddFile>, #[serde(default)] pub paths: HashMap<String, String>, #[serde(default)] pub reserved: Vec<String>, } #[derive(Debug, Deserialize, Serialize, Clone)] pub struct AddFile { pub filename: Option<String>, pub template: String, #[serde(rename = "in")] pub file_in: Option<String>, pub path: Option<String>, } #[derive(Debug, Deserialize, Serialize, Clone)] pub struct Type { #[serde(default)] pub alias: Vec<String>, pub format: HashMap<String, Format>, } #[derive(Debug, Deserialize, Serialize, Clone)] pub struct Format { #[serde(rename = "type")] pub schema_type: String, } impl Lang { pub fn load_file(path: &Path) -> Fallible<Self> { let mut pathbuf = path.to_owned(); let data = { if path.extension().is_none() { pathbuf = PathBuf::from(&format!( "{lang}/{lang}.yaml", lang = &path.to_str().unwrap() )); } Assets::read_file(&PathBuf::from(&pathbuf))? 
}; let ext = path .extension() .and_then(std::ffi::OsStr::to_str) .unwrap_or("yaml"); let mut lang: Self = match ext { "yaml" | "yml" => serde_yaml::from_str(&data)?, "json" | _ => serde_json::from_str(&data)?, }; lang.path = pathbuf .parent() .expect("failed to get lang parent dir") .to_owned(); if lang.paths.get("root") == None { lang.paths.insert("root".into(), "".into()); } Ok(lang) } pub fn default_path(&self, path: &str) -> PathBuf { PathBuf::from( &self .paths .get(path) .expect(&format!("failed to find default path: {}", path)), ) } pub fn files_relative(&self) -> Vec<AddFile> { self.files .iter() .map(|af| AddFile { template: util::join_relative(&self.path, &PathBuf::from(&af.template)) .to_str() .unwrap() .into(), ..af.clone() }) .collect() } /* * Formatter functions */ pub fn format(&self, template_key: &str, value: &String) -> Fallible<String> { match template_key { "r" if !self.reserved.contains(&value) => Ok(value.clone()), _ => { let mut map = HashMap::new(); map.insert("value", value.as_str()); Ok(self.format_map(template_key, &map)) } } } pub fn format_map(&self, template_key: &str, map: &HashMap<&str, &str>) -> String { let mut hb = Handlebars::new(); util::init_handlebars(&mut hb); self.add_helpers(&mut hb); self.helpers .get(template_key) .and_then(|template| hb.render_template(template, map).ok()) .unwrap_or_else(|| map.get("value").unwrap().to_string()) } pub fn translate_modelname(&self, name: &String) -> String { let modelname = self.format("classname", &name).unwrap_or(name.clone()); self.format("object_property", &modelname) .unwrap_or(modelname) } pub fn translate_array(&self, m: &Model) -> String { let child = m .items .as_ref() .expect("array child type is None") .clone() .translate(self); self.format_map( "array", &hashmap!["value" => m.name.as_str(), "type" => child.schema_type.as_str(), "name" => m.name.as_str()], ) } pub fn translate_primitive(&self, schema_type: &String, format: &String) -> String { self.types .iter() .find(|(name, 
t)| *name == schema_type || t.alias.contains(schema_type)) .and_then(|(_, t)| t.format.get(format).or_else(|| t.format.get("default"))) .map(|f| f.schema_type.clone()) .expect(&format!( "Error while processing {}: failed to find primitive type {}", schema_type, format )) } pub fn add_helpers(&self, hb: &mut Handlebars) { for k in self.helpers.keys() { let lang = self.clone(); let key = k.clone(); let closure = move |h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output| -> HelperResult { let param = h .param(0) .and_then(|v| v.value().as_str()) .expect("parameter is missing") .to_string(); out.write(&lang.format(&key, &param).unwrap_or(param))?; Ok(()) }; hb.register_helper(k, Box::new(closure)); } } pub fn format_path(&self, p: String) -> String { let re = Regex::new(r"^\{(\w+)\}$").unwrap(); self.helpers .get("pathparam") .map(|_| { format!( "/{}", &Path::new(&p) .iter() .skip(1) .map(|part| part.to_str().unwrap()) .map(|part| { if let Some(cap) = re.captures_iter(part).next() { self.format("pathparam", &cap[1].to_owned()) .unwrap_or(part.to_string()) } else { part.to_string() } }) .join("/") ) }) .unwrap_or(p) } }
use super::assets::Assets; use super::util; use super::Model; use failure::Fallible; use handlebars::Handlebars; use handlebars::*; use itertools::Itertools; use maplit::hashmap; use regex::Regex; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::path::{Path, PathBuf}; #[derive(Debug, Deserialize, Serialize, Clone)] pub struct Lang { #[serde(skip)] pub path: PathBuf, #[serde(default)] pub types: HashMap<String, Type>, #[serde(default)] pub helpers: HashMap<String, String>, #[serde(default)] pub files: Vec<AddFile>, #[serde(default)] pub paths: HashMap<String, String>, #[serde(default)] pub reserved: Vec<String>, } #[derive(Debug, Deserialize, Serialize, Clone)] pub struct AddFile { pub filename: Option<String>, pub template: String, #[serde(rename = "in")] pub file_in: Option<String>, pub path: Option<String>, } #[derive(Debug, Deserialize, Serialize, Clone)] pub struct Type { #[serde(default)] pub alias: Vec<String>, pub format: HashMap<String, Format>, } #[derive(Debug, Deserialize, Serialize, Clone)] pub struct Format { #[serde(rename = "type")] pub schema_type: String, } impl Lang { pub fn load_file(path: &Path) -> Fallible<Self> { let mut pathbuf = path.to_owned(); let data = { if path.extension().is_none() { pathbuf = PathBuf::from(&format!( "{lang}/{lang}.yaml", lang = &path.to_str().unwrap() )); } Assets::read_file(&PathBuf::from(&pathbuf))? }; let ext = path .extension() .and_then(std::ffi::OsStr::to_str) .unwrap_or("yaml"); let mut lang: Self = match ext { "yaml" | "yml" => serde_yaml::from_str(&data)?, "
pub fn default_path(&self, path: &str) -> PathBuf { PathBuf::from( &self .paths .get(path) .expect(&format!("failed to find default path: {}", path)), ) } pub fn files_relative(&self) -> Vec<AddFile> { self.files .iter() .map(|af| AddFile { template: util::join_relative(&self.path, &PathBuf::from(&af.template)) .to_str() .unwrap() .into(), ..af.clone() }) .collect() } /* * Formatter functions */ pub fn format(&self, template_key: &str, value: &String) -> Fallible<String> { match template_key { "r" if !self.reserved.contains(&value) => Ok(value.clone()), _ => { let mut map = HashMap::new(); map.insert("value", value.as_str()); Ok(self.format_map(template_key, &map)) } } } pub fn format_map(&self, template_key: &str, map: &HashMap<&str, &str>) -> String { let mut hb = Handlebars::new(); util::init_handlebars(&mut hb); self.add_helpers(&mut hb); self.helpers .get(template_key) .and_then(|template| hb.render_template(template, map).ok()) .unwrap_or_else(|| map.get("value").unwrap().to_string()) } pub fn translate_modelname(&self, name: &String) -> String { let modelname = self.format("classname", &name).unwrap_or(name.clone()); self.format("object_property", &modelname) .unwrap_or(modelname) } pub fn translate_array(&self, m: &Model) -> String { let child = m .items .as_ref() .expect("array child type is None") .clone() .translate(self); self.format_map( "array", &hashmap!["value" => m.name.as_str(), "type" => child.schema_type.as_str(), "name" => m.name.as_str()], ) } pub fn translate_primitive(&self, schema_type: &String, format: &String) -> String { self.types .iter() .find(|(name, t)| *name == schema_type || t.alias.contains(schema_type)) .and_then(|(_, t)| t.format.get(format).or_else(|| t.format.get("default"))) .map(|f| f.schema_type.clone()) .expect(&format!( "Error while processing {}: failed to find primitive type {}", schema_type, format )) } pub fn add_helpers(&self, hb: &mut Handlebars) { for k in self.helpers.keys() { let lang = self.clone(); let key = 
k.clone(); let closure = move |h: &Helper, _: &Handlebars, _: &Context, _: &mut RenderContext, out: &mut dyn Output| -> HelperResult { let param = h .param(0) .and_then(|v| v.value().as_str()) .expect("parameter is missing") .to_string(); out.write(&lang.format(&key, &param).unwrap_or(param))?; Ok(()) }; hb.register_helper(k, Box::new(closure)); } } pub fn format_path(&self, p: String) -> String { let re = Regex::new(r"^\{(\w+)\}$").unwrap(); self.helpers .get("pathparam") .map(|_| { format!( "/{}", &Path::new(&p) .iter() .skip(1) .map(|part| part.to_str().unwrap()) .map(|part| { if let Some(cap) = re.captures_iter(part).next() { self.format("pathparam", &cap[1].to_owned()) .unwrap_or(part.to_string()) } else { part.to_string() } }) .join("/") ) }) .unwrap_or(p) } }
json" | _ => serde_json::from_str(&data)?, }; lang.path = pathbuf .parent() .expect("failed to get lang parent dir") .to_owned(); if lang.paths.get("root") == None { lang.paths.insert("root".into(), "".into()); } Ok(lang) }
function_block-function_prefix_line
[ { "content": "pub fn register_helpers(hb: &mut Handlebars) {\n\n hb.register_helper(\"lowercase\", Box::new(lowercase));\n\n hb.register_helper(\"uppercase\", Box::new(uppercase));\n\n hb.register_helper(\"pascalcase\", Box::new(pascalcase));\n\n hb.register_helper(\"snakecase\", Box::new(snakecase));\n\n hb.register_helper(\"screamingcase\", Box::new(screamingcase));\n\n hb.register_helper(\"camelcase\", Box::new(camelcase));\n\n hb.register_helper(\"kebabcase\", Box::new(kebabcase));\n\n}\n", "file_path": "src/helper.rs", "rank": 0, "score": 143703.1461484059 }, { "content": "pub fn split_files(content: String, dirpath: PathBuf) -> Vec<(PathBuf, String)> {\n\n let mut filemap: Vec<(PathBuf, String)> = vec![];\n\n let mut mark: Option<PathBuf> = None;\n\n let mut data: Vec<&str> = vec![];\n\n\n\n for line in content.lines() {\n\n let line_trimmed = line.trim();\n\n // check if ran into filebegin mark\n\n if line_trimmed.len() > 10 && &line_trimmed[..10] == \"%filebegin\" {\n\n // if mark is set, push contents and reset\n\n if mark.is_some() {\n\n filemap.push((mark.take().unwrap(), data.join(\"\\n\")));\n\n }\n\n // clear previous data\n\n data.clear();\n\n // set filebegin mark\n\n // concate with dirpath\n\n mark = Some(dirpath.join(&line_trimmed[11..]));\n\n } else if mark.is_some() {\n\n // push to data line as it was\n", "file_path": "src/util.rs", "rank": 1, "score": 140918.34289485717 }, { "content": "// writes files in map\n\npub fn write_files(root: &Path, map: HashMap<PathBuf, String>) {\n\n let ignored = ignore_patterns();\n\n for (file, data) in map.iter() {\n\n let path = root.join(&file);\n\n if ignored.iter().any(|p| p.matches_path(&path)) {\n\n info!(\"ignoring file {}\", path.to_str().unwrap_or(\"\"));\n\n continue;\n\n }\n\n info!(\"writing {}\", &path.to_str().unwrap());\n\n // create dirs if needed\n\n fs::create_dir_all(path.parent().expect(\"failed to get parent dir\"))\n\n .expect(\"failed to create directory\");\n\n fs::write(path, 
data).expect(&format!(\"failed to write file {}\", &file.display()));\n\n }\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 2, "score": 137456.8309995946 }, { "content": "pub fn generate_files(state: State) -> HashMap<PathBuf, String> {\n\n info!(\"Generating files...\");\n\n let mut hb = Handlebars::new();\n\n util::init_handlebars(&mut hb);\n\n\n\n // add lang helpers to hb\n\n state.lang.add_helpers(&mut hb);\n\n\n\n // render files\n\n let files: Vec<AddFile> = state.cfg.get_files(match state.no_defaults {\n\n // by default (false), include language-defined templates\n\n false => Some(&state.lang),\n\n _ => None,\n\n });\n\n\n\n info!(\"Rendering templates...\");\n\n render_files(&mut hb, &state, files)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 3, "score": 125061.4680743829 }, { "content": "pub fn init_handlebars(hb: &mut Handlebars) {\n\n // disable html escaping\n\n hb.register_escape_fn(handlebars::no_escape);\n\n\n\n // register custom helpers\n\n helper::register_helpers(hb);\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 4, "score": 123931.52417317781 }, { "content": "// joins a + b if b is relative path, otherwise returns b\n\npub fn join_relative(a: &Path, b: &Path) -> PathBuf {\n\n if b.is_relative() {\n\n a.join(b)\n\n } else {\n\n PathBuf::from(b)\n\n }\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 5, "score": 117634.22231004701 }, { "content": "// Returns model name from ref path\n\npub fn model_name_from_ref(ref_path: &str) -> Option<String> {\n\n if let Some(idx) = ref_path.rfind('/') {\n\n Some(ref_path[idx + 1..].to_string())\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 6, "score": 102149.51218990327 }, { "content": "// reads schemas from file\n\nfn read_schemas(path: &Path) -> Fallible<HashMap<String, Schema>> {\n\n let ext: Option<&str> = path.extension().and_then(std::ffi::OsStr::to_str);\n\n let data = std::fs::read_to_string(path)?;\n\n\n\n Ok(match ext {\n\n Some(\"yaml\") | Some(\"yml\") => 
serde_yaml::from_str(&data)?,\n\n Some(\"json\") => serde_json::from_str(&data)?,\n\n _ => failure::bail!(\"unsupported file type\"),\n\n })\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 7, "score": 97724.04510948746 }, { "content": "/// Returns parameter lists by location\n\npub fn get_params_path(\n\n path: &PathItem,\n\n location: &str,\n\n parameters: &HashMap<String, Parameter>,\n\n) -> Vec<Param> {\n\n path.parameters\n\n .iter()\n\n .flat_map(|params| {\n\n params.iter().filter_map(|p| match p {\n\n ObjectOrReference::Object(t) => Some(t),\n\n ObjectOrReference::Ref { ref_path } => {\n\n util::model_name_from_ref(&ref_path).and_then(|name| parameters.get(&name))\n\n }\n\n })\n\n })\n\n .filter(|p| p.location == location)\n\n .filter_map(from_param)\n\n .collect()\n\n}\n", "file_path": "src/param.rs", "rank": 8, "score": 97052.87400895494 }, { "content": "// creates file path,\n\n// removing everything after '#'\n\nfn ref_file<'a>(ref_path: &'a String) -> Option<&'a str> {\n\n ref_path\n\n .split(\"#\")\n\n .next()\n\n .and_then(|p| if !p.is_empty() { Some(p) } else { None })\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 9, "score": 93285.49115590451 }, { "content": "pub fn collect_schemas<'a>(spec: &'a Spec, root: &'a Path) -> Fallible<HashMap<String, Schema>> {\n\n let component_schemas = spec\n\n .components\n\n .iter()\n\n .flat_map(|components| {\n\n components\n\n .schemas\n\n .iter()\n\n .flatten()\n\n .filter_map(|(k, v)| match v {\n\n ObjectOrReference::Object(t) => Some((k.clone(), t.clone())),\n\n _ => None,\n\n })\n\n })\n\n .collect::<HashMap<String, Schema>>();\n\n\n\n // collect and return schemas\n\n Ok(iter_spec_schemas(spec)\n\n .flat_map(|schema| iter_ref_paths(&schema))\n\n .filter_map(ref_file)\n\n .unique()\n\n .flat_map(move |path| schemas_from_ref(&root, path, &HashMap::new()))\n\n .flatten()\n\n .chain(component_schemas)\n\n .collect())\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 10, "score": 89659.34296719794 }, { 
"content": "pub fn extract_model_name(schema: &Schema) -> Option<String> {\n\n schema\n\n .ref_path\n\n .as_ref()\n\n .and_then(|ref_path| model_name_from_ref(&ref_path))\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 11, "score": 87196.46849262543 }, { "content": "pub fn generate_models_v3(spec: &Spec, root: &Path) -> Vec<Model> {\n\n // iterate components\n\n // + generate models\n\n util::collect_schemas(spec, root)\n\n .expect(\"failed to collect schemas\")\n\n .iter()\n\n .map(|(key, schema)| Model::new(key, schema, key))\n\n .collect()\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 12, "score": 78122.78983163828 }, { "content": "/// Creates ready to use state value with translated models\n\npub fn create_state(\n\n cfg: Config,\n\n mut models: Vec<Model>,\n\n mut resource_groups: Vec<ResourceGroup>,\n\n no_defaults: bool,\n\n) -> State {\n\n // get lang config\n\n let lang = cfg.get_lang().expect(\"failed to create lang spec!\");\n\n\n\n // translate and format models and resource groups\n\n models = translate_models(&lang, models);\n\n resource_groups = translate_resource_groups(&lang, resource_groups);\n\n\n\n State {\n\n cfg,\n\n models,\n\n resource_groups,\n\n lang,\n\n no_defaults,\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 13, "score": 77857.27021373509 }, { "content": "/// Groups resources with given grouping strategy\n\npub fn group_resources(\n\n paths: &IndexMap<String, PathItem>,\n\n grouping_strategy: GroupingStrategy,\n\n parameters: &HashMap<String, Parameter>,\n\n) -> Vec<ResourceGroup> {\n\n let iter = paths.iter().flat_map(|(path, item)| {\n\n let path_params = get_params_path(item, \"path\", parameters);\n\n let query_params = get_params_path(item, \"query\", parameters);\n\n vec![\n\n (\n\n path.clone(),\n\n \"GET\",\n\n item.get.as_ref(),\n\n path_params.clone(),\n\n query_params.clone(),\n\n ),\n\n (\n\n path.clone(),\n\n \"PUT\",\n\n item.put.as_ref(),\n", "file_path": "src/resource.rs", "rank": 14, "score": 
77854.4495413596 }, { "content": "/// Returns parameter lists by location\n\npub fn get_params_operation(\n\n operation: &Operation,\n\n location: &str,\n\n parameters: &HashMap<String, Parameter>,\n\n) -> Vec<Param> {\n\n operation\n\n .parameters\n\n .iter()\n\n .flat_map(|params| {\n\n params.iter().filter_map(|p| match p {\n\n ObjectOrReference::Object(t) => Some(t),\n\n ObjectOrReference::Ref { ref_path } => {\n\n util::model_name_from_ref(&ref_path).and_then(|name| parameters.get(&name))\n\n }\n\n })\n\n })\n\n .filter(|p| p.location == location)\n\n .filter_map(from_param)\n\n .collect()\n\n}\n\n\n", "file_path": "src/param.rs", "rank": 15, "score": 75630.825815262 }, { "content": "pub fn generate_resources_v3(\n\n spec: &Spec,\n\n root: &Path,\n\n grouping_strategy: GroupingStrategy,\n\n) -> Vec<ResourceGroup> {\n\n let parameters_map =\n\n util::collect_parameters(spec, root).expect(\"failed to collect parameters\");\n\n resource::group_resources(&spec.paths, grouping_strategy, &parameters_map)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 16, "score": 75630.825815262 }, { "content": "pub fn collect_parameters<'a>(\n\n spec: &'a Spec,\n\n _root: &'a Path,\n\n) -> Fallible<HashMap<String, Parameter>> {\n\n let component_parameters = spec\n\n .components\n\n .iter()\n\n .flat_map(|components| {\n\n components\n\n .parameters\n\n .iter()\n\n .flatten()\n\n .filter_map(|(k, v)| match v {\n\n ObjectOrReference::Object(t) => Some((k.clone(), t.clone())),\n\n _ => None,\n\n })\n\n })\n\n .collect::<HashMap<String, Parameter>>();\n\n\n\n Ok(component_parameters)\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 17, "score": 73907.18140473016 }, { "content": "// iterates all the schemas in Spec\n\npub fn iter_spec_schemas<'a>(spec: &'a Spec) -> impl Iterator<Item = &'a Schema> {\n\n // helper function to map ObjectOrReference inner types\n\n fn map_obj_or_refence<'a, T: 'a>(\n\n iter: impl Iterator<Item = &'a ObjectOrReference<T>>,\n\n ) -> impl Iterator<Item = 
&'a T> {\n\n iter.filter_map(|obj_or_ref| match obj_or_ref {\n\n ObjectOrReference::Object(t) => Some(t),\n\n _ => None,\n\n })\n\n };\n\n\n\n let components_schemas = spec.components.iter().flat_map(|components| {\n\n components\n\n .schemas\n\n .iter()\n\n .flat_map(|hashmap| map_obj_or_refence(hashmap.values()))\n\n });\n\n\n\n let path_schemas = spec.paths.values().flat_map(move |p| {\n\n std::iter::empty()\n", "file_path": "src/util.rs", "rank": 18, "score": 73180.7335703799 }, { "content": "// runs lang translations on all models\n\nfn translate_models(lang: &Lang, models: Vec<Model>) -> Vec<Model> {\n\n models.into_iter().map(|m| m.translate(lang)).collect()\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 19, "score": 67590.60043056935 }, { "content": "// returns ignore patterns from '.ramignore'\n\npub fn ignore_patterns() -> Vec<Pattern> {\n\n fs::read_to_string(\".ramignore\")\n\n .and_then(|contents| {\n\n Ok(contents\n\n .split(\"\\n\")\n\n .filter(|l| !l.is_empty())\n\n .filter(|l| !l.starts_with(\"#\"))\n\n .filter_map(|line| Pattern::new(line).ok())\n\n .collect())\n\n })\n\n .unwrap_or(vec![])\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 20, "score": 66544.93706462428 }, { "content": "// Renders extra files\n\nfn render_files(\n\n hb: &mut Handlebars,\n\n state: &State,\n\n files: Vec<AddFile>,\n\n) -> HashMap<PathBuf, String> {\n\n // state to serde json value\n\n let statejson = json!(&state);\n\n\n\n // render files\n\n files\n\n .into_iter()\n\n .flat_map(|f: AddFile| {\n\n // get data from assets and render it\n\n let template = Assets::read_file(&PathBuf::from(&f.template)).unwrap();\n\n let render = hb\n\n .render_template(&template, &statejson)\n\n .expect(\"failed to render additional file template\");\n\n // make path\n\n let dirpath: PathBuf = if let Some(ref abspath) = f.path {\n\n // get from absolute path\n", "file_path": "src/lib.rs", "rank": 21, "score": 65222.66169491298 }, { "content": "/// Returns all ref_paths for 
schema\n\nfn iter_ref_paths<'a>(schema: &'a Schema) -> Box<dyn Iterator<Item = &'a String> + 'a> {\n\n Box::new(\n\n schema\n\n .ref_path\n\n .iter()\n\n .map(|s| Some(s))\n\n .filter_map(|r| r)\n\n .chain(\n\n schema\n\n .properties\n\n .iter()\n\n .flat_map(|hashmap| hashmap.values().flat_map(|s| iter_ref_paths(&s))),\n\n )\n\n .chain(schema.items.iter().flat_map(|s| iter_ref_paths(&s)))\n\n .chain(\n\n schema\n\n .additional_properties\n\n .iter()\n\n .map(|obj_or_ref| match obj_or_ref {\n\n ObjectOrReference::Object(schema) => Some(schema),\n\n _ => None,\n\n })\n\n .filter_map(|x| x)\n\n .flat_map(|s| iter_ref_paths(&s)),\n\n ),\n\n )\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 22, "score": 63621.74479046836 }, { "content": "#[derive(Debug, StructOpt)]\n\n#[structopt(name = \"ram\", about = \"openapi generator\")]\n\nstruct Arguments {\n\n /// ram configuration file path\n\n #[structopt(short, long)]\n\n config: PathBuf,\n\n\n\n /// input openapi spec file\n\n #[structopt(short, long)]\n\n input: PathBuf,\n\n\n\n /// output path\n\n #[structopt(short, long)]\n\n output: Option<PathBuf>,\n\n\n\n /// prints state passed to templates as json\n\n #[structopt(short, long)]\n\n json: bool,\n\n\n\n /// skips generating default asset files\n\n #[structopt(short, long)]\n\n no_defaults: bool,\n\n\n\n /// quiet logging level\n\n #[structopt(short, long)]\n\n quiet: bool,\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 23, "score": 48275.779808089326 }, { "content": "fn main() {\n\n let args = Arguments::from_args();\n\n\n\n init_logging(args.quiet);\n\n\n\n let cfg = Config::load_file(&args.config).unwrap();\n\n let spec = openapi::from_path(&args.input).unwrap();\n\n\n\n let mut specpath = args.input;\n\n specpath.pop();\n\n\n\n // assemble state variable\n\n let state = match spec {\n\n openapi::OpenApi::V3_0(spec) => {\n\n let models = ram::generate_models_v3(&spec, &specpath);\n\n let resource_groups = ram::generate_resources_v3(\n\n &spec,\n\n &specpath,\n\n 
cfg.grouping_strategy.unwrap_or(GroupingStrategy::FirstTag),\n\n );\n", "file_path": "src/main.rs", "rank": 24, "score": 42454.93305414716 }, { "content": "fn schemas_from_ref(\n\n root: &Path,\n\n ref_path: &str,\n\n a: &HashMap<String, Schema>,\n\n) -> Fallible<HashMap<String, Schema>> {\n\n let mut path: PathBuf = root.join(&ref_path);\n\n\n\n // read schemas from file to map b,\n\n // filtering out schemas that are already in map a\n\n let b: HashMap<String, Schema> = read_schemas(&path)\n\n .expect(\"failed to read schemas!\")\n\n .into_iter()\n\n .filter(|(k, _)| !a.contains_key(k))\n\n .collect();\n\n\n\n // merge together in a map\n\n let mut merged = a.clone();\n\n merged.extend(b.clone());\n\n\n\n // create next root path by popping filename from path\n", "file_path": "src/util.rs", "rank": 25, "score": 41095.613283979066 }, { "content": "#[test]\n\nfn it_reads_models() {\n\n let spec = openapi::from_path(\"examples/openapi/petstore.yaml\").unwrap();\n\n let specpath = PathBuf::from(\"examples/openapi/\");\n\n match spec {\n\n openapi::OpenApi::V3_0(spec) => {\n\n let models = ram::generate_models_v3(&spec, &specpath);\n\n assert_eq!(models.len(), 4);\n\n }\n\n _ => {}\n\n };\n\n}\n\n\n", "file_path": "tests/test_generate.rs", "rank": 26, "score": 39882.23897079293 }, { "content": "fn translate_resource_groups(\n\n lang: &Lang,\n\n resource_groups: Vec<ResourceGroup>,\n\n) -> Vec<ResourceGroup> {\n\n resource_groups\n\n .into_iter()\n\n // run format on all resources\n\n .map(|rg| {\n\n let mut rg2 = rg.clone();\n\n rg2.resources = rg2\n\n .resources\n\n .into_iter()\n\n .map(|r| r.translate(lang))\n\n .collect();\n\n rg2\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 27, "score": 39882.23897079293 }, { "content": "fn main() {\n\n HttpServer::new(|| App::new().service(api::routes()))\n\n .bind(\"127.0.0.1:8000\")\n\n .expect(\"Can not bind to port 8000\")\n\n .run()\n\n .unwrap();\n\n}\n", "file_path": 
"examples/rust/actix/src/main.rs", "rank": 28, "score": 38792.50313842961 }, { "content": "fn main() {\n\n}\n", "file_path": "examples/rust/models/src/main.rs", "rank": 29, "score": 38792.50313842961 }, { "content": "fn main() {\n\n rocket::ignite().mount(\"/\", api::routes()).launch();\n\n}\n", "file_path": "examples/rust/rocket/src/main.rs", "rank": 30, "score": 38792.50313842961 }, { "content": "#[test]\n\nfn it_generates_resources_rust() {\n\n let cfg = Config::load_file(&PathBuf::from(\"examples/rust/rocket/rocket.yaml\")).unwrap();\n\n let output = PathBuf::from(\"tests_output/res\");\n\n\n\n let spec = openapi::from_path(\"examples/openapi/farm.yaml\").unwrap();\n\n let specpath = PathBuf::from(\"examples/openapi/\");\n\n\n\n // assert vars\n\n let res_count = 1;\n\n let mut resource_groups = vec![];\n\n\n\n match spec {\n\n openapi::OpenApi::V3_0(spec) => {\n\n resource_groups = ram::generate_resources_v3(\n\n &spec,\n\n &specpath,\n\n cfg.grouping_strategy.unwrap_or(GroupingStrategy::FirstTag),\n\n );\n\n assert_eq!(resource_groups.len(), res_count);\n\n let state = ram::create_state(cfg, vec![], resource_groups.clone(), false);\n", "file_path": "tests/test_generate.rs", "rank": 31, "score": 38792.50313842961 }, { "content": "#[test]\n\nfn it_generates_models_rust() {\n\n let cfg = Config {\n\n lang: String::from(\"rust\"),\n\n path: PathBuf::from(\"./tests\"),\n\n files: vec![],\n\n helpers: HashMap::new(),\n\n paths: HashMap::new(),\n\n grouping_strategy: None,\n\n };\n\n let output = PathBuf::from(\"tests_output/models\");\n\n\n\n let spec = openapi::from_path(\"examples/openapi/farm.yaml\").unwrap();\n\n let specpath = PathBuf::from(\"examples/openapi/\");\n\n\n\n // assert vars\n\n let models_count = 7;\n\n let mut models = vec![];\n\n\n\n match spec {\n\n openapi::OpenApi::V3_0(spec) => {\n", "file_path": "tests/test_generate.rs", "rank": 32, "score": 38792.50313842961 }, { "content": "fn init_logging(quiet: bool) {\n\n env_logger::builder()\n\n 
.format_timestamp(None)\n\n .format_module_path(false)\n\n .format_level(true)\n\n .filter(\n\n None,\n\n match quiet {\n\n true => LevelFilter::Error,\n\n _ => LevelFilter::Info,\n\n },\n\n )\n\n .init();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 33, "score": 34524.85401150253 }, { "content": "fn from_param(p: &Parameter) -> Option<Param> {\n\n p.schema.as_ref().and_then(|schema| {\n\n Some(Param {\n\n name: p.name.clone(),\n\n model: Model::new(&p.name, &schema, \"\"),\n\n required: p.required.unwrap_or(false),\n\n })\n\n })\n\n}\n\n\n", "file_path": "src/param.rs", "rank": 34, "score": 31857.573307173116 }, { "content": "#[allow(dead_code)]\n\nfn normalize_models(models: Vec<Model>) -> Vec<Model> {\n\n // map top-level models by name\n\n let models_map = models\n\n .iter()\n\n .map(|m| (m.def.clone(), m.clone()))\n\n .collect::<HashMap<String, Model>>();\n\n\n\n // normalize models\n\n models\n\n .into_iter()\n\n .map(|m| m.normalize(&models_map))\n\n .collect()\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 35, "score": 28175.57203163363 }, { "content": "use handlebars::{handlebars_helper, Handlebars};\n\nuse inflector::Inflector;\n\n\n\nhandlebars_helper!(lowercase: |s: str| s.to_lowercase());\n\nhandlebars_helper!(uppercase: |s: str| s.to_uppercase());\n\nhandlebars_helper!(pascalcase: |s: str| s.to_pascal_case());\n\nhandlebars_helper!(snakecase: |s: str| s.to_snake_case());\n\nhandlebars_helper!(screamingcase: |s: str| s.to_screaming_snake_case());\n\nhandlebars_helper!(camelcase: |s: str| s.to_camel_case());\n\nhandlebars_helper!(kebabcase: |s: str| s.to_kebab_case());\n\n\n", "file_path": "src/helper.rs", "rank": 36, "score": 27372.246556693164 }, { "content": "use super::util;\n\nuse super::{AddFile, GroupingStrategy, Lang};\n\nuse failure::Fallible;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::collections::HashMap;\n\nuse std::fs::File;\n\nuse std::io::BufReader;\n\nuse std::path::{Path, PathBuf};\n\n\n\n#[derive(Debug, Serialize, 
Deserialize)]\n\npub struct Config {\n\n #[serde(skip)]\n\n pub path: PathBuf,\n\n\n\n // Defines language or direct path to custom lang spec\n\n pub lang: String,\n\n\n\n #[serde(default)]\n\n pub paths: HashMap<String, String>,\n\n\n", "file_path": "src/config.rs", "rank": 48, "score": 19.839296317256057 }, { "content": "use super::lang::Lang;\n\nuse super::util;\n\nuse indexmap::IndexMap;\n\nuse openapi::v3_0::{ObjectOrReference, Schema};\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::Value;\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug, Clone)]\n\npub enum ModelType {\n\n Primitive,\n\n Object,\n\n Array,\n\n}\n\n\n\n#[derive(Default, Serialize, Deserialize, Debug, Clone)]\n\npub struct Model {\n\n pub def: String,\n\n pub name: String,\n\n #[serde(rename = \"type\")]\n", "file_path": "src/model.rs", "rank": 49, "score": 18.720133201711853 }, { "content": " Lang::load_file(&path).and_then(|mut lang| {\n\n // add custom formatters to lang formatters\n\n lang.helpers.extend(self.helpers.clone());\n\n Ok(lang)\n\n })\n\n }\n\n\n\n // Returns formatted path according to config / lang spec defaults\n\n pub fn get_path(&self, path_key: &str, lang: &Lang) -> PathBuf {\n\n self.paths\n\n .get(path_key)\n\n .and_then(|p| Some(PathBuf::from(&p)))\n\n .or_else(|| Some(lang.default_path(path_key)))\n\n .unwrap()\n\n }\n\n\n\n pub fn get_files(&self, lang: Option<&Lang>) -> Vec<AddFile> {\n\n let config_files = self.files.iter().map(|f: &AddFile| {\n\n // join relative cfg path\n\n let template = util::join_relative(&self.path, &PathBuf::from(&f.template))\n", "file_path": "src/config.rs", "rank": 50, "score": 17.572965626220167 }, { "content": "## Templating\n\n\n\nSupports using built-in or custom templates by configuration.\n\n\n\nTemplating uses [handlebars](https://handlebars-draft.knappi.org/guide) syntax, though [some features are missing in templating 
library](https://github.com/sunng87/handlebars-rust#limited-but-essential-control-structure-built-in).\n\n\n\nExample template file (from default golang model template):\n\n```\n\n{{#each models}}\n\n%filebegin={{filename name}}\n\npackage model\n\n\n\n{{#if has_datetime}}\n\nimport (\n\n \"time\"\n\n)\n\n{{/if}}\n\n\n\n{{#if is_object}}\n\ntype {{pascalcase name}} struct {\n\n{{#each properties}}\n\n {{ pascalcase name }} {{ type }} `json:\"{{ camelcase name }}\" {{ x-go-custom-tag }}`\n\n{{/each}}\n\n{{#if additional_properties}}\n\n{{#with additional_properties}}\n\n{{#each properties}}\n\n {{ pascalcase name }} {{type}} `json:\"-\" {{ x-go-custom-tag }}`\n\n{{/each}}\n\n{{/with}}\n\n{{/if}}\n\n}\n\n{{/if}}\n\n{{/each}}\n\n```\n\n\n\nTemplate white-space formatting is cumbersome, so usage of a language formatter is recommended.\n\n\n\n## Helpers\n\n\n\nIncludes some built-in [custom helpers](https://handlebars-draft.knappi.org/guide/#custom-helpers), which can be used in templates:\n\n```\n\n* lowercase - lowercase\n\n* uppercase - UPPERCASE\n\n* snakecase - snake_case\n\n* pascalcase - PascalCase\n\n* screamingcase - SCREAMING_SNAKE_CASE\n\n* camelcase - camelCase\n\n* kebabcase - kebab-case\n\n* r - Formats reserved keywords according to language spec (Rust example: type -> r#type). 
Kept short for convenience.\n\n```\n\n\n\nAlso includes [all built-in helpers from handlebars lib](https://docs.rs/handlebars/3.0.0-beta.1/handlebars/#built-in-helpers).\n\n\n\n## Ignoring files\n\n\n\nIgnoring files can be done with `.ramignore`, which follows `.gitignore` format:\n\n```\n\nsrc/some/file/to/ignore.rs\n\nsrc/some/files/to/ignore/*.rs\n\nsrc/some/**/*.rs\n\n```\n\n\n\nNote that ignorefile currently only matches entries relative to current working directory, \n\nso for example ignorefile in different output directory won't get matched.\n\n\n\n## Debugging json state\n\n\n\nEvery template is passed the whole state object with translated field names and other preprocessed data.\n\nTo output this state as a json object: use the `--json` flag.\n\n\n", "file_path": "README.md", "rank": 51, "score": 17.224772649336497 }, { "content": "use super::param::{get_params_operation, get_params_path, Param};\n\nuse super::Lang;\n\nuse super::Model;\n\nuse indexmap::IndexMap;\n\nuse itertools::Itertools;\n\nuse openapi::v3_0::ObjectOrReference;\n\nuse openapi::v3_0::{Operation, Parameter, PathItem};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct Resource {\n\n /// Resource URI\n\n pub path: String,\n\n\n\n /// HTTP method\n\n pub method: String,\n\n\n\n /// Resource name usable for function names\n\n pub name: String,\n", "file_path": "src/resource.rs", "rank": 52, "score": 17.167064217813973 }, { "content": " let mut cfg: Config = match ext {\n\n \"yaml\" | \"yml\" => serde_yaml::from_reader(reader)?,\n\n \"json\" | _ => serde_json::from_reader(reader)?,\n\n };\n\n\n\n // set cfg path\n\n cfg.path = path.canonicalize().unwrap().parent().unwrap().into();\n\n\n\n Ok(cfg)\n\n }\n\n\n\n pub fn get_lang(&self) -> Fallible<Lang> {\n\n let f = &self.lang;\n\n\n\n // if file has extension set, assume its a path to file and join path\n\n let mut path = PathBuf::from(f);\n\n if 
path.extension().is_some() {\n\n path = util::join_relative(&self.path, &path);\n\n }\n\n // load lang file\n", "file_path": "src/config.rs", "rank": 53, "score": 16.493840633573164 }, { "content": " // custom formatters, these are added to lang formatters\n\n #[serde(default)]\n\n pub helpers: HashMap<String, String>,\n\n\n\n /// Additional files to generate\n\n #[serde(default)]\n\n pub files: Vec<AddFile>,\n\n\n\n #[serde(default)]\n\n pub grouping_strategy: Option<GroupingStrategy>,\n\n}\n\n\n\nimpl Config {\n\n pub fn load_file(path: &Path) -> Fallible<Config> {\n\n let file = File::open(path)?;\n\n let reader = BufReader::new(file);\n\n\n\n let ext = path.extension().expect(\"failed to get extension\");\n\n let ext: &str = ext.to_str().expect(\"failed to read extension\");\n\n\n", "file_path": "src/config.rs", "rank": 54, "score": 16.398781495237458 }, { "content": "use super::util;\n\nuse super::Model;\n\nuse openapi::v3_0::{ObjectOrReference, Operation, Parameter, PathItem};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct Param {\n\n pub name: String,\n\n pub model: Model,\n\n pub required: bool,\n\n}\n\n\n", "file_path": "src/param.rs", "rank": 55, "score": 16.35865229192216 }, { "content": "mod assets;\n\nmod config;\n\nmod helper;\n\nmod lang;\n\nmod model;\n\nmod param;\n\nmod resource;\n\nmod state;\n\npub mod util;\n\n\n\nuse assets::Assets;\n\npub use config::Config;\n\npub use lang::{AddFile, Lang};\n\npub use model::{Model, ModelType};\n\npub use param::Param;\n\npub use resource::{GroupingStrategy, Resource, ResourceGroup};\n\npub use state::State;\n\n\n\nuse handlebars::Handlebars;\n\nuse log::info;\n\nuse openapi::v3_0::Spec;\n\nuse serde_json::json;\n\nuse std::collections::HashMap;\n\nuse std::path::{Path, PathBuf};\n\n\n\n#[allow(dead_code)]\n", "file_path": "src/lib.rs", "rank": 56, "score": 16.17059205777349 }, { "content": " .into_iter()\n\n 
.map(|(key, model)| (key, model.translate(lang)))\n\n .collect(),\n\n ..self\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct ResourceGroup {\n\n /// Group name\n\n /// Resources are grouped by first tag on them\n\n pub name: String,\n\n /// Resources under this group\n\n pub resources: Vec<Resource>,\n\n /// Grouping strategy used\n\n pub grouping_strategy: GroupingStrategy,\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, Serialize, Deserialize)]\n\npub enum GroupingStrategy {\n\n Nothing,\n\n Path,\n\n FirstTag,\n\n Operation,\n\n}\n\n\n\n/// Groups resources with given grouping strategy\n", "file_path": "src/resource.rs", "rank": 57, "score": 14.873433506923865 }, { "content": "# Rusty API Modeler\n\n\n\n[![pipeline status](https://gitlab.com/alamminsalo/ram/badges/master/pipeline.svg)](https://gitlab.com/alamminsalo/ram/commits/master)\n\n[![coverage report](https://gitlab.com/alamminsalo/ram/badges/master/coverage.svg)](https://gitlab.com/alamminsalo/ram/commits/master)\n\n\n\nLanguage-agnostic openapi code generator.\n\n\n\nExamples available in the following use cases:\n\n* Rust (Rocket, Actix)\n\n* Go (Echo)\n\n* Postgresql schema\n\n* Java (classes, Spark)\n\n\n\nHowever, a language can be implemented by supplying a language yaml file and some needed templates for generation. 
Contributions are welcome!\n\n\n\n## Usage\n\n\n\nStart off with an example `config.yaml`:\n\n\n\n```\n\n# Target lang spec\n\n# Can be file path or built-in yaml spec\n\nlang: \"rust\"\n\n\n\n# Template files, optional and used for overriding built-in templates\n\ntemplates:\n\n # Path for custom model template file\n\n model: \"templates/custom_model.rs.template\"\n\n\n\n# File paths, optional\n\n# This example places models to <output>/src/models\n\npaths:\n\n root: \"src\",\n\n model: \"models\"\n\n\n\n# Custom formatting rule example\n\n# Can be used in templates with {{anglebrackets \"something\"}}\n\nformat:\n\n anglebrackets: \"<{{value}}>\"\n\n```\n\n\n\nThen simply run `ram -c config.yaml -i <path/to/openapi.yaml> -o <output/folder>` to run code generation.\n\n\n", "file_path": "README.md", "rank": 58, "score": 13.72319054737294 }, { "content": "use failure::{format_err, Error};\n\nuse rust_embed::RustEmbed;\n\nuse std::path::Path;\n\n\n\n#[derive(RustEmbed)]\n\n#[folder = \"assets/\"]\n\npub struct Assets;\n\n\n\nimpl Assets {\n\n /// Tries to read file first from fs\n\n /// then from bundled assets\n\n pub fn read_file(path: &Path) -> Result<String, Error> {\n\n std::fs::read_to_string(path).or_else(|_| {\n\n let pathstr = path.to_str().unwrap();\n\n Self::get(pathstr)\n\n .and_then(|cow| Some(cow.into_owned()))\n\n .and_then(|bytes| String::from_utf8(bytes).ok())\n\n .ok_or(format_err!(\"failed to read asset: {}\", pathstr))\n\n })\n\n }\n\n}\n", "file_path": "src/assets.rs", "rank": 59, "score": 13.686895337633068 }, { "content": " .filter(|f| f.is_array)\n\n .collect()\n\n }\n\n\n\n pub fn model_type(&self) -> ModelType {\n\n if self.is_array {\n\n ModelType::Array\n\n } else if self.is_object {\n\n ModelType::Object\n\n } else {\n\n ModelType::Primitive\n\n }\n\n }\n\n\n\n // translates model\n\n pub fn translate(self, lang: &Lang) -> Model {\n\n let mut translated_type = match self.model_type() {\n\n ModelType::Array => 
lang.translate_array(&self),\n\n ModelType::Object => {\n\n if let Some(ref refpath) = self.ref_path {\n", "file_path": "src/model.rs", "rank": 60, "score": 13.546233703599173 }, { "content": "use super::{Config, Lang, Model, ResourceGroup};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n// full model generation state, to contain processed models and apis\n\n#[derive(Debug, Deserialize, Serialize)]\n\npub struct State {\n\n pub models: Vec<Model>,\n\n pub cfg: Config,\n\n pub lang: Lang,\n\n pub resource_groups: Vec<ResourceGroup>,\n\n pub no_defaults: bool,\n\n}\n", "file_path": "src/state.rs", "rank": 61, "score": 13.376710205494861 }, { "content": " PathBuf::from(abspath)\n\n } else if let Some(ref inpath) = f.file_in {\n\n // get location from 'in' using config.files\n\n let path = state.cfg.get_path(inpath, &state.lang);\n\n path\n\n } else {\n\n // use rootpath\n\n let path = state.cfg.get_path(\"root\", &state.lang);\n\n path\n\n };\n\n\n\n // If file name is defined, use it as output for file.\n\n // If not, then assume the filenames are found inside the templates\n\n match f.filename {\n\n Some(filename) => vec![(dirpath.join(filename), render)],\n\n _ => util::split_files(render, dirpath),\n\n }\n\n })\n\n .collect()\n\n}\n", "file_path": "src/lib.rs", "rank": 62, "score": 12.20474255668784 }, { "content": " pub schema_type: String,\n\n pub properties: Vec<Box<Model>>,\n\n pub readonly: bool,\n\n pub additional_properties: Option<Box<Model>>,\n\n pub items: Option<Box<Model>>,\n\n pub description: Option<String>,\n\n pub format: Option<String>,\n\n pub nullable: bool,\n\n pub default: Option<Value>,\n\n #[serde(skip)]\n\n pub ref_path: Option<String>,\n\n\n\n /// Model extensions.\n\n /// Used for additional non-openapi specific information.\n\n /// Examples: `x-sql-table`, `x-go-tag`\n\n /// Flattened: use directly from model `{{ x-sql-name }}`\n\n #[serde(flatten)]\n\n pub extensions: IndexMap<String, Value>,\n\n\n\n // additional helper properties, 
these are derived from the 'base' properties\n", "file_path": "src/model.rs", "rank": 63, "score": 11.482038674981254 }, { "content": "\n\n /// Short summary\n\n pub summary: Option<String>,\n\n\n\n /// Resource description\n\n pub description: Option<String>,\n\n\n\n /// Path params\n\n pub path_params: Vec<Param>,\n\n\n\n /// Query params\n\n pub query_params: Vec<Param>,\n\n\n\n /// Result\n\n pub responses: HashMap<String, Model>,\n\n}\n\n\n\nimpl Resource {\n\n pub fn new(\n\n path: &str,\n", "file_path": "src/resource.rs", "rank": 64, "score": 10.630709885447839 }, { "content": " .into_iter()\n\n .map(|m| Box::new(m.translate(lang)))\n\n .collect(),\n\n array_properties: self\n\n .array_properties\n\n .into_iter()\n\n .map(|m| Box::new(m.translate(lang)))\n\n .collect(),\n\n ..self\n\n }\n\n }\n\n\n\n // normalizes child refs (clones object from input map)\n\n pub fn normalize(self, models_map: &HashMap<String, Self>) -> Self {\n\n Self {\n\n object_properties: self\n\n .object_properties\n\n .into_iter()\n\n .map(|m| {\n\n models_map\n", "file_path": "src/model.rs", "rank": 65, "score": 10.51018256667089 }, { "content": "use super::helper;\n\nuse failure::Fallible;\n\nuse glob::Pattern;\n\nuse handlebars::Handlebars;\n\nuse itertools::Itertools;\n\nuse log::info;\n\nuse openapi::v3_0::{ObjectOrReference, Parameter, Schema, Spec};\n\nuse std::collections::HashMap;\n\nuse std::fs;\n\nuse std::path::{Path, PathBuf};\n\n\n\n// returns ignore patterns from '.ramignore'\n", "file_path": "src/util.rs", "rank": 66, "score": 10.425629506331932 }, { "content": " .to_str()\n\n .unwrap()\n\n .to_owned();\n\n AddFile {\n\n template,\n\n ..f.clone()\n\n }\n\n });\n\n\n\n lang.into_iter()\n\n .flat_map(|l| l.files_relative())\n\n .chain(config_files)\n\n .collect()\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 67, "score": 10.1771272264921 }, { "content": " .additional_properties\n\n .as_ref()\n\n .and_then(|obj_or_ref| match obj_or_ref {\n\n 
ObjectOrReference::Object(s) => Some(Box::new(Model::new(\"\", &s, \"\"))),\n\n _ => None,\n\n });\n\n\n\n let schema_type = schema\n\n .schema_type\n\n .as_ref()\n\n .unwrap_or(&String::from(\"object\"))\n\n .to_owned();\n\n\n\n // If input name is \"\", try to extract one from ref_path.\n\n // Otherwise use the name.\n\n let def: String = if def == \"\" {\n\n util::extract_model_name(schema).unwrap_or_default()\n\n } else {\n\n def.into()\n\n };\n", "file_path": "src/model.rs", "rank": 68, "score": 9.758188062783317 }, { "content": " Some(item) => item.schema_type == \"object\",\n\n _ => false,\n\n })\n\n })\n\n .count();\n\n let count_vec = props_iter\n\n .clone()\n\n .filter(|p| p.schema_type == \"array\")\n\n .count();\n\n\n\n println!(\"counted test variables!\");\n\n\n\n // do some regex checking\n\n // check that \"model.rs\" contains 9 occurences of 'pub'\n\n let contents: String = std::fs::read_to_string(\n\n files\n\n .get(&format!(\"{}.rs\", &model.name.to_snake_case()))\n\n .unwrap()\n\n .path(),\n\n )\n", "file_path": "tests/test_generate.rs", "rank": 69, "score": 9.438326217400395 }, { "content": "\n\n // gather some variables from models\n\n for model in models {\n\n let props_iter = model.properties.iter().chain(\n\n model\n\n .additional_properties\n\n .iter()\n\n .flat_map(|p| &p.properties),\n\n );\n\n\n\n // pub should occur in field names and struct def\n\n let count_pub = props_iter.clone().count() + 1;\n\n let count_i32 = props_iter\n\n .clone()\n\n .filter(|p| {\n\n p.schema_type == \"integer\"\n\n || (p.schema_type == \"array\"\n\n && match p.items.as_ref() {\n\n Some(item) => item.schema_type == \"integer\",\n\n _ => false,\n", "file_path": "tests/test_generate.rs", "rank": 70, "score": 8.800967383498806 }, { "content": " // this is a reference to another object\n\n // get model name from ref_path\n\n util::model_name_from_ref(&refpath)\n\n .map(|t| lang.translate_modelname(&t))\n\n .expect(\"failed to get model name from ref\")\n\n } 
else {\n\n // this is an inline object, which we name by it's key\n\n lang.translate_modelname(&self.name)\n\n }\n\n }\n\n ModelType::Primitive => lang.translate_primitive(\n\n &self.schema_type,\n\n self.format.as_ref().unwrap_or(&String::from(\"default\")),\n\n ),\n\n };\n\n\n\n // format if nullable\n\n if self.nullable {\n\n translated_type = lang\n\n .format(\"nullable\", &translated_type)\n", "file_path": "src/model.rs", "rank": 71, "score": 8.739864337526587 }, { "content": "use log::{info, LevelFilter};\n\nuse ram::{Config, GroupingStrategy};\n\nuse std::panic;\n\nuse std::path::PathBuf;\n\nuse structopt::StructOpt;\n\n\n\n#[derive(Debug, StructOpt)]\n\n#[structopt(name = \"ram\", about = \"openapi generator\")]\n", "file_path": "src/main.rs", "rank": 72, "score": 8.636615980700913 }, { "content": " method: &str,\n\n op: &Operation,\n\n parameters: &HashMap<String, Parameter>,\n\n mut path_params: Vec<Param>,\n\n mut query_params: Vec<Param>,\n\n ) -> Resource {\n\n // extend route params with local method params\n\n path_params.extend(get_params_operation(op, \"path\", parameters));\n\n query_params.extend(get_params_operation(op, \"query\", parameters));\n\n\n\n Resource {\n\n path: path.into(),\n\n method: method.into(),\n\n name: op\n\n .operation_id\n\n .as_ref()\n\n .expect(\"missing operation_id on resource\")\n\n .clone(),\n\n summary: op.summary.clone(),\n\n description: op.description.clone(),\n", "file_path": "src/resource.rs", "rank": 73, "score": 8.289327962368631 }, { "content": " })\n\n })\n\n .count();\n\n let count_option = props_iter\n\n .clone()\n\n .filter(|p| {\n\n p.nullable\n\n || (p.schema_type == \"array\"\n\n && match p.items.as_ref() {\n\n Some(item) => item.nullable,\n\n _ => false,\n\n })\n\n })\n\n .count();\n\n let count_box = props_iter\n\n .clone()\n\n .filter(|p| {\n\n p.schema_type == \"object\"\n\n || (p.schema_type == \"array\"\n\n && match p.items.as_ref() {\n", "file_path": "tests/test_generate.rs", "rank": 74, 
"score": 8.283759017137633 }, { "content": " }\n\n\n\n pub fn translate(self, lang: &Lang) -> Resource {\n\n let tr_params = |params: Vec<Param>| {\n\n params\n\n .into_iter()\n\n .map(|p| Param {\n\n model: p.model.translate(lang),\n\n ..p\n\n })\n\n .collect()\n\n };\n\n\n\n Resource {\n\n // also formats path\n\n path: lang.format_path(self.path),\n\n query_params: tr_params(self.query_params),\n\n path_params: tr_params(self.path_params),\n\n responses: self\n\n .responses\n", "file_path": "src/resource.rs", "rank": 75, "score": 7.805754744482945 }, { "content": " path.pop();\n\n\n\n Ok(\n\n // fold values in map b with map c\n\n // (which contains now all the schemas)\n\n // recursively so we keep track of collected schemas so far\n\n b.values()\n\n .fold(merged, |mut acc: HashMap<String, Schema>, schema| {\n\n for ref_path in iter_ref_paths(&schema).filter_map(ref_file).unique() {\n\n acc.extend(schemas_from_ref(&path, ref_path, &acc).unwrap());\n\n }\n\n\n\n acc\n\n }),\n\n )\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 76, "score": 7.656067959116074 }, { "content": "\n\n let mut model = Model {\n\n name: name.into(),\n\n ref_path: schema.ref_path.clone(),\n\n items: schema.items.as_ref().map(|s| {\n\n let name = util::extract_model_name(s).unwrap_or_default();\n\n Box::new(Model::new(&name, &s, \"\"))\n\n }),\n\n nullable: schema.nullable.unwrap_or(false),\n\n description: schema.description.clone(),\n\n format: schema.format.clone(),\n\n default: schema.default.clone(),\n\n extensions: schema.extensions.clone(),\n\n readonly: schema.read_only.unwrap_or(false),\n\n def,\n\n schema_type,\n\n properties,\n\n additional_properties,\n\n ..Default::default()\n\n };\n", "file_path": "src/model.rs", "rank": 77, "score": 7.619885657875415 }, { "content": " data.push(line);\n\n }\n\n }\n\n\n\n // push last file\n\n if mark.is_some() {\n\n filemap.push((mark.take().unwrap(), data.join(\"\\n\")));\n\n }\n\n\n\n filemap\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n 
use super::*;\n\n\n\n #[test]\n\n fn test_split_files() {\n\n let input = \"\n\n %filebegin=a.bar\n", "file_path": "src/util.rs", "rank": 78, "score": 6.87893367448351 }, { "content": " path_params.clone(),\n\n query_params.clone(),\n\n ),\n\n (\n\n path.clone(),\n\n \"POST\",\n\n item.post.as_ref(),\n\n path_params.clone(),\n\n query_params.clone(),\n\n ),\n\n (\n\n path.clone(),\n\n \"DELETE\",\n\n item.delete.as_ref(),\n\n path_params.clone(),\n\n query_params.clone(),\n\n ),\n\n (\n\n path.clone(),\n\n \"OPTIONS\",\n", "file_path": "src/resource.rs", "rank": 79, "score": 6.837897763040724 }, { "content": " item.options.as_ref(),\n\n path_params.clone(),\n\n query_params.clone(),\n\n ),\n\n (\n\n path.clone(),\n\n \"HEAD\",\n\n item.head.as_ref(),\n\n path_params.clone(),\n\n query_params.clone(),\n\n ),\n\n (\n\n path.clone(),\n\n \"PATCH\",\n\n item.patch.as_ref(),\n\n path_params.clone(),\n\n query_params.clone(),\n\n ),\n\n (\n\n path.clone(),\n", "file_path": "src/resource.rs", "rank": 80, "score": 6.791429487293911 }, { "content": " \"TRACE\",\n\n item.trace.as_ref(),\n\n path_params.clone(),\n\n query_params.clone(),\n\n ),\n\n ]\n\n .into_iter()\n\n .filter_map(|(path, method, op, path_params, query_params)| {\n\n op.and_then(|op| Some((path, method, op, path_params, query_params)))\n\n })\n\n });\n\n let strat_iter = iter.filter_map(|(path, method, op, path_params, query_params)| {\n\n match grouping_strategy {\n\n // everything is in same group\n\n GroupingStrategy::Nothing => {\n\n Some((\"\".into(), path, method, op, path_params, query_params))\n\n }\n\n\n\n // groups by path\n\n GroupingStrategy::Path => {\n", "file_path": "src/resource.rs", "rank": 81, "score": 6.707934411839583 }, { "content": " fieldformat == \"date-time\"\n\n } else {\n\n false\n\n }\n\n })\n\n }\n\n\n\n fn set_is_object(&mut self) {\n\n self.is_object = self.schema_type == \"object\"\n\n }\n\n\n\n fn set_is_array(&mut self) {\n\n self.is_array = self.schema_type == 
\"array\"\n\n }\n\n\n\n fn set_is_primitive(&mut self) {\n\n self.is_primitive = !self.is_array && !self.is_object\n\n }\n\n\n\n fn set_primitive_properties<'a>(&'a mut self) {\n", "file_path": "src/model.rs", "rank": 82, "score": 6.208142488027062 }, { "content": " pub is_object: bool,\n\n pub is_array: bool,\n\n pub is_primitive: bool,\n\n pub has_date: bool,\n\n pub has_datetime: bool,\n\n pub object_properties: Vec<Box<Model>>,\n\n pub array_properties: Vec<Box<Model>>,\n\n pub primitive_properties: Vec<Box<Model>>,\n\n}\n\n\n\nimpl Model {\n\n pub fn new(name: &str, schema: &Schema, def: &str) -> Self {\n\n let properties: Vec<Box<Model>> = schema\n\n .properties\n\n .iter()\n\n .flatten()\n\n .map(|(name, schema)| Box::new(Model::new(&name, schema, \"\")))\n\n .collect();\n\n\n\n let additional_properties: Option<Box<Model>> = schema\n", "file_path": "src/model.rs", "rank": 83, "score": 6.1081645688514605 }, { "content": "use inflector::Inflector;\n\nuse ram::{Config, GroupingStrategy};\n\nuse regex::Regex;\n\nuse std::collections::HashMap;\n\nuse std::fs::DirEntry;\n\nuse std::panic;\n\nuse std::path::PathBuf;\n\n\n\n#[test]\n", "file_path": "tests/test_generate.rs", "rank": 84, "score": 6.065716514835517 }, { "content": " .unwrap_or(translated_type)\n\n };\n\n\n\n Model {\n\n schema_type: translated_type,\n\n properties: self\n\n .properties\n\n .into_iter()\n\n .map(|m| Box::new(m.translate(lang)))\n\n .collect(),\n\n additional_properties: self\n\n .additional_properties\n\n .and_then(|m| Some(Box::new(m.translate(lang)))),\n\n primitive_properties: self\n\n .primitive_properties\n\n .into_iter()\n\n .map(|m| Box::new(m.translate(lang)))\n\n .collect(),\n\n object_properties: self\n\n .object_properties\n", "file_path": "src/model.rs", "rank": 85, "score": 5.833171036598367 }, { "content": " Some((path.clone(), path, method, op, path_params, query_params))\n\n }\n\n\n\n // groups by first tag\n\n GroupingStrategy::FirstTag => op\n\n .tags\n\n 
.as_ref()\n\n .and_then(|tags| tags.get(0))\n\n .and_then(|tag| Some((tag.clone(), path, method, op, path_params, query_params))),\n\n\n\n // groups by operation id\n\n GroupingStrategy::Operation => op.operation_id.as_ref().and_then(|operationid| {\n\n Some((\n\n operationid.clone(),\n\n path,\n\n method,\n\n op,\n\n path_params,\n\n query_params,\n\n ))\n", "file_path": "src/resource.rs", "rank": 86, "score": 5.65015995112916 }, { "content": " models = ram::generate_models_v3(&spec, &specpath);\n\n assert_eq!(models.len(), models_count);\n\n let state = ram::create_state(cfg, models.clone(), vec![], false);\n\n ram::util::write_files(&output, ram::generate_files(state));\n\n }\n\n _ => {}\n\n };\n\n\n\n // map files to name -> file\n\n let files: HashMap<String, DirEntry> =\n\n std::fs::read_dir(&PathBuf::from(\"tests_output/models/src/model\"))\n\n .unwrap()\n\n .map(|f| {\n\n let f = f.unwrap();\n\n (f.file_name().to_str().unwrap().into(), f)\n\n })\n\n .collect();\n\n\n\n // assert files count: models + mod file\n\n assert_eq!(files.len(), models_count + 1);\n", "file_path": "tests/test_generate.rs", "rank": 87, "score": 5.492852801645476 }, { "content": " ram::create_state(cfg, models, resource_groups, args.no_defaults)\n\n }\n\n _ => {\n\n panic!(\"unsupported openapi version\");\n\n }\n\n };\n\n\n\n // output raw state as json\n\n if args.json {\n\n println!(\n\n \"{}\",\n\n serde_json::to_string(&state).expect(\"failed to serialize state!\")\n\n );\n\n }\n\n\n\n // if output defined, write files\n\n if let Some(output) = args.output {\n\n let files = ram::generate_files(state);\n\n ram::util::write_files(&output, files);\n\n info!(\"All operations finished!\")\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 88, "score": 5.411316050862741 }, { "content": " self.primitive_properties = self\n\n .properties\n\n .iter()\n\n .cloned()\n\n .chain(\n\n self.additional_properties\n\n .iter()\n\n .cloned()\n\n .flat_map(|p| p.primitive_properties),\n\n )\n\n 
.filter(|f| f.is_primitive)\n\n .collect();\n\n }\n\n\n\n fn set_object_properties<'a>(&'a mut self) {\n\n self.object_properties = self\n\n .properties\n\n .iter()\n\n .cloned()\n\n .chain(\n", "file_path": "src/model.rs", "rank": 89, "score": 5.170945364132342 }, { "content": " self.additional_properties\n\n .iter()\n\n .cloned()\n\n .flat_map(|p| p.object_properties),\n\n )\n\n .filter(|f| f.is_object)\n\n .collect()\n\n }\n\n\n\n fn set_array_properties<'a>(&'a mut self) {\n\n self.array_properties = self\n\n .properties\n\n .iter()\n\n .cloned()\n\n .chain(\n\n self.additional_properties\n\n .iter()\n\n .cloned()\n\n .flat_map(|p| p.array_properties),\n\n )\n", "file_path": "src/model.rs", "rank": 90, "score": 5.0687886796695825 }, { "content": " path_params,\n\n query_params,\n\n responses: op\n\n // Take 200 application/json from response content and apply that as type\n\n .responses\n\n .iter()\n\n .filter_map(|(code, resp)| {\n\n resp.content.as_ref().and_then(|contentmap| {\n\n contentmap.get(\"application/json\").and_then(|mediatype| {\n\n match &mediatype.schema {\n\n Some(ObjectOrReference::Object(schema)) => {\n\n Some((code.clone(), Model::new(\"\", &schema, \"\")))\n\n }\n\n _ => None,\n\n }\n\n })\n\n })\n\n })\n\n .collect(),\n\n }\n", "file_path": "src/resource.rs", "rank": 91, "score": 4.963403420914544 }, { "content": " ram::util::write_files(&output, ram::generate_files(state));\n\n }\n\n _ => {}\n\n };\n\n\n\n // map files to name -> file\n\n let files: HashMap<String, DirEntry> =\n\n std::fs::read_dir(&PathBuf::from(\"tests_output/res/src/api\"))\n\n .unwrap()\n\n .map(|f| {\n\n let f = f.unwrap();\n\n (f.file_name().to_str().unwrap().into(), f)\n\n })\n\n .collect();\n\n\n\n // assert files count: res_count + mod file\n\n assert_eq!(files.len(), res_count + 1);\n\n\n\n // gather some variables from models\n\n for group in resource_groups {\n", "file_path": "tests/test_generate.rs", "rank": 92, "score": 4.840370783400718 }, { "content": " // 
do some regex checking\n\n let contents: String = std::fs::read_to_string(\n\n files\n\n .get(&format!(\"{}.rs\", &group.name.to_snake_case()))\n\n .unwrap()\n\n .path(),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(\n\n Regex::new(r\"#\\[get(.+)\\]\")\n\n .unwrap()\n\n .find_iter(&contents)\n\n .count(),\n\n group.resources.iter().filter(|r| r.method == \"GET\").count()\n\n );\n\n\n\n // TODO enable back after rocket Responder is sorted out in template\n\n // assert_eq!(\n\n // Regex::new(r\"Box<.+>\").unwrap().find_iter(&contents).count(),\n\n // group\n\n // .resources\n\n // .iter()\n\n // .filter(|r| r.responses.get(\"200\").is_some())\n\n // .count()\n\n // );\n\n }\n\n}\n", "file_path": "tests/test_generate.rs", "rank": 93, "score": 4.769646030218854 }, { "content": "\n\n // do this only once in creation\n\n model.apply_properties();\n\n\n\n // add helpers\n\n model\n\n }\n\n\n\n fn apply_properties(&mut self) {\n\n self.set_has_date();\n\n self.set_has_datetime();\n\n self.set_is_object();\n\n self.set_is_array();\n\n self.set_is_primitive();\n\n\n\n // set child properties\n\n for child in self.properties.iter_mut() {\n\n child.apply_properties();\n\n }\n\n\n", "file_path": "src/model.rs", "rank": 94, "score": 4.740798247907982 }, { "content": " }),\n\n }\n\n });\n\n\n\n // collect resourcegroups\n\n strat_iter\n\n .group_by(|(key, _, _, _, _, _)| key.clone())\n\n .into_iter()\n\n .map(|(key, group)| ResourceGroup {\n\n name: key.into(),\n\n resources: group\n\n .into_iter()\n\n .map(|(_, path, method, op, path_params, query_params)| {\n\n Resource::new(\n\n path.as_str(),\n\n method,\n\n op,\n\n parameters,\n\n path_params,\n\n query_params,\n\n )\n\n })\n\n .collect(),\n\n grouping_strategy,\n\n })\n\n .collect()\n\n}\n", "file_path": "src/resource.rs", "rank": 95, "score": 4.435219603704194 }, { "content": " .get(&m.def)\n\n .expect(&format!(\"failed to get model '{}' from map\", &m.def))\n\n })\n\n .cloned()\n\n .map(Box::new)\n\n .collect(),\n\n\n\n 
array_properties: self\n\n .array_properties\n\n .into_iter()\n\n .map(|m| {\n\n let mut items: Box<Model> = m.items.expect(\"array item was None\").clone();\n\n if items.is_object {\n\n items = Box::new(\n\n models_map\n\n .get(&items.def)\n\n .expect(&format!(\n\n \"failed to get array item '{}' from map\",\n\n &items.def\n\n ))\n", "file_path": "src/model.rs", "rank": 96, "score": 3.948065375145623 }, { "content": " aaaaaaaaaaaaaaa\n\n %filebegin=b.bar\n\n aaaaaaaaaaaaaaa\n\n %filebegin=c.bar\n\n aaaaaaaaaaaaaaa\n\n %filebegin=d.bar\n\n aaaaaaaaaaaaaaa\n\n %filebegin=e.bar\n\n aaaaaaaaaaaaa\n\n %filebegin=f.bar\n\n aaaaaaaaaaaaaaa\n\n %filebegin=g.bar\n\n aaaaaaaaaaaaaaa\n\n %filebegin=h.bar\n\n aaaaaaaaaaaaaaa\n\n \";\n\n\n\n let output = split_files(input.into(), PathBuf::new());\n\n\n\n assert_eq!(output.len(), 8);\n\n }\n\n}\n", "file_path": "src/util.rs", "rank": 97, "score": 3.573139764727952 }, { "content": "Copyright 2020 Antti Lamminsalo\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the \"Software\"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.\n", "file_path": "LICENSE.md", "rank": 98, "score": 2.8656857546554475 }, { "content": " .clone(),\n\n )\n\n }\n\n Model {\n\n items: Some(items),\n\n ..*m\n\n }\n\n })\n\n .map(Box::new)\n\n .collect(),\n\n\n\n ..self\n\n }\n\n }\n\n}\n", "file_path": "src/model.rs", "rank": 99, "score": 2.708170240876135 } ]
Rust
src/curve25519/point.rs
cronokirby/eddo
015b8ce77249605f2f760ea5d907a0e6e1ea79bd
use std::{ convert::{TryFrom, TryInto}, ops::{Add, Mul}, }; use subtle::{Choice, ConditionallySelectable, ConstantTimeEq}; use super::{arithmetic::U256, error::SignatureError, field::Z25519, scalar::Scalar}; const D: Z25519 = Z25519 { value: U256 { limbs: [ 0x75eb4dca135978a3, 0x00700a4d4141d8ab, 0x8cc740797779e898, 0x52036cee2b6ffe73, ], }, }; pub const B: Point = Point { x: Z25519 { value: U256 { limbs: [ 0xc9562d608f25d51a, 0x692cc7609525a7b2, 0xc0a4e231fdd6dc5c, 0x216936d3cd6e53fe, ], }, }, y: Z25519 { value: U256 { limbs: [ 0x6666666666666658, 0x6666666666666666, 0x6666666666666666, 0x6666666666666666, ], }, }, z: Z25519 { value: U256 { limbs: [1, 0, 0, 0], }, }, t: Z25519 { value: U256 { limbs: [ 0x6dde8ab3a5b7dda3, 0x20f09f80775152f5, 0x66ea4e8e64abe37d, 0x67875f0fd78b7665, ], }, }, }; #[derive(Clone, Copy, Debug)] pub struct Point { x: Z25519, y: Z25519, z: Z25519, t: Z25519, } impl Point { fn identity() -> Point { Point { x: Z25519::from(0), y: Z25519::from(1), z: Z25519::from(1), t: Z25519::from(0), } } fn from_affine_unchecked(x: Z25519, y: Z25519) -> Point { Point { x, y, z: Z25519::from(1), t: x * y, } } #[must_use] fn doubled(&self) -> Point { let a = self.x.squared(); let b = self.y.squared(); let c = self.z.squared() * 2; let h = a + b; let e = h - (self.x + self.y).squared(); let g = a - b; let f = c + g; Point { x: e * f, y: g * h, t: e * h, z: f * g, } } } impl ConditionallySelectable for Point { fn conditional_select(a: &Self, b: &Self, choice: Choice) -> Self { Point { x: Z25519::conditional_select(&a.x, &b.x, choice), y: Z25519::conditional_select(&a.y, &b.y, choice), z: Z25519::conditional_select(&a.z, &b.z, choice), t: Z25519::conditional_select(&a.t, &b.t, choice), } } } impl Into<[u8; 32]> for Point { fn into(self) -> [u8; 32] { let zinv = self.z.inverse(); let x = self.x * zinv; let y = self.y * zinv; let mut out: [u8; 32] = y.into(); out[31] |= ((x.value.limbs[0] & 1) as u8) << 7; out } } impl<'a> TryFrom<&'a [u8]> for Point { type Error 
= SignatureError; fn try_from(value: &'a [u8]) -> Result<Self, Self::Error> { if value.len() < 32 { return Err(SignatureError::InvalidPoint); } let mut value_bytes: [u8; 32] = value[..32].try_into().unwrap(); let x_0 = u64::from(value_bytes[31] >> 7); value_bytes[31] &= 0x7F; let y = Z25519::try_from(&value_bytes[..])?; let y_2 = y.squared(); let u = y_2 - Z25519::from(1); let v = D * y_2 + Z25519::from(1); let mut x = Z25519::fraction_root(u, v).ok_or(SignatureError::InvalidPoint)?; if x_0 == 1 && x.value.eq(U256::from(0)) { return Err(SignatureError::InvalidPoint); } if x_0 != x.value.limbs[0] % 2 { x = -x; } Ok(Point::from_affine_unchecked(x, y)) } } impl Add for Point { type Output = Point; fn add(self, other: Point) -> Self::Output { let a = (self.y - self.x) * (other.y - other.x); let b = (self.y + self.x) * (other.y + other.x); let c = self.t * D * other.t * 2; let d = self.z * other.z * 2; let e = b - a; let f = d - c; let g = d + c; let h = b + a; Point { x: e * f, y: g * h, t: e * h, z: f * g, } } } impl Mul<Scalar> for Point { type Output = Point; fn mul(self, other: Scalar) -> Self::Output { let mut out = Point::identity(); const WINDOW_SIZE: usize = 4; let mut window = [Point::identity(); (1 << WINDOW_SIZE) - 1]; window[0] = self; for i in 1..window.len() { window[i] = self + window[i - 1]; } for x in other.value.limbs.iter().rev() { for i in (0..64).step_by(WINDOW_SIZE).rev() { out = out.doubled(); out = out.doubled(); out = out.doubled(); out = out.doubled(); let w = ((x >> i) & ((1 << WINDOW_SIZE) - 1)) as usize; let mut selected = Point::identity(); for i in 0..window.len() { selected.conditional_assign(&window[i], w.ct_eq(&(i + 1))); } out = out + selected; } } out } }
use std::{ convert::{TryFrom, TryInto}, ops::{Add, Mul}, }; use subtle::{Choice, ConditionallySelectable, ConstantTimeEq}; use super::{arithmetic::U256, error::SignatureError, field::Z25519, scalar::Scalar}; const D: Z25519 = Z25519 { value: U256 { limbs: [ 0x75eb4dca135978a3, 0x00700a4d4141d8ab, 0x8cc740797779e898, 0x52036cee2b6ffe73, ], }, }; pub const B: Point = Point { x: Z25519 { value: U256 { limbs: [ 0xc9562d608f25d51a, 0x692cc7609525a7b2, 0xc0a4e231fdd6dc5c, 0x216936d3cd6e53fe, ], }, }, y: Z25519 { value: U256 { limbs: [ 0x6666666666666658, 0x6666666666666666, 0x6666666666666666, 0x6666666666666666, ], }, }, z: Z25519 { value: U256 { limbs: [1, 0, 0, 0], }, }, t: Z25519 { value: U256 { limbs: [ 0x6dde8ab3a5b7dda3, 0x20f09f80775152f5, 0x66ea4e8e64abe37d, 0x67875f0fd78b7665, ], }, }, }; #[derive(Clone, Copy, Debug)] pub struct Point { x: Z25519, y: Z25519, z: Z25519, t: Z25519, } impl Point { fn identity() -> Point { Point { x: Z25519::from(0), y: Z25519::from(1), z: Z25519::from(1), t: Z25519::from(0), } } fn from_affine_unchecked(x: Z25519, y: Z2551
#[must_use] fn doubled(&self) -> Point { let a = self.x.squared(); let b = self.y.squared(); let c = self.z.squared() * 2; let h = a + b; let e = h - (self.x + self.y).squared(); let g = a - b; let f = c + g; Point { x: e * f, y: g * h, t: e * h, z: f * g, } } } impl ConditionallySelectable for Point { fn conditional_select(a: &Self, b: &Self, choice: Choice) -> Self { Point { x: Z25519::conditional_select(&a.x, &b.x, choice), y: Z25519::conditional_select(&a.y, &b.y, choice), z: Z25519::conditional_select(&a.z, &b.z, choice), t: Z25519::conditional_select(&a.t, &b.t, choice), } } } impl Into<[u8; 32]> for Point { fn into(self) -> [u8; 32] { let zinv = self.z.inverse(); let x = self.x * zinv; let y = self.y * zinv; let mut out: [u8; 32] = y.into(); out[31] |= ((x.value.limbs[0] & 1) as u8) << 7; out } } impl<'a> TryFrom<&'a [u8]> for Point { type Error = SignatureError; fn try_from(value: &'a [u8]) -> Result<Self, Self::Error> { if value.len() < 32 { return Err(SignatureError::InvalidPoint); } let mut value_bytes: [u8; 32] = value[..32].try_into().unwrap(); let x_0 = u64::from(value_bytes[31] >> 7); value_bytes[31] &= 0x7F; let y = Z25519::try_from(&value_bytes[..])?; let y_2 = y.squared(); let u = y_2 - Z25519::from(1); let v = D * y_2 + Z25519::from(1); let mut x = Z25519::fraction_root(u, v).ok_or(SignatureError::InvalidPoint)?; if x_0 == 1 && x.value.eq(U256::from(0)) { return Err(SignatureError::InvalidPoint); } if x_0 != x.value.limbs[0] % 2 { x = -x; } Ok(Point::from_affine_unchecked(x, y)) } } impl Add for Point { type Output = Point; fn add(self, other: Point) -> Self::Output { let a = (self.y - self.x) * (other.y - other.x); let b = (self.y + self.x) * (other.y + other.x); let c = self.t * D * other.t * 2; let d = self.z * other.z * 2; let e = b - a; let f = d - c; let g = d + c; let h = b + a; Point { x: e * f, y: g * h, t: e * h, z: f * g, } } } impl Mul<Scalar> for Point { type Output = Point; fn mul(self, other: Scalar) -> Self::Output { let mut out = 
Point::identity(); const WINDOW_SIZE: usize = 4; let mut window = [Point::identity(); (1 << WINDOW_SIZE) - 1]; window[0] = self; for i in 1..window.len() { window[i] = self + window[i - 1]; } for x in other.value.limbs.iter().rev() { for i in (0..64).step_by(WINDOW_SIZE).rev() { out = out.doubled(); out = out.doubled(); out = out.doubled(); out = out.doubled(); let w = ((x >> i) & ((1 << WINDOW_SIZE) - 1)) as usize; let mut selected = Point::identity(); for i in 0..window.len() { selected.conditional_assign(&window[i], w.ct_eq(&(i + 1))); } out = out + selected; } } out } }
9) -> Point { Point { x, y, z: Z25519::from(1), t: x * y, } }
function_block-function_prefixed
[ { "content": "/// Represents a \"hash value\", as described in Section 6:\n\n/// https://datatracker.ietf.org/doc/html/rfc6234#section-6\n\n///\n\n/// This can be thought of as the ongoing state of our hash function,\n\n/// which gets modified using our message blocks.\n\nstruct HashValue {\n\n data: [u64; 8],\n\n schedule: MessageSchedule,\n\n}\n\n\n\nimpl HashValue {\n\n /// Create an initial hash value, as per Section 6.3:\n\n /// https://datatracker.ietf.org/doc/html/rfc6234#section-6.3\n\n fn initial() -> HashValue {\n\n HashValue {\n\n data: [\n\n 0x6a09e667f3bcc908,\n\n 0xbb67ae8584caa73b,\n\n 0x3c6ef372fe94f82b,\n\n 0xa54ff53a5f1d36f1,\n\n 0x510e527fade682d1,\n\n 0x9b05688c2b3e6c1f,\n\n 0x1f83d9abfb41bd6b,\n\n 0x5be0cd19137e2179,\n\n ],\n", "file_path": "src/sha512.rs", "rank": 0, "score": 70918.85934618868 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n let (public, private) = gen_keypair(&mut OsRng);\n\n\n\n {\n\n c.bench_function(\"generating_keypair\", |b| b.iter(|| gen_keypair(&mut OsRng)));\n\n }\n\n\n\n {\n\n let mut group = c.benchmark_group(\"signing\");\n\n for &size in &[KB, 4 * KB, 16 * KB, 64 * KB, 256 * KB, 1024 * KB] {\n\n let data = vec![0; size];\n\n group.throughput(Throughput::Bytes(size as u64));\n\n group.bench_with_input(BenchmarkId::from_parameter(size), &size, |b, _size| {\n\n b.iter(|| private.sign(black_box(&data)));\n\n });\n\n }\n\n group.finish();\n\n }\n\n\n\n {\n", "file_path": "benches/eddo.rs", "rank": 1, "score": 65550.00676189593 }, { "content": "#[inline]\n\npub fn adc(carry: u8, a: u64, b: u64, out: &mut u64) -> u8 {\n\n #[cfg(target_arch = \"x86_64\")]\n\n {\n\n // Using this intrinsic is perfectly safe\n\n unsafe { arch::_addcarry_u64(carry, a, b, out) }\n\n }\n\n #[cfg(not(target_arch = \"x86_64\"))]\n\n {\n\n // The largest result is 2 * (2^64 - 1) + 1 = 2^65 - 1, which needs exactly 65 bits\n\n // Hence, we use u128. 
Hopefully, Rust will realize that we don't really want to use\n\n // 128 bit operations, but rather want to use an `adc` instruction, or whatever equivalent\n\n // our ISA has, and insert that instead.\n\n let full_res = u128::from(a) + u128::from(b) + u128::from(carry);\n\n *out = full_res as u64;\n\n (full_res >> 64) as u8\n\n }\n\n}\n\n\n\n/// sbb computes out <- a - b - borrow, outputting a new borrow value\n\n///\n\n/// `borrow` must be 0, or 1. The return value will satisfy this constraint\n", "file_path": "src/arch.rs", "rank": 2, "score": 61781.91311778878 }, { "content": "#[inline]\n\npub fn mulc(carry: u64, a: u64, b: u64, out: &mut u64) -> u64 {\n\n let full_res = u128::from(a) * u128::from(b) + u128::from(carry);\n\n *out = full_res as u64;\n\n (full_res >> 64) as u64\n\n}\n", "file_path": "src/arch.rs", "rank": 3, "score": 61781.91311778878 }, { "content": "#[inline]\n\npub fn sbb(borrow: u8, a: u64, b: u64, out: &mut u64) -> u8 {\n\n #[cfg(target_arch = \"x86_64\")]\n\n {\n\n // Using this intrinsic is perfectly safe\n\n unsafe { arch::_subborrow_u64(borrow, a, b, out) }\n\n }\n\n #[cfg(not(target_arch = \"x86_64\"))]\n\n {\n\n // Like with addition, we use a larger type to be able to have carry information\n\n // We also hope that Rust can figure out what we're doing, and replace this\n\n // sequence with an `sbb` instruction\n\n let full_res = i128::from(a) - i128::from(b) - i128::from(borrow);\n\n *out = full_res as u64;\n\n // NOTE: This might leak with odd code generation?\n\n // If this compiles to a branch instruction, then that would be an issue\n\n u8::from(full_res < 0)\n\n }\n\n}\n\n\n\n/// mulc computs out <- a * b + carry, outputting a new carry limb\n", "file_path": "src/arch.rs", "rank": 4, "score": 61781.91311778878 }, { "content": "/// This calculates the SHA-512 hash of some arbitrary input, producing 512 bits of output.\n\n///\n\n/// This implements the function as defined in RFC 6234:\n\n/// 
https://datatracker.ietf.org/doc/html/rfc6234\n\npub fn hash(message: &[u8]) -> [u8; HASH_SIZE] {\n\n let mut hash_value = HashValue::initial();\n\n\n\n let mut blocks = message.chunks_exact(BLOCK_SIZE);\n\n for block in &mut blocks {\n\n hash_value.update(block.try_into().unwrap());\n\n }\n\n\n\n let remainder = blocks.remainder();\n\n let remainder_len = remainder.len();\n\n\n\n // Now, we need to handle padding, as per Section 4.2:\n\n // https://datatracker.ietf.org/doc/html/rfc6234#section-4.2\n\n\n\n // This buffer is used to contain whatever remaining blocks we feed into the hasher\n\n let mut scratch_block = [0; BLOCK_SIZE];\n\n scratch_block[..remainder_len].copy_from_slice(remainder);\n\n\n\n // a. \"1\" is appended\n\n scratch_block[remainder_len] = 0b1000_0000;\n", "file_path": "src/sha512.rs", "rank": 5, "score": 60391.60537000316 }, { "content": "#[inline]\n\nfn maj(x: u64, y: u64, z: u64) -> u64 {\n\n (x & y) ^ (x & z) ^ (y & z)\n\n}\n\n\n", "file_path": "src/sha512.rs", "rank": 6, "score": 53481.570745155965 }, { "content": "#[inline]\n\nfn ch(x: u64, y: u64, z: u64) -> u64 {\n\n (x & y) ^ (!x & z)\n\n}\n\n\n", "file_path": "src/sha512.rs", "rank": 7, "score": 53481.570745155965 }, { "content": "#[inline]\n\nfn bsig1(x: u64) -> u64 {\n\n x.rotate_right(14) ^ x.rotate_right(18) ^ x.rotate_right(41)\n\n}\n\n\n", "file_path": "src/sha512.rs", "rank": 8, "score": 48310.34740746918 }, { "content": "#[inline]\n\nfn bsig0(x: u64) -> u64 {\n\n x.rotate_right(28) ^ x.rotate_right(34) ^ x.rotate_right(39)\n\n}\n\n\n", "file_path": "src/sha512.rs", "rank": 9, "score": 48310.34740746918 }, { "content": "#[inline]\n\nfn ssig0(x: u64) -> u64 {\n\n x.rotate_right(1) ^ x.rotate_right(8) ^ (x >> 7)\n\n}\n\n\n", "file_path": "src/sha512.rs", "rank": 10, "score": 48310.34740746918 }, { "content": "#[inline]\n\nfn ssig1(x: u64) -> u64 {\n\n x.rotate_right(19) ^ x.rotate_right(61) ^ (x >> 6)\n\n}\n\n\n\n/// The table of constants used in SHA-512 (and 
SHA-384).\n\n///\n\n/// This table is at the end of Section 5.2:\n\n/// https://datatracker.ietf.org/doc/html/rfc6234#section-5.2\n\n#[rustfmt::skip]\n\nconst K: [u64; 80] = [\n\n 0x428a2f98d728ae22, 0x7137449123ef65cd, 0xb5c0fbcfec4d3b2f, 0xe9b5dba58189dbbc,\n\n 0x3956c25bf348b538, 0x59f111f1b605d019, 0x923f82a4af194f9b, 0xab1c5ed5da6d8118,\n\n 0xd807aa98a3030242, 0x12835b0145706fbe, 0x243185be4ee4b28c, 0x550c7dc3d5ffb4e2,\n\n 0x72be5d74f27b896f, 0x80deb1fe3b1696b1, 0x9bdc06a725c71235, 0xc19bf174cf692694,\n\n 0xe49b69c19ef14ad2, 0xefbe4786384f25e3, 0x0fc19dc68b8cd5b5, 0x240ca1cc77ac9c65,\n\n 0x2de92c6f592b0275, 0x4a7484aa6ea6e483, 0x5cb0a9dcbd41fbd4, 0x76f988da831153b5,\n\n 0x983e5152ee66dfab, 0xa831c66d2db43210, 0xb00327c898fb213f, 0xbf597fc7beef0ee4,\n\n 0xc6e00bf33da88fc2, 0xd5a79147930aa725, 0x06ca6351e003826f, 0x142929670a0e6e70,\n\n 0x27b70a8546d22ffc, 0x2e1b21385c26c926, 0x4d2c6dfc5ac42aed, 0x53380d139d95b3df,\n\n 0x650a73548baf63de, 0x766a0abb3c77b2a8, 0x81c2c92e47edaee6, 0x92722c851482353b,\n", "file_path": "src/sha512.rs", "rank": 11, "score": 48310.34740746918 }, { "content": "fn decode_prefixed_hex<const N: usize>(prefix: &str, input: &str) -> AppResult<[u8; N]> {\n\n let just_hex = input\n\n .strip_prefix(prefix)\n\n .ok_or(AppError::ParseError(\"incorrect prefix\".into()))?;\n\n if just_hex.len() != 2 * N {\n\n return Err(AppError::ParseError(\"incorrect size\".into()));\n\n }\n\n let mut bytes = [0; N];\n\n hex::decode_to_slice(just_hex, &mut bytes)?;\n\n Ok(bytes)\n\n}\n\n\n\nconst PUBLIC_KEY_PREFIX: &'static str = \"エッドの公開鍵\";\n\n\n", "file_path": "src/bin.rs", "rank": 12, "score": 46908.720698975005 }, { "content": "/// This is used to avoid allocating new space for the message schedule for each block.\n\n///\n\n/// This is a struct of our invention, and is used to carry out part 1 of the algorithm\n\n/// in Section 6.3:\n\n/// https://datatracker.ietf.org/doc/html/rfc6234#section-6.3\n\nstruct MessageSchedule {\n\n words: [u64; 
80],\n\n}\n\n\n\nimpl MessageSchedule {\n\n /// Create a new MessageSchedule\n\n ///\n\n /// This state shouldn't be used directly, but rather initialized with a message block.\n\n fn new() -> MessageSchedule {\n\n MessageSchedule { words: [0; 80] }\n\n }\n\n\n\n /// This prepares the message schedule with a new message block.\n\n ///\n\n /// This follows part 1 of the algorithm in Section 6.3:\n\n /// https://datatracker.ietf.org/doc/html/rfc6234#section-6.3\n\n fn prepare(&mut self, block: &[u8; BLOCK_SIZE]) {\n\n for (t, chunk) in block.chunks_exact(8).enumerate() {\n\n // Casting the chunk to the right size will never fail, because we use chunks_exact\n\n let mt = u64::from_be_bytes(chunk.try_into().unwrap());\n", "file_path": "src/sha512.rs", "rank": 13, "score": 45994.324307946576 }, { "content": "pub fn gen_keypair<R: RngCore + CryptoRng>(rng: &mut R) -> (PublicKey, PrivateKey) {\n\n let mut private = PrivateKey { bytes: [0u8; 32] };\n\n rng.fill_bytes(&mut private.bytes);\n\n (private.derive_public_key(), private)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_signature_example1() {\n\n let mut private = PrivateKey { bytes: [0; 32] };\n\n hex::decode_to_slice(\n\n \"9d61b19deffd5a60ba844af492ec2cc44449c5697b326919703bac031cae7f60\",\n\n &mut private.bytes,\n\n )\n\n .unwrap();\n\n let mut expected = [0; 64];\n\n hex::decode_to_slice(\n", "file_path": "src/curve25519/mod.rs", "rank": 14, "score": 45144.02332116608 }, { "content": "fn main() -> AppResult<()> {\n\n let args = Args::from_args();\n\n match args {\n\n Args::Generate { out_file } => generate(&out_file),\n\n Args::Sign { key_file, in_file } => sign(&key_file, &&in_file),\n\n Args::Verify {\n\n public,\n\n signature,\n\n in_file,\n\n } => {\n\n let public_key = decode_public_key(&public)?;\n\n let decoded_signature = decode_signature(&signature)?;\n\n verify(public_key, decoded_signature, &in_file)\n\n }\n\n }\n\n}\n", "file_path": "src/bin.rs", "rank": 15, 
"score": 34038.24327430646 }, { "content": "fn format_signature(signature: Signature) -> String {\n\n format!(\"{}{}\", SIGNATURE_PREFIX, hex::encode(signature.bytes))\n\n}\n\n\n", "file_path": "src/bin.rs", "rank": 16, "score": 30241.229739045102 }, { "content": "fn generate(out_path: &Path) -> AppResult<()> {\n\n let (public, private) = gen_keypair(&mut OsRng);\n\n let formatted_public = format_public_key(public);\n\n let formatted_private = format_private_key(private);\n\n let mut out_file = File::create(out_path)?;\n\n writeln!(out_file, \"# Public Key: {}\", formatted_public)?;\n\n writeln!(out_file, \"{}\", formatted_private)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/bin.rs", "rank": 17, "score": 30241.229739045102 }, { "content": "fn format_private_key(private: PrivateKey) -> String {\n\n format!(\"{}{}\", PRIVATE_KEY_PREFIX, hex::encode(private.bytes))\n\n}\n\n\n", "file_path": "src/bin.rs", "rank": 18, "score": 28425.751027572045 }, { "content": "fn format_public_key(public: PublicKey) -> String {\n\n format!(\"{}{}\", PUBLIC_KEY_PREFIX, hex::encode(public.bytes))\n\n}\n\n\n", "file_path": "src/bin.rs", "rank": 19, "score": 28425.751027572045 }, { "content": "fn decode_signature(input: &str) -> AppResult<Signature> {\n\n Ok(Signature {\n\n bytes: decode_prefixed_hex(SIGNATURE_PREFIX, input)?,\n\n })\n\n}\n\n\n", "file_path": "src/bin.rs", "rank": 20, "score": 27786.99055408498 }, { "content": "fn sign(key_path: &Path, in_path: &Path) -> AppResult<()> {\n\n let key_file = File::open(key_path)?;\n\n let key_reader = BufReader::new(key_file);\n\n let mut maybe_private = None;\n\n for maybe_line in key_reader.lines() {\n\n let line = maybe_line?;\n\n if line.starts_with(\"#\") {\n\n continue;\n\n }\n\n maybe_private = Some(decode_private_key(&line)?);\n\n break;\n\n }\n\n let private = maybe_private.ok_or(AppError::ParseError(\"no private key in file\".into()))?;\n\n let in_data = fs::read(in_path)?;\n\n let sig = private.sign(&in_data);\n\n println!(\"{}\", 
format_signature(sig));\n\n Ok(())\n\n}\n\n\n", "file_path": "src/bin.rs", "rank": 21, "score": 26434.6275016495 }, { "content": "fn decode_public_key(input: &str) -> AppResult<PublicKey> {\n\n Ok(PublicKey {\n\n bytes: decode_prefixed_hex(PUBLIC_KEY_PREFIX, input)?,\n\n })\n\n}\n\n\n\nconst PRIVATE_KEY_PREFIX: &'static str = \"エッドの秘密鍵\";\n\n\n", "file_path": "src/bin.rs", "rank": 22, "score": 26266.60774817189 }, { "content": "fn decode_private_key(input: &str) -> AppResult<PrivateKey> {\n\n Ok(PrivateKey {\n\n bytes: decode_prefixed_hex(PRIVATE_KEY_PREFIX, input)?,\n\n })\n\n}\n\n\n\nconst SIGNATURE_PREFIX: &'static str = \"エッドの署名\";\n\n\n", "file_path": "src/bin.rs", "rank": 23, "score": 26266.60774817189 }, { "content": "fn verify(public: PublicKey, signature: Signature, in_path: &Path) -> AppResult<()> {\n\n let in_data = fs::read(in_path)?;\n\n if !public.verify(&in_data, signature) {\n\n return Err(AppError::FailedSignature);\n\n }\n\n println!(\"Ok!\");\n\n Ok(())\n\n}\n\n\n", "file_path": "src/bin.rs", "rank": 35, "score": 24100.0138354019 }, { "content": " Ok(Z25519 { value })\n\n }\n\n}\n\n\n\nimpl From<u64> for Z25519 {\n\n fn from(x: u64) -> Self {\n\n Z25519 {\n\n value: U256::from(x),\n\n }\n\n }\n\n}\n\n\n\nimpl From<[u64; 4]> for Z25519 {\n\n fn from(limbs: [u64; 4]) -> Self {\n\n Z25519 {\n\n value: U256 { limbs },\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/curve25519/field.rs", "rank": 36, "score": 15.329955446280795 }, { "content": "const TWO_P_MINUS_1_OVER_4: Z25519 = Z25519 {\n\n value: U256 {\n\n limbs: [\n\n 0xc4ee1b274a0ea0b0,\n\n 0x2f431806ad2fe478,\n\n 0x2b4d00993dfbd7a7,\n\n 0x2b8324804fc1df0b,\n\n ],\n\n },\n\n};\n\n\n\n/// Represents an element in the field Z/(2^255 - 19).\n\n///\n\n/// The operations in this field are defined through arithmetic modulo\n\n/// P := 2^255 - 19\n\n///\n\n/// # Creation\n\n///\n\n/// Elements in the field can be created from `u64`.\n\n#[derive(Clone, Copy, Debug)]\n", "file_path": 
"src/curve25519/field.rs", "rank": 37, "score": 13.183358452498943 }, { "content": " },\n\n };\n\n let z2 = Z25519 {\n\n value: U256 {\n\n limbs: [2, 2, 2, 2],\n\n },\n\n };\n\n let z3 = Z25519 {\n\n value: U256 {\n\n limbs: [3, 3, 3, 3],\n\n },\n\n };\n\n assert_eq!(z3, z1 + z2);\n\n\n\n let two_254 = Z25519 {\n\n value: U256 {\n\n limbs: [0, 0, 0, 1 << 62],\n\n },\n\n };\n\n assert_eq!(two_254 + two_254, Z25519::from(19));\n", "file_path": "src/curve25519/field.rs", "rank": 38, "score": 12.57584131420844 }, { "content": "use std::convert::{TryFrom, TryInto};\n\n\n\nuse rand::{CryptoRng, RngCore};\n\n\n\nuse crate::{\n\n curve25519::{point::Point, scalar::Scalar},\n\n sha512,\n\n};\n\n\n\nuse self::error::SignatureError;\n\n\n\nmod arithmetic;\n\nmod error;\n\nmod field;\n\nmod point;\n\nmod scalar;\n\n\n\npub const SIGNATURE_SIZE: usize = 64;\n\n\n\n#[derive(Debug, Clone, Copy)]\n", "file_path": "src/curve25519/mod.rs", "rank": 39, "score": 12.391140250755624 }, { "content": "/// after multiplication.\n\npub type U512 = U<8>;\n\n\n\nimpl U512 {\n\n pub fn lo(&self) -> U256 {\n\n U256 {\n\n limbs: [self.limbs[0], self.limbs[1], self.limbs[2], self.limbs[3]],\n\n }\n\n }\n\n\n\n #[cfg(test)]\n\n pub fn hi(&self) -> U256 {\n\n U256 {\n\n limbs: [self.limbs[4], self.limbs[5], self.limbs[6], self.limbs[7]],\n\n }\n\n }\n\n\n\n #[cfg(test)]\n\n pub fn from_hi_lo(hi: U256, lo: U256) -> Self {\n\n U512 {\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 40, "score": 11.879045062422449 }, { "content": " carry = (full_res >> 64) as u64;\n\n }\n\n self.reduce_after_scaling(carry);\n\n }\n\n}\n\n\n\nimpl Mul for Z25519 {\n\n type Output = Self;\n\n\n\n fn mul(mut self, other: Self) -> Self::Output {\n\n self *= other;\n\n self\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::super::arithmetic::U256;\n\n\n\n use super::Z25519;\n", "file_path": "src/curve25519/field.rs", "rank": 41, "score": 11.663092505634232 }, { "content": "use std::{\n\n 
convert::{TryFrom, TryInto},\n\n ops::{Add, AddAssign, Mul, MulAssign, Neg},\n\n};\n\n\n\nuse subtle::{ConditionallySelectable, ConstantTimeEq};\n\n\n\nuse super::{\n\n arithmetic::{U256, U512},\n\n error::SignatureError,\n\n};\n\n\n\nconst L: U256 = U256 {\n\n limbs: [\n\n 0x5812631a5cf5d3ed,\n\n 0x14def9dea2f79cd6,\n\n 0x0000000000000000,\n\n 0x1000000000000000,\n\n ],\n\n};\n", "file_path": "src/curve25519/scalar.rs", "rank": 42, "score": 11.530712327620337 }, { "content": "\n\n fn mul(mut self, small: u64) -> Self::Output {\n\n self *= small;\n\n self\n\n }\n\n}\n\n\n\nimpl MulAssign for Z25519 {\n\n fn mul_assign(&mut self, other: Self) {\n\n let res = self.value * other.value;\n\n // At this point, we've multiplied things out, and have:\n\n // hi⋅2²⁵⁶ + lo\n\n // Observe that 2²⁵⁶ = 2⋅(2²⁵⁵ - 19) + 38, so mod P, we have:\n\n // hi + 38⋅lo\n\n // All that's left is to multiply hi by 38, and then add in lo\n\n let mut carry = 0u64;\n\n for i in 0..4 {\n\n let full_res =\n\n u128::from(carry) + u128::from(res.limbs[i]) + 38 * u128::from(res.limbs[i + 4]);\n\n self.value.limbs[i] = full_res as u64;\n", "file_path": "src/curve25519/field.rs", "rank": 43, "score": 11.467677251119687 }, { "content": "use std::{\n\n cell::Cell,\n\n convert::TryInto,\n\n ops::{Add, AddAssign, Mul, Sub, SubAssign},\n\n};\n\n\n\nuse subtle::{Choice, ConditionallySelectable};\n\n\n\nuse crate::arch::{adc, mulc, sbb};\n\n\n\n#[derive(Clone, Copy, Debug)]\n\n// Only implement equality for tests. 
This is to avoid the temptation to introduce\n\n// a timing leak through equality comparison in other situations.\n\n#[cfg_attr(test, derive(PartialEq))]\n\npub struct U<const N: usize> {\n\n pub limbs: [u64; N],\n\n}\n\n\n\nimpl<const N: usize> U<N> {\n\n /// sub_with_borrow subtracts other from this elements in place, returning a borrow\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 44, "score": 11.283366446259919 }, { "content": "\n\nimpl Neg for Z25519 {\n\n type Output = Self;\n\n\n\n fn neg(self) -> Self::Output {\n\n // NOTE: Hopefully Rust inlines things, to avoid materializing 4 zeros in memory\n\n Self::from(0) - self\n\n }\n\n}\n\n\n\nimpl MulAssign<u64> for Z25519 {\n\n fn mul_assign(&mut self, small: u64) {\n\n let (carry, lo) = self.value * small;\n\n self.value = lo;\n\n self.reduce_after_scaling(carry);\n\n }\n\n}\n\n\n\nimpl Mul<u64> for Z25519 {\n\n type Output = Z25519;\n", "file_path": "src/curve25519/field.rs", "rank": 45, "score": 10.761200442487915 }, { "content": "impl ConditionallySelectable for Z25519 {\n\n fn conditional_select(a: &Self, b: &Self, choice: Choice) -> Self {\n\n Z25519 {\n\n value: U256::conditional_select(&a.value, &b.value, choice),\n\n }\n\n }\n\n}\n\n\n\nimpl AddAssign for Z25519 {\n\n fn add_assign(&mut self, other: Self) {\n\n let carry = self.value.add_with_carry(other.value);\n\n self.reduce_after_addition(carry);\n\n }\n\n}\n\n\n\nimpl Add for Z25519 {\n\n type Output = Self;\n\n\n\n fn add(mut self, other: Self) -> Self::Output {\n\n self += other;\n", "file_path": "src/curve25519/field.rs", "rank": 46, "score": 10.191558879607708 }, { "content": "pub struct Signature {\n\n pub bytes: [u8; SIGNATURE_SIZE],\n\n}\n\n\n\npub const PUBLIC_KEY_SIZE: usize = 32;\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub struct PublicKey {\n\n pub bytes: [u8; PUBLIC_KEY_SIZE],\n\n}\n\n\n\nimpl PublicKey {\n\n fn from_hash(hash: &[u8; 64]) -> Self {\n\n let scalar = Scalar::clamped(hash[..32].try_into().unwrap());\n\n 
PublicKey {\n\n bytes: (point::B * scalar).into(),\n\n }\n\n }\n\n\n\n fn verify_result(&self, message: &[u8], signature: Signature) -> Result<(), SignatureError> {\n", "file_path": "src/curve25519/mod.rs", "rank": 47, "score": 10.11962478351827 }, { "content": " limbs: [\n\n lo.limbs[0],\n\n lo.limbs[1],\n\n lo.limbs[2],\n\n lo.limbs[3],\n\n hi.limbs[0],\n\n hi.limbs[1],\n\n hi.limbs[2],\n\n hi.limbs[3],\n\n ],\n\n }\n\n }\n\n}\n\n\n\nimpl Mul for U256 {\n\n type Output = U512;\n\n\n\n fn mul(self, other: U256) -> Self::Output {\n\n // You can treat both of these functions as macros. They just exist to avoid\n\n // repeating this logic multiple times.\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 48, "score": 10.035219886356485 }, { "content": "use eddo::{gen_keypair, PrivateKey, PublicKey, Signature};\n\nuse rand::rngs::OsRng;\n\nuse std::fs::{self, File};\n\nuse std::io::{self, BufReader};\n\nuse std::io::{BufRead, Write};\n\nuse std::path::{Path, PathBuf};\n\nuse structopt::StructOpt;\n\n\n\nextern crate hex;\n\nextern crate structopt;\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(name = \"eddo\")]\n", "file_path": "src/bin.rs", "rank": 49, "score": 10.02614601390597 }, { "content": " use proptest::prelude::*;\n\n\n\n prop_compose! {\n\n fn arb_z25519()(\n\n z0 in 0..(!0u64 - 19),\n\n z1 in any::<u64>(),\n\n z2 in any::<u64>(),\n\n z3 in 0..((1u64 << 63) - 19)) -> Z25519 {\n\n Z25519 {\n\n value: U256 { limbs: [z0, z1, z2, z3] }\n\n }\n\n }\n\n }\n\n\n\n proptest! 
{\n\n #[test]\n\n fn test_addition_commutative(a in arb_z25519(), b in arb_z25519()) {\n\n assert_eq!(a + b, b + a);\n\n }\n\n }\n", "file_path": "src/curve25519/field.rs", "rank": 50, "score": 9.96575902277927 }, { "content": "use std::{\n\n convert::{TryFrom, TryInto},\n\n ops::{Add, AddAssign, Mul, MulAssign, Neg, Sub, SubAssign},\n\n};\n\n\n\nuse subtle::{Choice, ConditionallySelectable, ConstantTimeEq};\n\n\n\nuse crate::arch::adc;\n\n\n\nuse super::{arithmetic::U256, error::SignatureError};\n\n\n\nconst P: U256 = U256 {\n\n limbs: [\n\n 0xFFFF_FFFF_FFFF_FFED,\n\n 0xFFFF_FFFF_FFFF_FFFF,\n\n 0xFFFF_FFFF_FFFF_FFFF,\n\n 0x7FFF_FFFF_FFFF_FFFF,\n\n ],\n\n};\n\n\n", "file_path": "src/curve25519/field.rs", "rank": 51, "score": 9.842558219509883 }, { "content": " assert_eq!(minus_one, 1.into());\n\n }\n\n\n\n #[test]\n\n fn test_two_255() {\n\n let two_254 = Z25519 {\n\n value: U256 {\n\n limbs: [0, 0, 0, 0x4000000000000000],\n\n },\n\n };\n\n assert_eq!(two_254 * Z25519::from(2), 19.into());\n\n }\n\n}\n", "file_path": "src/curve25519/field.rs", "rank": 52, "score": 9.677074729550931 }, { "content": "}\n\n\n\nimpl Into<[u8; 32]> for Z25519 {\n\n fn into(self) -> [u8; 32] {\n\n self.value.into()\n\n }\n\n}\n\n\n\nimpl<'a> TryFrom<&'a [u8]> for Z25519 {\n\n type Error = SignatureError;\n\n\n\n fn try_from(value: &'a [u8]) -> Result<Self, Self::Error> {\n\n if value.len() < 32 {\n\n return Err(SignatureError::InvalidFieldElement);\n\n }\n\n let value_bytes: [u8; 32] = value[..32].try_into().unwrap();\n\n let value = U256::from(value_bytes);\n\n if value.geq(P) {\n\n return Err(SignatureError::InvalidScalar);\n\n }\n", "file_path": "src/curve25519/field.rs", "rank": 53, "score": 9.512011870684347 }, { "content": " Self { limbs }\n\n }\n\n}\n\n\n\nimpl Into<[u8; 32]> for U256 {\n\n fn into(self) -> [u8; 32] {\n\n let mut out = [0; 32];\n\n let mut i = 0;\n\n for limb in &self.limbs {\n\n for &b in &limb.to_le_bytes() {\n\n out[i] = b;\n\n i += 1;\n\n }\n\n }\n\n out\n\n 
}\n\n}\n\n\n\nimpl From<[u8; 32]> for U256 {\n\n fn from(x: [u8; 32]) -> Self {\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 54, "score": 9.329872301645675 }, { "content": " }\n\n\n\n #[test]\n\n fn test_addition_examples() {\n\n let z1 = Scalar {\n\n value: U256 {\n\n limbs: [1, 1, 1, 1],\n\n },\n\n };\n\n let z2 = Scalar {\n\n value: U256 {\n\n limbs: [2, 2, 2, 2],\n\n },\n\n };\n\n let z3 = Scalar {\n\n value: U256 {\n\n limbs: [3, 3, 3, 3],\n\n },\n\n };\n\n assert_eq!(z3, z1 + z2);\n", "file_path": "src/curve25519/scalar.rs", "rank": 55, "score": 9.220867115115485 }, { "content": " (hi.limbs[2] << 6) | (hi.limbs[1] >> 58),\n\n (hi.limbs[3] << 6) | (hi.limbs[2] >> 58),\n\n ],\n\n };\n\n let to_subtract = q * L;\n\n let mut scalar = Scalar {\n\n value: large.lo() - to_subtract.lo(),\n\n };\n\n scalar.reduce_after_addition();\n\n scalar\n\n }\n\n}\n\n\n\nimpl From<u64> for Scalar {\n\n fn from(x: u64) -> Self {\n\n Scalar {\n\n value: U256::from(x),\n\n }\n\n }\n\n}\n", "file_path": "src/curve25519/scalar.rs", "rank": 56, "score": 9.216555955217403 }, { "content": " }\n\n\n\n #[test]\n\n fn test_subtraction_examples() {\n\n let mut z1 = Z25519 {\n\n value: U256 {\n\n limbs: [1, 1, 1, 1],\n\n },\n\n };\n\n z1 -= z1;\n\n assert_eq!(z1, 0.into());\n\n z1 -= 1.into();\n\n let p_minus_one = Z25519 {\n\n value: U256 {\n\n limbs: [\n\n 0xFFFF_FFFF_FFFF_FFEC,\n\n 0xFFFF_FFFF_FFFF_FFFF,\n\n 0xFFFF_FFFF_FFFF_FFFF,\n\n 0x7FFF_FFFF_FFFF_FFFF,\n\n ],\n", "file_path": "src/curve25519/field.rs", "rank": 57, "score": 9.155195414950715 }, { "content": " }\n\n\n\n proptest! 
{\n\n #[test]\n\n fn test_inverse(\n\n a in arb_z25519()\n\n .prop_filter(\n\n \"zero cannot be inverted\".to_owned(),\n\n |x: &Z25519| *x != 0.into()\n\n )\n\n ) {\n\n assert_eq!(a * a.inverse(), 1.into());\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_addition_examples() {\n\n let z1 = Z25519 {\n\n value: U256 {\n\n limbs: [1, 1, 1, 1],\n", "file_path": "src/curve25519/field.rs", "rank": 58, "score": 9.01053010522186 }, { "content": " self += other;\n\n self\n\n }\n\n}\n\n\n\nimpl MulAssign for Scalar {\n\n fn mul_assign(&mut self, other: Self) {\n\n let large = self.value * other.value;\n\n *self = Scalar::reduce_barret(large);\n\n }\n\n}\n\n\n\nimpl Mul for Scalar {\n\n type Output = Self;\n\n\n\n fn mul(mut self, other: Self) -> Self::Output {\n\n self *= other;\n\n self\n\n }\n\n}\n", "file_path": "src/curve25519/scalar.rs", "rank": 59, "score": 8.984643354139752 }, { "content": "\n\npub const PRIVATE_KEY_SIZE: usize = 32;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct PrivateKey {\n\n pub bytes: [u8; PRIVATE_KEY_SIZE],\n\n}\n\n\n\nimpl PrivateKey {\n\n fn derive_public_key(&self) -> PublicKey {\n\n let hash = sha512::hash(&self.bytes);\n\n PublicKey::from_hash(&hash)\n\n }\n\n\n\n pub fn sign(&self, message: &[u8]) -> Signature {\n\n let hash = sha512::hash(&self.bytes);\n\n let s = Scalar::clamped(hash[..32].try_into().unwrap());\n\n let a: [u8; 32] = (point::B * s).into();\n\n let prefix = &hash[32..];\n\n\n", "file_path": "src/curve25519/mod.rs", "rank": 60, "score": 8.561485257893489 }, { "content": "///\n\n/// The operations in this ring are defined through arithmetic modulo\n\n/// L := 2^252 + 27742317777372353535851937790883648493\n\n#[derive(Clone, Copy, Debug)]\n\n// Only implement equality for tests. 
This is to avoid the temptation to introduce\n\n// a timing leak through equality comparison in other situations.\n\n#[cfg_attr(test, derive(PartialEq))]\n\npub struct Scalar {\n\n pub value: U256,\n\n}\n\n\n\nimpl Scalar {\n\n /// Creates a new scalar from 32 bytes.\n\n ///\n\n /// This will apply a standard clamping procedure to the bytes, as described\n\n /// in Section 5.1.5:\n\n /// https://datatracker.ietf.org/doc/html/rfc8032#section-5.1.5\n\n pub fn clamped(mut bytes: [u8; 32]) -> Scalar {\n\n bytes[0] &= 248;\n\n bytes[31] &= 127;\n", "file_path": "src/curve25519/scalar.rs", "rank": 61, "score": 8.554306224931658 }, { "content": " self\n\n }\n\n}\n\n\n\nimpl SubAssign for Z25519 {\n\n fn sub_assign(&mut self, other: Z25519) {\n\n // We perform the subtraction, and then add back P if we underflowed.\n\n let borrow = self.value.sub_with_borrow(other.value);\n\n self.value.cond_add(P, borrow.ct_eq(&1));\n\n }\n\n}\n\n\n\nimpl Sub for Z25519 {\n\n type Output = Self;\n\n\n\n fn sub(mut self, other: Z25519) -> Self::Output {\n\n self -= other;\n\n self\n\n }\n\n}\n", "file_path": "src/curve25519/field.rs", "rank": 62, "score": 8.400105128884277 }, { "content": " },\n\n };\n\n assert_eq!(z1, p_minus_one);\n\n }\n\n\n\n #[test]\n\n fn test_small_multiplication_examples() {\n\n let z1 = Z25519 {\n\n value: U256 { limbs: [1; 4] },\n\n };\n\n assert_eq!(z1 + z1, z1 * 2);\n\n assert_eq!(z1 + z1 + z1, z1 * 3);\n\n let p_minus_one = Z25519 {\n\n value: U256 {\n\n limbs: [\n\n 0xFFFF_FFFF_FFFF_FFEC,\n\n 0xFFFF_FFFF_FFFF_FFFF,\n\n 0xFFFF_FFFF_FFFF_FFFF,\n\n 0x7FFF_FFFF_FFFF_FFFF,\n\n ],\n", "file_path": "src/curve25519/field.rs", "rank": 63, "score": 8.297977530645642 }, { "content": "\n\n#[cfg(test)]\n\nmod test {\n\n use crate::curve25519::scalar::L;\n\n\n\n use super::super::arithmetic::U256;\n\n\n\n use super::Scalar;\n\n use proptest::prelude::*;\n\n\n\n prop_compose! 
{\n\n fn arb_scalar()(\n\n z0 in any::<u64>(),\n\n z1 in any::<u64>(),\n\n z2 in any::<u64>(),\n\n z3 in 0..0xFFFFFFFFFFFFFFFu64) -> Scalar {\n\n Scalar {\n\n value: U256 { limbs: [z0, z1, z2, z3] }\n\n }\n\n }\n", "file_path": "src/curve25519/scalar.rs", "rank": 64, "score": 8.214072861614852 }, { "content": " value: U256 {\n\n limbs: [\n\n 0xa40611e3449c0f00,\n\n 0xd00e1ba768859347,\n\n 0xceec73d217f5be65,\n\n 0x0399411b7c309a3d,\n\n ],\n\n },\n\n };\n\n assert_eq!(Scalar::from(bytes), expected);\n\n bytes = [0; 64];\n\n bytes[0] = 1;\n\n expected.value = U256 {\n\n limbs: [1, 0, 0, 0],\n\n };\n\n assert_eq!(Scalar::from(bytes), expected);\n\n }\n\n}\n", "file_path": "src/curve25519/scalar.rs", "rank": 65, "score": 8.135252711174815 }, { "content": "\n\nconst N_SQUARED: U256 = U256 {\n\n limbs: [\n\n 0xe2edf685ab128969,\n\n 0x680392762298a31d,\n\n 0x3dceec73d217f5be,\n\n 0x01b399411b7c309a,\n\n ],\n\n};\n\n\n\nconst R: U256 = U256 {\n\n limbs: [\n\n 0x9fb673968c28b04c,\n\n 0xac84188574218ca6,\n\n 0xffffffffffffffff,\n\n 0x3fffffffffffffff,\n\n ],\n\n};\n\n\n\n/// Represents a scalar in Z/(L) the order of our curve group.\n", "file_path": "src/curve25519/scalar.rs", "rank": 66, "score": 8.104028417742462 }, { "content": " let value = U256::from(value_bytes);\n\n if value.geq(L) {\n\n return Err(SignatureError::InvalidScalar);\n\n }\n\n Ok(Scalar { value })\n\n }\n\n}\n\n\n\nimpl ConditionallySelectable for Scalar {\n\n fn conditional_select(a: &Self, b: &Self, choice: subtle::Choice) -> Self {\n\n Scalar {\n\n value: U256::conditional_select(&a.value, &b.value, choice),\n\n }\n\n }\n\n}\n\n\n\nimpl Neg for Scalar {\n\n type Output = Scalar;\n\n\n\n fn neg(self) -> Self::Output {\n", "file_path": "src/curve25519/scalar.rs", "rank": 67, "score": 7.874762426659736 }, { "content": " },\n\n };\n\n assert_eq!(p_minus_one * 2, p_minus_one - 1.into());\n\n assert_eq!(p_minus_one * 3, p_minus_one - 2.into());\n\n }\n\n\n\n #[test]\n\n fn test_2192_times_zero() {\n\n let 
two192 = Z25519 {\n\n value: U256 {\n\n limbs: [0, 0, 0, 1],\n\n },\n\n };\n\n assert_eq!(two192 * Z25519::from(0), 0.into());\n\n }\n\n\n\n #[test]\n\n fn test_minus_one_squared() {\n\n let mut minus_one = Z25519::from(0) - Z25519::from(1);\n\n minus_one.square();\n", "file_path": "src/curve25519/field.rs", "rank": 68, "score": 7.7849448909871795 }, { "content": "\n\n fn mul(mut self, small: u64) -> Self::Output {\n\n let mut carry = 0;\n\n // Hopefully this gets unrolled\n\n for i in 0..N {\n\n carry = mulc(carry, small, self.limbs[i], &mut self.limbs[i]);\n\n }\n\n (carry, self)\n\n }\n\n}\n\n\n\n/// Represents a 256 bit unsigned integer.\n\n///\n\n/// This is intended to hold common behavior between the different modular arithmetic\n\n/// behavior we need for our crate.\n\npub type U256 = U<4>;\n\n\n\n/// Represents a 512 bit unsigned integer.\n\n///\n\n/// This is used less often, mainly for converting from hashes, and reducing\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 69, "score": 7.746515244954028 }, { "content": " bytes[31] |= 64;\n\n let mut value = U256::from(0);\n\n for (i, chunk) in bytes.chunks_exact(8).enumerate() {\n\n value.limbs[i] = u64::from_le_bytes(chunk.try_into().unwrap());\n\n }\n\n Scalar { value }\n\n }\n\n\n\n fn reduce_after_addition(&mut self) {\n\n let mut l_removed = *self;\n\n let borrow = l_removed.value.sub_with_borrow(L);\n\n self.conditional_assign(&l_removed, borrow.ct_eq(&0));\n\n }\n\n\n\n fn reduce_barret(large: U512) -> Self {\n\n let (hi, lo) = large * R;\n\n let q = U256 {\n\n limbs: [\n\n (hi.limbs[0] << 6) | (lo.limbs[7] >> 58),\n\n (hi.limbs[1] << 6) | (hi.limbs[0] >> 58),\n", "file_path": "src/curve25519/scalar.rs", "rank": 70, "score": 7.746359055244334 }, { "content": " (full_res >> 64) as u64,\n\n self.value.limbs[1],\n\n &mut self.value.limbs[1],\n\n );\n\n carry = adc(carry, 0, self.value.limbs[2], &mut self.value.limbs[2]);\n\n carry = adc(carry, 0, self.value.limbs[3], &mut 
self.value.limbs[3]);\n\n // Now remove P if necessary\n\n self.reduce_after_addition(carry);\n\n }\n\n\n\n /// calculate z <- z * z mod P.\n\n ///\n\n /// This is equivalent to z *= z, but is a bit more efficient, because it takes\n\n /// advantage of the extra symmetry of this operation compared to the general case.\n\n pub fn square(&mut self) {\n\n *self *= *self;\n\n }\n\n\n\n /// calculates z * z mod P\n\n ///\n", "file_path": "src/curve25519/field.rs", "rank": 71, "score": 7.650260381254893 }, { "content": " }\n\n}\n\n\n\nimpl Mul<U256> for U512 {\n\n type Output = (U256, U512);\n\n\n\n fn mul(self, other: U256) -> Self::Output {\n\n // You can treat both of these functions as macros. They just exist to avoid\n\n // repeating this logic multiple times.\n\n\n\n // This is essentially a 192 bit number\n\n let r0 = Cell::new(0u64);\n\n let r1 = Cell::new(0u64);\n\n let r2 = Cell::new(0u64);\n\n\n\n let multiply_in = |i: usize, j: usize| {\n\n let uv = u128::from(self.limbs[i]) * u128::from(other.limbs[j]);\n\n let mut carry = 0;\n\n let mut out = 0;\n\n carry = adc(carry, uv as u64, r0.get(), &mut out);\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 72, "score": 7.638537528158725 }, { "content": "// Only implement equality for tests. This is to avoid the temptation to introduce\n\n// a timing leak through equality comparison in other situations.\n\n#[cfg_attr(test, derive(PartialEq))]\n\npub struct Z25519 {\n\n pub value: U256,\n\n}\n\n\n\nimpl Z25519 {\n\n /// reduce_after_addition reduces this element modulo P, after an addition.\n\n ///\n\n /// After an addition, we have at most 2P - 2, so at most one subtraction of P suffices.\n\n fn reduce_after_addition(&mut self, carry: u8) {\n\n let mut m_removed = *self;\n\n // The largest result we've just calculated is 2P - 2. 
Therefore, we might\n\n // need to subtract P once, if we have a result >= P.\n\n let borrow = m_removed.value.sub_with_borrow(P);\n\n // A few cases here:\n\n //\n\n // carry = 1, borrow = 0:\n\n // Impossible: we would need a result ≥ 2²⁵⁶ + P\n", "file_path": "src/curve25519/field.rs", "rank": 73, "score": 7.53600201640773 }, { "content": " let mut out = Scalar {\n\n value: U256::from(0),\n\n };\n\n let borrow = out.value.sub_with_borrow(self.value);\n\n out.value.cond_add(L, borrow.ct_eq(&1));\n\n out\n\n }\n\n}\n\n\n\nimpl AddAssign for Scalar {\n\n fn add_assign(&mut self, other: Self) {\n\n self.value += other.value;\n\n self.reduce_after_addition();\n\n }\n\n}\n\n\n\nimpl Add for Scalar {\n\n type Output = Self;\n\n\n\n fn add(mut self, other: Self) -> Self::Output {\n", "file_path": "src/curve25519/scalar.rs", "rank": 74, "score": 7.325519576813459 }, { "content": " /// This is like the function square, except returning a new value instead of working\n\n /// in place.\n\n pub fn squared(mut self) -> Z25519 {\n\n self.square();\n\n self\n\n }\n\n\n\n // inverse calculates self^-1 mod P, a number which multiplied by self returns 1\n\n //\n\n // This will work for every valid number, except 0.\n\n pub fn inverse(self) -> Z25519 {\n\n // By Fermat, we know that self ^ (P - 2) is an inverse.\n\n // We can do binary exponentiation, using the fact that we have\n\n // 0b01011, and then 250 one bits.\n\n let mut out = Z25519::from(1);\n\n let mut current_power = self;\n\n // Handling 0b01011\n\n out *= current_power;\n\n current_power.square();\n\n out *= current_power;\n", "file_path": "src/curve25519/field.rs", "rank": 75, "score": 7.0740722350609735 }, { "content": "\n\n use proptest::prelude::*;\n\n\n\n prop_compose! {\n\n fn arb_u256()(\n\n z0 in any::<u64>(),\n\n z1 in any::<u64>(),\n\n z2 in any::<u64>(),\n\n z3 in any::<u64>()) -> U256 {\n\n U256 {\n\n limbs: [z0, z1, z2, z3]\n\n }\n\n }\n\n }\n\n\n\n proptest! 
{\n\n #[test]\n\n fn test_addition_commutative(a in arb_u256(), b in arb_u256()) {\n\n assert_eq!(a + b, b + a);\n\n }\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 76, "score": 6.717850165461659 }, { "content": " // A = q⋅P + R + 19q\n\n // Modulo P, this entails:\n\n // A ≡ R + 19q mod P\n\n // We can efficiently calculate q and R using shifting and masking.\n\n\n\n // We pull in one bit from the top limb, in order to calculate the quotient\n\n let q = (carry << 1) | (self.value.limbs[3] >> 63);\n\n // Clear the top bit, thus calculating R\n\n self.value.limbs[3] &= 0x7FFF_FFFF_FFFF_FFFF;\n\n // Now we add in 19q\n\n let full_res = 19 * u128::from(q);\n\n let mut carry = 0;\n\n carry = adc(\n\n carry,\n\n full_res as u64,\n\n self.value.limbs[0],\n\n &mut self.value.limbs[0],\n\n );\n\n carry = adc(\n\n carry,\n", "file_path": "src/curve25519/field.rs", "rank": 77, "score": 6.620516471215208 }, { "content": " }\n\n\n\n proptest! {\n\n #[test]\n\n fn test_subtraction_versus_negation(a in arb_u256(), b in arb_u256()) {\n\n assert_eq!(a - b, U256::from(0) - (b - a));\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_subtraction_examples() {\n\n let a = U256 { limbs: [0; 4] };\n\n let b = U256 {\n\n limbs: [1, 0, 0, 0],\n\n };\n\n let c = U256 {\n\n limbs: [u64::MAX; 4],\n\n };\n\n assert_eq!(a - b, c);\n\n }\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 78, "score": 6.508648961974863 }, { "content": " }\n\n\n\n #[test]\n\n fn test_scaling_examples() {\n\n let a = U256 { limbs: [1; 4] };\n\n let c = U256 { limbs: [64; 4] };\n\n assert_eq!((a * 64).1, c);\n\n }\n\n}\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 79, "score": 6.428215041141435 }, { "content": " };\n\n assert_eq!(a * b, c);\n\n\n\n a = U256 {\n\n limbs: [1, 0, 0, 0],\n\n };\n\n b = U256 {\n\n limbs: [0, 0, 1, 0],\n\n };\n\n c = U512 {\n\n limbs: [0, 0, 1, 0, 0, 0, 0, 0],\n\n };\n\n assert_eq!(a * b, c);\n\n }\n\n\n\n proptest! 
{\n\n #[test]\n\n fn test_subtraction_yields_zero(a in arb_u256()) {\n\n assert_eq!(a - a, 0.into());\n\n }\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 80, "score": 6.351378391447417 }, { "content": " }\n\n\n\n #[test]\n\n fn test_addition_examples() {\n\n let a = U256 {\n\n limbs: [u64::MAX, u64::MAX, u64::MAX, 0],\n\n };\n\n let b = U256 {\n\n limbs: [2, 0, 0, 0],\n\n };\n\n let c = U256 {\n\n limbs: [1, 0, 0, 1],\n\n };\n\n assert_eq!(a + b, c);\n\n }\n\n\n\n proptest! {\n\n #[test]\n\n fn test_multiplication_commutative(a in arb_u256(), b in arb_u256()) {\n\n assert_eq!(a * b, b * a);\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 81, "score": 6.285247930785001 }, { "content": " }\n\n\n\n proptest! {\n\n #[test]\n\n fn test_low_512_256_multiplication(a in arb_u256(), b in arb_u256()) {\n\n let (_, lo) = U512::from_hi_lo(0.into(), a) * b;\n\n assert_eq!(lo, a * b);\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_multiplication_examples() {\n\n let mut a = U256 {\n\n limbs: [1, 1, 1, 1],\n\n };\n\n let mut b = U256 {\n\n limbs: [2, 0, 0, 0],\n\n };\n\n let mut c = U512 {\n\n limbs: [2, 2, 2, 2, 0, 0, 0, 0],\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 82, "score": 6.103724874336427 }, { "content": "#[derive(StructOpt, Debug)]\n\n#[structopt(name = \"eddo\")]\n\nenum Args {\n\n /// Generate a new keypair\n\n ///\n\n /// The public key will be printed out, the private key will be saved to a file\n\n Generate {\n\n /// The file to write the private key into\n\n #[structopt(short = \"o\", long = \"out\", parse(from_os_str))]\n\n out_file: PathBuf,\n\n },\n\n /// Verify a signature for a file, by a given public key\n\n Verify {\n\n /// The public key used to sign this file\n\n #[structopt(short = \"p\", long = \"public\")]\n\n public: String,\n\n /// The signature for this file\n\n #[structopt(short = \"s\", long = \"signature\")]\n\n signature: String,\n\n /// The file whose signature needs to be verified\n\n #[structopt(name = 
\"INPUT_FILE\", parse(from_os_str))]\n\n in_file: PathBuf,\n", "file_path": "src/bin.rs", "rank": 83, "score": 6.039493978536598 }, { "content": "\n\n /// Check if self >= other.\n\n ///\n\n /// This method is not constant-time.\n\n pub fn geq(&self, other: Self) -> bool {\n\n for i in (0..N).rev() {\n\n if other.limbs[i] > self.limbs[i] {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n\n\n /// Check if self == other.\n\n ///\n\n /// This method is not constant-time.\n\n pub fn eq(&self, other: Self) -> bool {\n\n for i in (0..N).rev() {\n\n if other.limbs[i] != self.limbs[i] {\n\n return false;\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 84, "score": 5.935989900694848 }, { "content": " ],\n\n };\n\n Scalar::reduce_barret(lo + hi_reduced)\n\n }\n\n}\n\n\n\nimpl Into<[u8; 32]> for Scalar {\n\n fn into(self) -> [u8; 32] {\n\n self.value.into()\n\n }\n\n}\n\n\n\nimpl<'a> TryFrom<&'a [u8]> for Scalar {\n\n type Error = SignatureError;\n\n\n\n fn try_from(value: &'a [u8]) -> Result<Self, Self::Error> {\n\n if value.len() < 32 {\n\n return Err(SignatureError::InvalidScalar);\n\n }\n\n let value_bytes: [u8; 32] = value[..32].try_into().unwrap();\n", "file_path": "src/curve25519/scalar.rs", "rank": 85, "score": 5.701645858339786 }, { "content": " }\n\n }\n\n true\n\n }\n\n}\n\n\n\nimpl<const N: usize> ConditionallySelectable for U<N> {\n\n fn conditional_select(a: &Self, b: &Self, choice: Choice) -> Self {\n\n let mut limbs = [0; N];\n\n for i in 0..N {\n\n limbs[i] = u64::conditional_select(&a.limbs[i], &b.limbs[i], choice)\n\n }\n\n Self { limbs }\n\n }\n\n}\n\n\n\nimpl<const N: usize> From<u64> for U<N> {\n\n fn from(x: u64) -> Self {\n\n let mut limbs = [0; N];\n\n limbs[0] = x;\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 86, "score": 5.6964074452376465 }, { "content": "\n\n proptest! 
{\n\n #[test]\n\n fn test_multiplication_associative(a in arb_z25519(), b in arb_z25519(), c in arb_z25519()) {\n\n assert_eq!(a * (b * c), (a * b) * c);\n\n }\n\n }\n\n\n\n proptest! {\n\n #[test]\n\n fn test_multiplication_distributive(a in arb_z25519(), b in arb_z25519(), c in arb_z25519()) {\n\n assert_eq!(a * (b + c), a * b + a * c);\n\n }\n\n }\n\n\n\n proptest! {\n\n #[test]\n\n fn test_multiply_one_identity(a in arb_z25519()) {\n\n let one = Z25519::from(1);\n\n assert_eq!(a * one, a);\n", "file_path": "src/curve25519/field.rs", "rank": 87, "score": 5.36503432753756 }, { "content": "pub enum SignatureError {\n\n InvalidPoint,\n\n InvalidFieldElement,\n\n InvalidScalar,\n\n InvalidEquation,\n\n}\n", "file_path": "src/curve25519/error.rs", "rank": 88, "score": 5.247283597962918 }, { "content": "\n\n proptest! {\n\n #[test]\n\n fn test_addition_associative(a in arb_z25519(), b in arb_z25519(), c in arb_z25519()) {\n\n assert_eq!(a + (b + c), (a + b) + c);\n\n }\n\n }\n\n\n\n proptest! {\n\n #[test]\n\n fn test_add_zero_identity(a in arb_z25519()) {\n\n let zero = Z25519::from(0);\n\n assert_eq!(a + zero, a);\n\n assert_eq!(zero + a, a);\n\n }\n\n }\n\n\n\n proptest! {\n\n #[test]\n\n fn test_subtract_self_is_zero(a in arb_z25519()) {\n", "file_path": "src/curve25519/field.rs", "rank": 89, "score": 5.230475889084588 }, { "content": " multiply_in(6, 2);\n\n multiply_in(7, 1);\n\n propagate(&mut hi.limbs[0]);\n\n\n\n multiply_in(6, 3);\n\n multiply_in(7, 2);\n\n propagate(&mut hi.limbs[1]);\n\n\n\n multiply_in(7, 3);\n\n propagate(&mut hi.limbs[2]);\n\n\n\n hi.limbs[3] = r0.get();\n\n\n\n (hi, lo)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 90, "score": 5.009276726934095 }, { "content": " }\n\n }\n\n\n\n proptest! 
{\n\n #[test]\n\n fn test_multiplication_identity(a in arb_u256()) {\n\n let lo1 = (a * U256::from(1)).lo();\n\n let lo2 = (U256::from(1) * a).lo();\n\n\n\n assert_eq!(lo1, a);\n\n assert_eq!(lo2, a);\n\n }\n\n }\n\n\n\n proptest! {\n\n #[test]\n\n fn test_high_512_256_multiplication(a in arb_u256(), b in arb_u256()) {\n\n let (hi, _) = U512::from_hi_lo(a, 0.into()) * b;\n\n assert_eq!(hi, (a * b).hi());\n\n }\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 91, "score": 5.0035544337668885 }, { "content": " let mut to_hash = Vec::with_capacity(64 + message.len());\n\n to_hash.extend_from_slice(prefix);\n\n to_hash.extend_from_slice(message);\n\n let r = Scalar::from(sha512::hash(&to_hash));\n\n\n\n let big_r: [u8; 32] = (point::B * r).into();\n\n\n\n to_hash.clear();\n\n to_hash.extend_from_slice(&big_r);\n\n to_hash.extend_from_slice(&a);\n\n to_hash.extend_from_slice(message);\n\n let k = Scalar::from(sha512::hash(&to_hash));\n\n\n\n let big_s: [u8; 32] = (r + k * s).into();\n\n\n\n let mut out = Signature { bytes: [0; 64] };\n\n out.bytes[..32].copy_from_slice(&big_r);\n\n out.bytes[32..].copy_from_slice(&big_s);\n\n\n\n out\n\n }\n\n}\n\n\n", "file_path": "src/curve25519/mod.rs", "rank": 92, "score": 4.980725972369212 }, { "content": " let s = Scalar::try_from(&signature.bytes[32..])?;\n\n let a = Point::try_from(&self.bytes[..])?;\n\n let mut to_hash = Vec::with_capacity(64 + message.len());\n\n let r_bytes = &signature.bytes[..32];\n\n to_hash.extend_from_slice(&r_bytes);\n\n let a_bytes: [u8; 32] = a.into();\n\n to_hash.extend_from_slice(&a_bytes);\n\n to_hash.extend_from_slice(message);\n\n let k = Scalar::from(sha512::hash(&to_hash));\n\n let check_encoded: [u8; 32] = (point::B * s + (a * -k)).into();\n\n if r_bytes != &check_encoded {\n\n return Err(SignatureError::InvalidEquation);\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn verify(&self, message: &[u8], signature: Signature) -> bool {\n\n self.verify_result(message, signature).is_ok()\n\n }\n\n}\n", 
"file_path": "src/curve25519/mod.rs", "rank": 93, "score": 4.644519431216706 }, { "content": " }\n\n}\n\n\n\nimpl<const N: usize> SubAssign for U<N> {\n\n fn sub_assign(&mut self, other: Self) {\n\n self.sub_with_borrow(other);\n\n }\n\n}\n\n\n\nimpl<const N: usize> Sub for U<N> {\n\n type Output = Self;\n\n\n\n fn sub(mut self, other: Self) -> Self::Output {\n\n self -= other;\n\n self\n\n }\n\n}\n\n\n\nimpl<const N: usize> Mul<u64> for U<N> {\n\n type Output = (u64, Self);\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 94, "score": 4.597041455435782 }, { "content": " let mut out = Self { limbs: [0; 4] };\n\n for (i, chunk) in x.chunks_exact(8).enumerate() {\n\n out.limbs[i] = u64::from_le_bytes(chunk.try_into().unwrap());\n\n }\n\n out\n\n }\n\n}\n\n\n\nimpl<const N: usize> AddAssign for U<N> {\n\n fn add_assign(&mut self, other: Self) {\n\n self.add_with_carry(other);\n\n }\n\n}\n\n\n\nimpl<const N: usize> Add for U<N> {\n\n type Output = Self;\n\n\n\n fn add(mut self, other: Self) -> Self::Output {\n\n self += other;\n\n self\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 95, "score": 4.574178497872566 }, { "content": "\n\n let l_minus_1 = Scalar {\n\n value: L - U256::from(1),\n\n };\n\n assert_eq!(l_minus_1 + Scalar::from(1), Scalar::from(0));\n\n assert_eq!(l_minus_1 + Scalar::from(20), Scalar::from(19));\n\n }\n\n\n\n #[test]\n\n fn test_multiplication_examples() {\n\n let l_minus_1 = Scalar {\n\n value: L - U256::from(1),\n\n };\n\n assert_eq!(l_minus_1 * l_minus_1, Scalar::from(1));\n\n }\n\n\n\n #[test]\n\n fn test_large_reduction_examples() {\n\n let mut bytes = [0xFF; 64];\n\n let mut expected = Scalar {\n", "file_path": "src/curve25519/scalar.rs", "rank": 96, "score": 4.5232380164849175 }, { "content": "\n\nimpl From<[u8; 64]> for Scalar {\n\n fn from(mut bytes: [u8; 64]) -> Self {\n\n let hi = u64::from(bytes[63]);\n\n bytes[63] = 0;\n\n let mut lo = U512 { limbs: [0; 8] };\n\n for (i, chunk) in bytes.chunks_exact(8).enumerate() 
{\n\n lo.limbs[i] = u64::from_le_bytes(chunk.try_into().unwrap());\n\n }\n\n let (hi_reduced_hi, hi_reduced_lo) = N_SQUARED * hi;\n\n let hi_reduced = U512 {\n\n limbs: [\n\n hi_reduced_lo.limbs[0],\n\n hi_reduced_lo.limbs[1],\n\n hi_reduced_lo.limbs[2],\n\n hi_reduced_lo.limbs[3],\n\n hi_reduced_hi,\n\n 0,\n\n 0,\n\n 0,\n", "file_path": "src/curve25519/scalar.rs", "rank": 97, "score": 4.345718594675915 }, { "content": " r0.set(out);\n\n carry = adc(carry, (uv >> 64) as u64, r1.get(), &mut out);\n\n r1.set(out);\n\n r2.set(r2.get() + u64::from(carry));\n\n };\n\n\n\n // Given r2:r1:r0, this sets limb = r0, and then shifts to get 0:r2:r1\n\n let propagate = |limb: &mut u64| {\n\n *limb = r0.get();\n\n r0.set(r1.get());\n\n r1.set(r2.get());\n\n r2.set(0);\n\n };\n\n\n\n let mut lo = U512 { limbs: [0; 8] };\n\n let mut hi = U256 { limbs: [0; 4] };\n\n\n\n multiply_in(0, 0);\n\n propagate(&mut lo.limbs[0]);\n\n\n", "file_path": "src/curve25519/arithmetic.rs", "rank": 98, "score": 4.172111409862879 }, { "content": "#[cfg(target_arch = \"x86_64\")]\n\nuse core::arch::x86_64 as arch;\n\n\n\n/// adc computes out <- a + b + carry, outputting a new carry.\n\n///\n\n/// `carry` must be 0, or 1. The return value will satisfy this constraint\n\n#[inline]\n", "file_path": "src/arch.rs", "rank": 99, "score": 4.138147129071632 } ]
Rust
src/pointer/raw.rs
oliver-giersch/reclaim
ab55cb7eb9078376597dcdbaa298daeb9bf22d0b
use core::cmp::{self, PartialEq, PartialOrd}; use core::fmt; use core::marker::PhantomData; use core::ptr::{self, NonNull}; use typenum::{IsGreaterOrEqual, True, Unsigned}; use crate::pointer::{self, MarkedNonNull, MarkedPtr}; /********** impl Clone ****************************************************************************/ impl<T, N> Clone for MarkedPtr<T, N> { #[inline] fn clone(&self) -> Self { Self::new(self.inner) } } /********** impl Copy *****************************************************************************/ impl<T, N> Copy for MarkedPtr<T, N> {} /********** impl inherent (const) *****************************************************************/ impl<T, N> MarkedPtr<T, N> { #[inline] pub const fn new(ptr: *mut T) -> Self { Self { inner: ptr, _marker: PhantomData } } #[inline] pub const fn null() -> Self { Self::new(ptr::null_mut()) } #[inline] pub const fn cast<U>(self) -> MarkedPtr<U, N> { MarkedPtr::new(self.inner as *mut U) } #[inline] pub const fn from_usize(val: usize) -> Self { Self::new(val as *mut _) } } /********** impl inherent *************************************************************************/ impl<T, N: Unsigned> MarkedPtr<T, N> { pub const MARK_BITS: usize = N::USIZE; pub const MARK_MASK: usize = pointer::mark_mask::<T>(Self::MARK_BITS); pub const POINTER_MASK: usize = !Self::MARK_MASK; #[inline] pub fn into_usize(self) -> usize { self.inner as usize } #[inline] pub fn into_ptr(self) -> *mut T { self.inner } #[inline] pub fn compose(ptr: *mut T, tag: usize) -> Self { debug_assert_eq!(0, ptr as usize & Self::MARK_MASK, "pointer must be properly aligned"); Self::new(pointer::compose::<_, N>(ptr, tag)) } #[inline] pub fn convert<M: Unsigned>(other: MarkedPtr<T, M>) -> Self where N: IsGreaterOrEqual<M, Output = True>, { Self::new(other.inner) } #[inline] pub fn clear_tag(self) -> Self { Self::new(self.decompose_ptr()) } #[inline] pub fn with_tag(self, tag: usize) -> Self { Self::compose(self.decompose_ptr(), tag) } #[inline] pub fn 
decompose(self) -> (*mut T, usize) { pointer::decompose(self.into_usize(), Self::MARK_BITS) } #[inline] pub fn decompose_ptr(self) -> *mut T { pointer::decompose_ptr(self.into_usize(), Self::MARK_BITS) } #[inline] pub fn decompose_tag(self) -> usize { pointer::decompose_tag::<T>(self.into_usize(), Self::MARK_BITS) } #[inline] pub unsafe fn decompose_ref<'a>(self) -> (Option<&'a T>, usize) { let (ptr, tag) = self.decompose(); (ptr.as_ref(), tag) } #[inline] pub unsafe fn decompose_mut<'a>(self) -> (Option<&'a mut T>, usize) { let (ptr, tag) = self.decompose(); (ptr.as_mut(), tag) } #[inline] pub unsafe fn as_ref<'a>(self) -> Option<&'a T> { self.decompose_ptr().as_ref() } #[inline] pub unsafe fn as_mut<'a>(self) -> Option<&'a mut T> { self.decompose_ptr().as_mut() } #[inline] pub fn is_null(self) -> bool { self.decompose_ptr().is_null() } } /********** impl Default **************************************************************************/ impl<T, N: Unsigned> Default for MarkedPtr<T, N> { #[inline] fn default() -> Self { Self::null() } } /********** impl Debug ****************************************************************************/ impl<T, N: Unsigned> fmt::Debug for MarkedPtr<T, N> { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let (ptr, tag) = self.decompose(); f.debug_struct("MarkedPtr").field("ptr", &ptr).field("tag", &tag).finish() } } /********** impl Pointer **************************************************************************/ impl<T, N: Unsigned> fmt::Pointer for MarkedPtr<T, N> { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Pointer::fmt(&self.decompose_ptr(), f) } } /********** impl From *****************************************************************************/ impl<T, N: Unsigned> From<*const T> for MarkedPtr<T, N> { #[inline] fn from(ptr: *const T) -> Self { Self::new(ptr as *mut _) } } impl<T, N: Unsigned> From<*mut T> for MarkedPtr<T, N> { fn from(ptr: *mut T) -> Self { Self::new(ptr) } } 
impl<'a, T, N: Unsigned> From<&'a T> for MarkedPtr<T, N> { #[inline] fn from(reference: &'a T) -> Self { Self::new(reference as *const _ as *mut _) } } impl<'a, T, N: Unsigned> From<&'a mut T> for MarkedPtr<T, N> { #[inline] fn from(reference: &'a mut T) -> Self { Self::new(reference) } } impl<T, N: Unsigned> From<NonNull<T>> for MarkedPtr<T, N> { #[inline] fn from(ptr: NonNull<T>) -> Self { Self::new(ptr.as_ptr()) } } impl<T, N: Unsigned> From<(*mut T, usize)> for MarkedPtr<T, N> { #[inline] fn from(pair: (*mut T, usize)) -> Self { let (ptr, tag) = pair; Self::compose(ptr, tag) } } impl<T, N: Unsigned> From<(*const T, usize)> for MarkedPtr<T, N> { #[inline] fn from(pair: (*const T, usize)) -> Self { let (ptr, tag) = pair; Self::compose(ptr as *mut _, tag) } } /********** impl PartialEq ************************************************************************/ impl<T, N> PartialEq for MarkedPtr<T, N> { #[inline] fn eq(&self, other: &Self) -> bool { self.inner == other.inner } } impl<T, N> PartialEq<MarkedNonNull<T, N>> for MarkedPtr<T, N> { #[inline] fn eq(&self, other: &MarkedNonNull<T, N>) -> bool { self.inner.eq(&other.inner.as_ptr()) } } /********** impl PartialOrd ***********************************************************************/ impl<T, N> PartialOrd for MarkedPtr<T, N> { #[inline] fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> { self.inner.partial_cmp(&other.inner) } } impl<T, N> PartialOrd<MarkedNonNull<T, N>> for MarkedPtr<T, N> { #[inline] fn partial_cmp(&self, other: &MarkedNonNull<T, N>) -> Option<cmp::Ordering> { self.inner.partial_cmp(&other.inner.as_ptr()) } } #[cfg(test)] mod test { use core::ptr; use matches::assert_matches; use typenum::{U0, U1, U3}; use crate::align::Aligned8; type UnmarkedMarkedPtr = super::MarkedPtr<Aligned8<i32>, U0>; type MarkedPtr1N = super::MarkedPtr<Aligned8<i32>, U1>; type MarkedPtr3N = super::MarkedPtr<Aligned8<i32>, U3>; #[test] fn decompose_ref() { let null = MarkedPtr3N::null(); assert_eq!((None, 
0), unsafe { null.decompose_ref() }); let marked_null = MarkedPtr3N::compose(ptr::null_mut(), 0b111); assert_eq!((None, 0b111), unsafe { marked_null.decompose_ref() }); let value = Aligned8(1); let marked = MarkedPtr3N::compose(&value as *const Aligned8<i32> as *mut _, 0b11); assert_eq!((Some(&value), 0b11), unsafe { marked.decompose_ref() }); } #[test] fn decompose_mut() { let null = MarkedPtr3N::null(); assert_eq!((None, 0), unsafe { null.decompose_mut() }); let marked_null = MarkedPtr3N::compose(ptr::null_mut(), 0b111); assert_eq!((None, 0b111), unsafe { marked_null.decompose_mut() }); let mut value = Aligned8(1); let marked = MarkedPtr3N::compose(&mut value, 0b11); assert_eq!((Some(&mut value), 0b11), unsafe { marked.decompose_mut() }); } #[test] fn from_usize() { unsafe { let unmarked = UnmarkedMarkedPtr::from_usize(&Aligned8(1) as *const _ as usize); assert_matches!(unmarked.as_ref(), Some(&Aligned8(1))); let tagged = (&Aligned8(1i32) as *const _ as usize) | 0b1; assert_eq!( (Some(&Aligned8(1i32)), 0b1), MarkedPtr1N::from_usize(tagged).decompose_ref() ); } } #[test] fn from() { let mut x = Aligned8(1); let from_ref = MarkedPtr1N::from(&x); let from_mut = MarkedPtr1N::from(&mut x); let from_const_ptr = MarkedPtr1N::from(&x as *const _); let from_mut_ptr = MarkedPtr1N::from(&mut x as *mut _); assert!(from_ref == from_mut && from_const_ptr == from_mut_ptr); } #[test] fn eq_ord() { let null = MarkedPtr3N::null(); assert!(null.is_null()); assert_eq!(null, null); let mut aligned = Aligned8(1); let marked1 = MarkedPtr3N::compose(&mut aligned, 0b01); let marked2 = MarkedPtr3N::compose(&mut aligned, 0b11); assert_ne!(marked1, marked2); assert!(marked1 < marked2); } #[test] fn convert() { let mut aligned = Aligned8(1); let marked = MarkedPtr1N::compose(&mut aligned, 0b1); let convert = MarkedPtr3N::convert(marked); assert_eq!((Some(&aligned), 0b1), unsafe { convert.decompose_ref() }); } }
use core::cmp::{self, PartialEq, PartialOrd}; use core::fmt; use core::marker::PhantomData; use core::ptr::{self, NonNull}; use typenum::{IsGreaterOrEqual, True, Unsigned}; use crate::pointer::{self, MarkedNonNull, MarkedPtr}; /********** impl Clone ****************************************************************************/ impl<T, N> Clone for MarkedPtr<T, N> { #[inline] fn clone(&self) -> Self { Self::new(self.inner) } } /********** impl Copy *****************************************************************************/ impl<T, N> Copy for MarkedPtr<T, N> {} /********** impl inherent (const) *****************************************************************/ impl<T, N> MarkedPtr<T, N> { #[inline] pub const fn new(ptr: *mut T) -> Self { Self { inner: ptr, _marker: PhantomData } } #[inline] pub const fn null() -> Self { Self::new(ptr::null_mut()) } #[inline] pub const fn cast<U>(self) -> MarkedPtr<U, N> { MarkedPtr::new(self.inner as *mut U) } #[inline] pub const fn from_usize(val: usize) -> Self { Self::new(val as *mut _) } } /********** impl inherent *************************************************************************/ impl<T, N: Unsigned> MarkedPtr<T, N> { pub const MARK_BITS: usize = N::USIZE; pub const MARK_MASK: usize = pointer::mark_mask::<T>(Self::MARK_BITS); pub const POINTER_MASK: usize = !Self::MARK_MASK; #[inline] pub fn into_usize(self) -> usize { self.inner as usize } #[inline] pub fn into_ptr(self) -> *mut T { self.inner } #[inline] pub fn compose(ptr: *mut T, tag: usize) -> Self { debug_assert_eq!(0, ptr as usize & Self::MARK_MASK, "pointer must be properly aligned"); Self::new(pointer::compose::<_, N>(ptr, tag)) } #[inline] pub fn convert<M: Unsigned>(other: MarkedPtr<T, M>) -> Self where N: IsGreaterOrEqual<M, Output = True>, { Self::new(other.inner) } #[inline] pub fn clear_tag(self) -> Self { Self::new(self.decompose_ptr()) } #[inline] pub fn with_tag(self, tag: usize) -> Self { Self::compose(self.decompose_ptr(), tag) } #[inline] pub fn 
decompose(self) -> (*mut T, usize) { pointer::decompose(self.into_usize(), Self::MARK_BITS) } #[inline] pub fn decompose_ptr(self) -> *mut T { pointer::decompose_ptr(self.into_usize(), Self::MARK_BITS) } #[inline] pub fn decompose_tag(self) -> usize { pointer::decompose_tag::<T>(self.into_usize(), Self::MARK_BITS) } #[inline] pub unsafe fn decompose_ref<'a>(self) -> (Option<&'a T>, usize) { let (ptr, tag) = self.decompose(); (ptr.as_ref(), tag) } #[inline] pub unsafe fn decompose_mut<'a>(self) -> (Option<&'a mut T>, usize) { let (ptr, tag) = self.decompose(); (ptr.as_mut(), tag) } #[inline] pub unsafe fn as_ref<'a>(self) -> Option<&'a T> { self.decompose_ptr().as_ref() } #[inline] pub unsafe fn as_mut<'a>(self) -> Option<&'a mut T> { self.decompose_ptr().as_mut() } #[inline] pub fn is_null(self) -> bool { self.decompose_ptr().is_null() } } /********** impl Default **************************************************************************/ impl<T, N: Unsigned> Default for MarkedPtr<T, N> { #[inline] fn default() -> Self { Self::null() } } /********** impl Debug ****************************************************************************/ impl<T, N: Unsigned> fmt::Debug for MarkedPtr<T, N> { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let (ptr, tag) = self.decompose(); f.debug_struct("MarkedPtr").field("ptr", &ptr).field("tag", &tag).finish() } } /********** impl Pointer **************************************************************************/ impl<T, N: Unsigned> fmt::Pointer for MarkedPtr<T, N> { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Pointer::fmt(&self.decompose_ptr(), f) } } /********** impl From *****************************************************************************/ impl<T, N: Unsigned> From<*const T> for MarkedPtr<T, N> { #[inline] fn from(ptr: *const T) -> Self { Self::new(ptr as *mut _) } } impl<T, N: Unsigned> From<*mut T> for MarkedPtr<T, N> { fn from(ptr: *mut T) -> Self { Self::new(ptr) } } 
impl<'a, T, N: Unsigned> From<&'a T> for MarkedPtr<T, N> { #[inline] fn from(reference: &'a T) -> Self { Self::new(reference as *const _ as *mut _) } } impl<'a, T, N: Unsigned> From<&'a mut T> for MarkedPtr<T, N> { #[inline] fn from(reference: &'a mut T) -> Self { Self::new(reference) } } impl<T, N: Unsigned> From<NonNull<T>> for MarkedPtr<T, N> { #[inline] fn from(ptr: NonNull<T>) -> Self { Self::new(ptr.as_ptr()) } } impl<T, N: Unsigned> From<(*mut T, usize)> for MarkedPtr<T, N> { #[inline] fn from(pair: (*mut T, usize)) ->
r: &Self) -> Option<cmp::Ordering> { self.inner.partial_cmp(&other.inner) } } impl<T, N> PartialOrd<MarkedNonNull<T, N>> for MarkedPtr<T, N> { #[inline] fn partial_cmp(&self, other: &MarkedNonNull<T, N>) -> Option<cmp::Ordering> { self.inner.partial_cmp(&other.inner.as_ptr()) } } #[cfg(test)] mod test { use core::ptr; use matches::assert_matches; use typenum::{U0, U1, U3}; use crate::align::Aligned8; type UnmarkedMarkedPtr = super::MarkedPtr<Aligned8<i32>, U0>; type MarkedPtr1N = super::MarkedPtr<Aligned8<i32>, U1>; type MarkedPtr3N = super::MarkedPtr<Aligned8<i32>, U3>; #[test] fn decompose_ref() { let null = MarkedPtr3N::null(); assert_eq!((None, 0), unsafe { null.decompose_ref() }); let marked_null = MarkedPtr3N::compose(ptr::null_mut(), 0b111); assert_eq!((None, 0b111), unsafe { marked_null.decompose_ref() }); let value = Aligned8(1); let marked = MarkedPtr3N::compose(&value as *const Aligned8<i32> as *mut _, 0b11); assert_eq!((Some(&value), 0b11), unsafe { marked.decompose_ref() }); } #[test] fn decompose_mut() { let null = MarkedPtr3N::null(); assert_eq!((None, 0), unsafe { null.decompose_mut() }); let marked_null = MarkedPtr3N::compose(ptr::null_mut(), 0b111); assert_eq!((None, 0b111), unsafe { marked_null.decompose_mut() }); let mut value = Aligned8(1); let marked = MarkedPtr3N::compose(&mut value, 0b11); assert_eq!((Some(&mut value), 0b11), unsafe { marked.decompose_mut() }); } #[test] fn from_usize() { unsafe { let unmarked = UnmarkedMarkedPtr::from_usize(&Aligned8(1) as *const _ as usize); assert_matches!(unmarked.as_ref(), Some(&Aligned8(1))); let tagged = (&Aligned8(1i32) as *const _ as usize) | 0b1; assert_eq!( (Some(&Aligned8(1i32)), 0b1), MarkedPtr1N::from_usize(tagged).decompose_ref() ); } } #[test] fn from() { let mut x = Aligned8(1); let from_ref = MarkedPtr1N::from(&x); let from_mut = MarkedPtr1N::from(&mut x); let from_const_ptr = MarkedPtr1N::from(&x as *const _); let from_mut_ptr = MarkedPtr1N::from(&mut x as *mut _); assert!(from_ref == 
from_mut && from_const_ptr == from_mut_ptr); } #[test] fn eq_ord() { let null = MarkedPtr3N::null(); assert!(null.is_null()); assert_eq!(null, null); let mut aligned = Aligned8(1); let marked1 = MarkedPtr3N::compose(&mut aligned, 0b01); let marked2 = MarkedPtr3N::compose(&mut aligned, 0b11); assert_ne!(marked1, marked2); assert!(marked1 < marked2); } #[test] fn convert() { let mut aligned = Aligned8(1); let marked = MarkedPtr1N::compose(&mut aligned, 0b1); let convert = MarkedPtr3N::convert(marked); assert_eq!((Some(&aligned), 0b1), unsafe { convert.decompose_ref() }); } }
Self { let (ptr, tag) = pair; Self::compose(ptr, tag) } } impl<T, N: Unsigned> From<(*const T, usize)> for MarkedPtr<T, N> { #[inline] fn from(pair: (*const T, usize)) -> Self { let (ptr, tag) = pair; Self::compose(ptr as *mut _, tag) } } /********** impl PartialEq ************************************************************************/ impl<T, N> PartialEq for MarkedPtr<T, N> { #[inline] fn eq(&self, other: &Self) -> bool { self.inner == other.inner } } impl<T, N> PartialEq<MarkedNonNull<T, N>> for MarkedPtr<T, N> { #[inline] fn eq(&self, other: &MarkedNonNull<T, N>) -> bool { self.inner.eq(&other.inner.as_ptr()) } } /********** impl PartialOrd ***********************************************************************/ impl<T, N> PartialOrd for MarkedPtr<T, N> { #[inline] fn partial_cmp(&self, othe
random
[ { "content": "#[inline]\n\nfn compose<T, N: Unsigned>(ptr: *mut T, tag: usize) -> *mut T {\n\n debug_assert_eq!(ptr as usize & mark_mask::<T>(N::USIZE), 0);\n\n ((ptr as usize) | (mark_mask::<T>(N::USIZE) & tag)) as *mut _\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use core::ptr;\n\n\n\n use typenum::{Unsigned, U0, U1, U2, U3, U6};\n\n\n\n use crate::align::{\n\n Aligned1, Aligned1024, Aligned16, Aligned2, Aligned32, Aligned4, Aligned4096, Aligned64,\n\n Aligned8,\n\n };\n\n\n\n #[test]\n\n fn lower_bits() {\n\n assert_eq!(0, super::lower_bits::<Aligned1<u8>>());\n\n assert_eq!(1, super::lower_bits::<Aligned2<u8>>());\n", "file_path": "src/pointer/mod.rs", "rank": 0, "score": 205284.74890213806 }, { "content": "/// A trait that adds a method to ergonomically extract a `*mut T' from an\n\n/// [`Option`] of a non-nullable pointer or reference type.\n\npub trait UnwrapMutPtr: UnwrapPtr {\n\n /// Unwraps the [`Option`] and returns the contained value converted to a\n\n /// `mut` pointer or `null`.\n\n fn unwrap_mut_ptr(self) -> *mut <Self as UnwrapPtr>::Item;\n\n}\n\n\n\n/********** blanket impls *************************************************************************/\n\n\n\nimpl<'a, T> UnwrapMutPtr for Option<&'a mut T> {\n\n #[inline]\n\n fn unwrap_mut_ptr(self) -> *mut Self::Item {\n\n self.unwrap_ptr() as *mut _\n\n }\n\n}\n\n\n\nimpl<T> UnwrapMutPtr for Option<NonNull<T>> {\n\n #[inline]\n\n fn unwrap_mut_ptr(self) -> *mut Self::Item {\n\n self.unwrap_ptr() as *mut _\n\n }\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n// UnwrapUnchecked (trait)\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "src/util.rs", "rank": 1, "score": 95646.00623831023 }, { "content": "/// A trait that adds a method to ergonomically extract a `*const T' from an\n\n/// [`Option`] of a non-nullable pointer or reference type.\n\npub trait 
UnwrapPtr {\n\n /// The type to which the [`Option`] contains a pointer or reference.\n\n type Item: Sized;\n\n\n\n /// Unwraps the [`Option`] and returns the contained value converted to a\n\n /// `const` pointer or `null`.\n\n fn unwrap_ptr(self) -> *const Self::Item;\n\n}\n\n\n\n/********** blanket impls *************************************************************************/\n\n\n\nimpl<'a, T> UnwrapPtr for Option<&'a T> {\n\n type Item = T;\n\n\n\n #[inline]\n\n fn unwrap_ptr(self) -> *const Self::Item {\n\n match self {\n\n Some(value) => value as *const _,\n\n None => ptr::null(),\n\n }\n", "file_path": "src/util.rs", "rank": 2, "score": 70086.44684283354 }, { "content": "/// Trait for nullable and non-nullable *markable* pointer types.\n\npub trait MarkedPointer: Sized + Internal {\n\n /// The pointer type.\n\n type Pointer: MarkedNonNullable<Item = Self::Item, MarkBits = Self::MarkBits>;\n\n /// The pointed-to type.\n\n type Item: Sized;\n\n /// Number of bits available for tagging.\n\n type MarkBits: Unsigned;\n\n\n\n /// Returns the equivalent raw marked pointer.\n\n ///\n\n /// # Note\n\n ///\n\n /// For types like [`Shared`][crate::Owned], [`Shared`][crate::Shared] and\n\n /// [`Unlinked`][crate::Unlinked], which implement [`Deref`][core::ops::Deref]\n\n /// this method may conflict with inherent methods of the de-referenced type\n\n /// and goes against Rust's API guidelines.\n\n /// This is a deliberate trade-off for enabling more ergonomic usage of\n\n /// this method\n\n fn as_marked_ptr(&self) -> MarkedPtr<Self::Item, Self::MarkBits>;\n\n\n", "file_path": "src/pointer/mod.rs", "rank": 3, "score": 67142.8229784027 }, { "content": "/// Trait for pointer types that can be stored in an `Atomic`.\n\npub trait Store: MarkedPointer + Sized {\n\n type Reclaimer: Reclaim;\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n// Internal 
(trait)\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "src/internal.rs", "rank": 4, "score": 60346.1862974214 }, { "content": "/// Trait for pointer types that can be compared against in atomic\n\n/// *compare-and-swap* operations.\n\npub trait Compare: MarkedPointer + Sized {\n\n type Reclaimer: Reclaim;\n\n type Unlinked: MarkedPointer<Item = Self::Item, MarkBits = Self::MarkBits>;\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n// Store (trait)\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "src/internal.rs", "rank": 5, "score": 60346.075453901816 }, { "content": " pub fn compose(ptr: NonNull<T>, tag: usize) -> Self {\n\n debug_assert_eq!(0, ptr.as_ptr() as usize & Self::MARK_MASK, \"`ptr` is not well aligned\");\n\n unsafe { Self::from(NonNull::new_unchecked(pointer::compose::<_, N>(ptr.as_ptr(), tag))) }\n\n }\n\n\n\n /// Decomposes the marked pointer, returning the separated raw\n\n /// [`NonNull`] pointer and its tag.\n\n #[inline]\n\n pub fn decompose(self) -> (NonNull<T>, usize) {\n\n let (ptr, tag) = pointer::decompose(self.inner.as_ptr() as usize, Self::MARK_BITS);\n\n (unsafe { NonNull::new_unchecked(ptr) }, tag)\n\n }\n\n\n\n /// Decomposes the marked pointer, returning only the separated raw pointer.\n\n #[inline]\n\n pub fn decompose_ptr(self) -> *mut T {\n\n pointer::decompose_ptr(self.inner.as_ptr() as usize, Self::MARK_BITS)\n\n }\n\n\n\n /// Decomposes the marked pointer, returning only the separated raw\n", "file_path": "src/pointer/non_null.rs", "rank": 6, "score": 57235.440567825695 }, { "content": " Self::from(self.inner)\n\n }\n\n}\n\n\n\n/********** impl Copy *****************************************************************************/\n\n\n\nimpl<T, N> Copy for MarkedNonNull<T, N> {}\n\n\n\n/********** impl inherent 
*************************************************************************/\n\n\n\nimpl<T, N> MarkedNonNull<T, N> {\n\n /// Cast to a pointer of another type.\n\n #[inline]\n\n pub const fn cast<U>(self) -> MarkedNonNull<U, N> {\n\n MarkedNonNull { inner: self.inner.cast(), _marker: PhantomData }\n\n }\n\n\n\n /// Creates a new `MarkedNonNull` that is dangling, but well-aligned.\n\n ///\n\n /// This is useful for initializing types which lazily allocate, like\n", "file_path": "src/pointer/non_null.rs", "rank": 7, "score": 57232.08480692112 }, { "content": "}\n\n\n\n/********** impl Debug ****************************************************************************/\n\n\n\nimpl<T, N: Unsigned> fmt::Debug for MarkedNonNull<T, N> {\n\n #[inline]\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let (ptr, tag) = self.decompose();\n\n f.debug_struct(\"MarkedNonNull\").field(\"ptr\", &ptr).field(\"tag\", &tag).finish()\n\n }\n\n}\n\n\n\n/********** impl Pointer **************************************************************************/\n\n\n\nimpl<T, N: Unsigned> fmt::Pointer for MarkedNonNull<T, N> {\n\n #[inline]\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n fmt::Pointer::fmt(&self.decompose_non_null(), f)\n\n }\n\n}\n", "file_path": "src/pointer/non_null.rs", "rank": 8, "score": 57228.22436546987 }, { "content": " /// [`NonNull`] pointer.\n\n #[inline]\n\n pub fn decompose_non_null(self) -> NonNull<T> {\n\n unsafe {\n\n NonNull::new_unchecked(pointer::decompose_ptr(\n\n self.inner.as_ptr() as usize,\n\n Self::MARK_BITS,\n\n ))\n\n }\n\n }\n\n\n\n /// Decomposes the marked pointer, returning only the separated tag.\n\n #[inline]\n\n pub fn decompose_tag(self) -> usize {\n\n pointer::decompose_tag::<T>(self.inner.as_ptr() as usize, Self::MARK_BITS)\n\n }\n\n\n\n /// Decomposes the marked pointer, dereferences the the raw pointer and\n\n /// returns both the reference and the separated tag.\n\n ///\n", "file_path": "src/pointer/non_null.rs", 
"rank": 9, "score": 57227.531624399926 }, { "content": " ///\n\n /// # Safety\n\n ///\n\n /// `ptr` may be marked, but must be be neither an unmarked nor a marked\n\n /// null pointer.\n\n #[inline]\n\n pub unsafe fn new_unchecked(ptr: MarkedPtr<T, N>) -> Self {\n\n Self::from(NonNull::new_unchecked(ptr.inner))\n\n }\n\n\n\n /// Creates a new `MarkedNonNull` wrapped in a [`Marked`] if `ptr` is\n\n /// non-null.\n\n pub fn new(ptr: MarkedPtr<T, N>) -> Marked<Self> {\n\n match ptr.decompose() {\n\n (raw, _) if !raw.is_null() => unsafe { Value(Self::new_unchecked(ptr)) },\n\n (_, tag) => Null(tag),\n\n }\n\n }\n\n\n\n /// Clears the tag of `self` and returns the same but untagged pointer.\n", "file_path": "src/pointer/non_null.rs", "rank": 10, "score": 57227.49782892766 }, { "content": " #[inline]\n\n pub fn clear_tag(self) -> Self {\n\n Self::from(self.decompose_non_null())\n\n }\n\n\n\n /// Clears the tag of `self` and replaces it with `tag`.\n\n #[inline]\n\n pub fn with_tag(self, tag: usize) -> Self {\n\n Self::compose(self.decompose_non_null(), tag)\n\n }\n\n\n\n /// Converts the pointer to the equivalent [`MarkedPtr`].\n\n #[inline]\n\n pub fn into_marked_ptr(self) -> MarkedPtr<T, N> {\n\n MarkedPtr::new(self.inner.as_ptr())\n\n }\n\n\n\n /// Composes a new marked non-null pointer from a non-null pointer and a tag\n\n /// value.\n\n #[inline]\n", "file_path": "src/pointer/non_null.rs", "rank": 11, "score": 57227.26110427333 }, { "content": " /// `Vec::new` does.\n\n ///\n\n /// Note that the pointer value may potentially represent a valid pointer to\n\n /// a `T`, which means this must not be used as a \"not yet initialized\"\n\n /// sentinel value. 
Types that lazily allocate must track initialization by\n\n /// some other means.\n\n #[inline]\n\n pub const fn dangling() -> Self {\n\n Self { inner: NonNull::dangling(), _marker: PhantomData }\n\n }\n\n}\n\n\n\nimpl<T, N: Unsigned> MarkedNonNull<T, N> {\n\n /// The number of available mark bits for this type.\n\n pub const MARK_BITS: usize = N::USIZE;\n\n /// The bitmask for the lower markable bits.\n\n pub const MARK_MASK: usize = pointer::mark_mask::<T>(Self::MARK_BITS);\n\n /// The bitmask for the (higher) pointer bits.\n\n pub const POINTER_MASK: usize = !Self::MARK_MASK;\n\n\n", "file_path": "src/pointer/non_null.rs", "rank": 12, "score": 57226.63503039589 }, { "content": " /// Returns the inner pointer *as is*, meaning potential tags are not\n\n /// stripped.\n\n #[inline]\n\n pub fn into_non_null(self) -> NonNull<T> {\n\n self.inner\n\n }\n\n\n\n /// Converts a marked non-null pointer with `M` potential mark bits to the\n\n /// **same** marked pointer with `N` potential mark bits, requires that\n\n /// `N >= M`.\n\n #[inline]\n\n pub fn convert<M: Unsigned>(other: MarkedNonNull<T, M>) -> Self\n\n where\n\n N: IsGreaterOrEqual<M, Output = True>,\n\n {\n\n Self::from(other.inner)\n\n }\n\n\n\n /// Creates a new `MarkedNonNull` from a marked pointer without checking\n\n /// for `null`.\n", "file_path": "src/pointer/non_null.rs", "rank": 13, "score": 57226.60322047743 }, { "content": "use core::cmp;\n\nuse core::convert::TryFrom;\n\nuse core::fmt;\n\nuse core::marker::PhantomData;\n\nuse core::ptr::NonNull;\n\n\n\nuse typenum::{IsGreaterOrEqual, True, Unsigned};\n\n\n\nuse crate::internal::Internal;\n\nuse crate::pointer::{\n\n self, InvalidNullError,\n\n Marked::{self, Null, Value},\n\n MarkedNonNull, MarkedNonNullable, MarkedPtr,\n\n};\n\n\n\n/********** impl Clone ****************************************************************************/\n\n\n\nimpl<T, N> Clone for MarkedNonNull<T, N> {\n\n #[inline]\n\n fn clone(&self) -> Self {\n", "file_path": 
"src/pointer/non_null.rs", "rank": 14, "score": 57225.81700099179 }, { "content": "\n\n/********** impl From *****************************************************************************/\n\n\n\nimpl<T, N> From<NonNull<T>> for MarkedNonNull<T, N> {\n\n #[inline]\n\n fn from(ptr: NonNull<T>) -> Self {\n\n Self { inner: ptr, _marker: PhantomData }\n\n }\n\n}\n\n\n\nimpl<'a, T, N: Unsigned> From<&'a T> for MarkedNonNull<T, N> {\n\n #[inline]\n\n fn from(reference: &'a T) -> Self {\n\n Self::from(NonNull::from(reference))\n\n }\n\n}\n\n\n\nimpl<'a, T, N: Unsigned> From<&'a mut T> for MarkedNonNull<T, N> {\n\n #[inline]\n\n fn from(reference: &'a mut T) -> Self {\n", "file_path": "src/pointer/non_null.rs", "rank": 15, "score": 57224.41688661059 }, { "content": " #[inline]\n\n pub unsafe fn decompose_mut(&mut self) -> (&mut T, usize) {\n\n let (ptr, tag) = self.decompose();\n\n (&mut *ptr.as_ptr(), tag)\n\n }\n\n\n\n /// Decomposes the marked pointer, mutably dereferences the the raw pointer\n\n /// and returns both the mutable reference and the separated tag. 
The\n\n /// returned reference is not bound to the lifetime of the `MarkedNonNull`.\n\n ///\n\n /// # Safety\n\n ///\n\n /// This is unsafe because it cannot verify the validity of the returned\n\n /// pointer, nor can it ensure that the lifetime `'a` returned is indeed a\n\n /// valid lifetime for the contained data.\n\n #[inline]\n\n pub unsafe fn decompose_mut_unbounded<'a>(&mut self) -> (&'a mut T, usize) {\n\n let (ptr, tag) = self.decompose();\n\n (&mut *ptr.as_ptr(), tag)\n\n }\n", "file_path": "src/pointer/non_null.rs", "rank": 16, "score": 57223.80704637626 }, { "content": "impl<T, N> PartialEq for MarkedNonNull<T, N> {\n\n #[inline]\n\n fn eq(&self, other: &Self) -> bool {\n\n self.inner == other.inner\n\n }\n\n}\n\n\n\nimpl<T, N> PartialEq<MarkedPtr<T, N>> for MarkedNonNull<T, N> {\n\n #[inline]\n\n fn eq(&self, other: &MarkedPtr<T, N>) -> bool {\n\n self.inner.as_ptr() == other.inner\n\n }\n\n}\n\n\n\n/********** impl PartialOrd ***********************************************************************/\n\n\n\nimpl<T, N> PartialOrd for MarkedNonNull<T, N> {\n\n #[inline]\n\n fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {\n\n self.inner.partial_cmp(&other.inner)\n", "file_path": "src/pointer/non_null.rs", "rank": 17, "score": 57221.84542134119 }, { "content": " /// The resulting lifetime is bound to self so this behaves \"as if\"\n\n /// it were actually an instance of T that is getting borrowed. If a longer\n\n /// (unbound) lifetime is needed, use e.g. `&*my_ptr.decompose_ptr()`.\n\n ///\n\n /// # Safety\n\n ///\n\n /// This is unsafe because it cannot verify the validity of the returned\n\n /// pointer.\n\n #[inline]\n\n pub unsafe fn decompose_ref(&self) -> (&T, usize) {\n\n let (ptr, tag) = self.decompose();\n\n (&*ptr.as_ptr(), tag)\n\n }\n\n\n\n /// Decomposes the marked pointer, dereferences the the raw pointer and\n\n /// returns both the reference and the separated tag. 
The returned reference\n\n /// is not bound to the lifetime of the `MarkedNonNull`.\n\n ///\n\n /// # Safety\n\n ///\n", "file_path": "src/pointer/non_null.rs", "rank": 18, "score": 57219.53331779437 }, { "content": " Self::from(NonNull::from(reference))\n\n }\n\n}\n\n\n\n/********** impl TryFrom **************************************************************************/\n\n\n\nimpl<T, N: Unsigned> TryFrom<MarkedPtr<T, N>> for MarkedNonNull<T, N> {\n\n type Error = InvalidNullError;\n\n\n\n #[inline]\n\n fn try_from(ptr: MarkedPtr<T, N>) -> Result<Self, Self::Error> {\n\n match ptr.decompose() {\n\n (raw, _) if raw.is_null() => Err(InvalidNullError),\n\n _ => unsafe { Ok(MarkedNonNull::new_unchecked(ptr)) },\n\n }\n\n }\n\n}\n\n\n\n/********** impl PartialEq ************************************************************************/\n\n\n", "file_path": "src/pointer/non_null.rs", "rank": 19, "score": 57219.13940351794 }, { "content": " /// This is unsafe because it cannot verify the validity of the returned\n\n /// pointer, nor can it ensure that the lifetime `'a` returned is indeed a\n\n /// valid lifetime for the contained data.\n\n #[inline]\n\n pub unsafe fn decompose_ref_unbounded<'a>(self) -> (&'a T, usize) {\n\n let (ptr, tag) = self.decompose();\n\n (&*ptr.as_ptr(), tag)\n\n }\n\n\n\n /// Decomposes the marked pointer, mutably dereferences the the raw pointer\n\n /// and returns both the mutable reference and the separated tag.\n\n ///\n\n /// The resulting lifetime is bound to self so this behaves \"as if\"\n\n /// it were actually an instance of T that is getting borrowed. If a longer\n\n /// (unbound) lifetime is needed, use e.g. 
`&mut *my_ptr.decompose_ptr()`.\n\n ///\n\n /// # Safety\n\n ///\n\n /// This is unsafe because it cannot verify the validity of the returned\n\n /// pointer.\n", "file_path": "src/pointer/non_null.rs", "rank": 20, "score": 57219.04850453252 }, { "content": " ///\n\n /// This is unsafe because it cannot verify the validity of the returned\n\n /// pointer.\n\n #[inline]\n\n pub unsafe fn as_mut(&mut self) -> &mut T {\n\n &mut *self.decompose_non_null().as_ptr()\n\n }\n\n\n\n /// Decomposes the marked pointer, returning only the mutably de-referenced\n\n /// raw pointer, which is not bound to the lifetime of the `MarkedNonNull`.\n\n ///\n\n /// # Safety\n\n ///\n\n /// This is unsafe because it cannot verify the validity of the returned\n\n /// pointer, nor can it ensure that the lifetime `'a` returned is indeed a\n\n /// valid lifetime for the contained data.\n\n #[inline]\n\n pub unsafe fn as_mut_unbounded<'a>(self) -> &'a mut T {\n\n &mut *self.decompose_non_null().as_ptr()\n\n }\n", "file_path": "src/pointer/non_null.rs", "rank": 21, "score": 57217.72994904403 }, { "content": " }\n\n}\n\n\n\nimpl<T, N> PartialOrd<MarkedPtr<T, N>> for MarkedNonNull<T, N> {\n\n #[inline]\n\n fn partial_cmp(&self, other: &MarkedPtr<T, N>) -> Option<cmp::Ordering> {\n\n self.inner.as_ptr().partial_cmp(&other.inner)\n\n }\n\n}\n\n\n\n/********** impl Eq *******************************************************************************/\n\n\n\nimpl<T, N> Eq for MarkedNonNull<T, N> {}\n\n\n\n/********** impl Ord ******************************************************************************/\n\n\n\nimpl<T, N> Ord for MarkedNonNull<T, N> {\n\n #[inline]\n\n fn cmp(&self, other: &Self) -> cmp::Ordering {\n\n self.inner.cmp(&other.inner)\n", "file_path": "src/pointer/non_null.rs", "rank": 22, "score": 57217.384653590045 }, { "content": " ///\n\n /// # Safety\n\n ///\n\n /// This is unsafe because it cannot verify the validity of the returned\n\n /// pointer, nor can it ensure that the lifetime 
`'a` returned is indeed a\n\n /// valid lifetime for the contained data.\n\n #[inline]\n\n pub unsafe fn as_ref_unbounded<'a>(self) -> &'a T {\n\n &*self.decompose_non_null().as_ptr()\n\n }\n\n\n\n /// Decomposes the marked pointer, returning only the mutably de-referenced\n\n /// raw pointer.\n\n ///\n\n /// The resulting lifetime is bound to self so this behaves \"as if\"\n\n /// it were actually an instance of T that is getting borrowed. If a longer\n\n /// (unbound) lifetime is needed, use e.g. `&mut *my_ptr.decompose_ptr()`\n\n /// or [`as_mut_unbounded`][MarkedNonNull::as_ref_unbounded].\n\n ///\n\n /// # Safety\n", "file_path": "src/pointer/non_null.rs", "rank": 23, "score": 57216.000989236054 }, { "content": " }\n\n}\n\n\n\n/********** impl NonNullable **********************************************************************/\n\n\n\nimpl<T, N: Unsigned> MarkedNonNullable for MarkedNonNull<T, N> {\n\n type Item = T;\n\n type MarkBits = N;\n\n\n\n #[inline]\n\n fn into_marked_non_null(self) -> MarkedNonNull<Self::Item, Self::MarkBits> {\n\n self\n\n }\n\n}\n\n\n\n/********** impl Internal *************************************************************************/\n\n\n\nimpl<T, N: Unsigned> Internal for MarkedNonNull<T, N> {}\n\n\n\n#[cfg(test)]\n", "file_path": "src/pointer/non_null.rs", "rank": 24, "score": 57214.01802844654 }, { "content": "mod tests {\n\n use std::ptr;\n\n\n\n use typenum::U2;\n\n\n\n use crate::align::Aligned4;\n\n\n\n type MarkedPtr<T> = crate::pointer::MarkedPtr<T, U2>;\n\n type MarkedNonNull<T> = crate::pointer::MarkedNonNull<T, U2>;\n\n\n\n #[test]\n\n fn new() {\n\n let reference = &mut Aligned4(1);\n\n let unmarked = MarkedPtr::new(reference);\n\n\n\n let marked = MarkedNonNull::new(unmarked);\n\n assert_eq!(unsafe { marked.unwrap_value().decompose_ref() }, (&Aligned4(1), 0));\n\n\n\n let marked = MarkedNonNull::new(MarkedPtr::compose(reference, 0b11));\n\n assert_eq!(unsafe { marked.unwrap_value().decompose_ref() }, (&Aligned4(1), 
0b11));\n", "file_path": "src/pointer/non_null.rs", "rank": 25, "score": 57213.721943636985 }, { "content": "\n\n /// Decomposes the marked pointer, returning only the de-referenced raw\n\n /// pointer.\n\n ///\n\n /// The resulting lifetime is bound to self so this behaves \"as if\" it were\n\n /// actually an instance of T that is getting borrowed. If a longer\n\n /// (unbound) lifetime is needed, use e.g. `&*my_ptr.decompose_ptr()`\n\n /// or [`as_ref_unbounded`][MarkedNonNull::as_ref_unbounded].\n\n ///\n\n /// # Safety\n\n ///\n\n /// This is unsafe because it cannot verify the validity of the returned\n\n /// pointer.\n\n #[inline]\n\n pub unsafe fn as_ref(&self) -> &T {\n\n &*self.decompose_non_null().as_ptr()\n\n }\n\n\n\n /// Decomposes the marked pointer, returning only the de-referenced raw\n\n /// pointer, which is not bound to the lifetime of the `MarkedNonNull`.\n", "file_path": "src/pointer/non_null.rs", "rank": 26, "score": 57213.63482360698 }, { "content": "\n\n let null: *mut Aligned4<i32> = ptr::null_mut();\n\n let marked = MarkedNonNull::new(MarkedPtr::compose(null, 0b11));\n\n assert!(marked.is_null());\n\n assert_eq!(marked.unwrap_null(), 0b11);\n\n\n\n let marked = MarkedNonNull::new(MarkedPtr::compose(null, 0));\n\n assert!(marked.is_null());\n\n }\n\n}\n", "file_path": "src/pointer/non_null.rs", "rank": 27, "score": 57204.548505000734 }, { "content": "/// A sealed (internal) trait for non-nullable marked pointer types.\n\npub trait MarkedNonNullable: Sized + Internal {\n\n /// The pointed-to type.\n\n type Item: Sized;\n\n /// Number of bits available for tagging.\n\n type MarkBits: Unsigned;\n\n\n\n /// Converts the given `Self` into a equivalent marked non-null pointer.\n\n ///\n\n /// # Note\n\n ///\n\n /// For types like [`Shared`][crate::Owned], [`Shared`][crate::Shared] and\n\n /// [`Unlinked`][crate::Unlinked], which implement [`Deref`][core::ops::Deref]\n\n /// this method may conflict with inherent methods of the de-referenced 
type\n\n /// and goes against Rust's API guidelines.\n\n /// This is a deliberate trade-off for enabling more ergonomic usage of\n\n /// this method\n\n fn into_marked_non_null(self) -> MarkedNonNull<Self::Item, Self::MarkBits>;\n\n}\n\n\n\n/********** blanket impls *************************************************************************/\n", "file_path": "src/pointer/mod.rs", "rank": 28, "score": 56268.80861427961 }, { "content": "fn main() {}\n", "file_path": "examples/treiber.rs", "rank": 29, "score": 48957.367072513174 }, { "content": "/// A general purpose sealed marker trait for all relevant types of this crate.\n\npub trait Internal {}\n\n\n\nimpl<'a, T> Internal for &'a T {}\n\nimpl<'a, T> Internal for &'a mut T {}\n\nimpl<T> Internal for NonNull<T> {}\n", "file_path": "src/internal.rs", "rank": 30, "score": 43135.247085658586 }, { "content": "/// A trait for adding an `unsafe` unwrapping method to [`Option`] like types.\n\npub trait UnwrapUnchecked {\n\n /// The contained type that will be unwrapped.\n\n type Item: Sized;\n\n\n\n /// Unwraps the contained item in an [`Option`] like type **without**\n\n /// checking if the value actually exists.\n\n ///\n\n /// # Safety\n\n ///\n\n /// The caller has to ensure `self` actually contains an item, otherwise,\n\n /// there will be undefined behaviour.\n\n ///\n\n /// # Panics\n\n ///\n\n /// This method may panic in debug builds, if it is called on a value that\n\n /// does not contain an item\n\n unsafe fn unwrap_unchecked(self) -> Self::Item;\n\n}\n\n\n\n/********** blanket impls *************************************************************************/\n", "file_path": "src/util.rs", "rank": 31, "score": 41681.71919433652 }, { "content": "/// TODO: Docs...\n\npub trait StoreRetired {\n\n type Reclaimer: Reclaim;\n\n\n\n /// TODO: Docs...\n\n unsafe fn retire(&self, record: Retired<Self::Reclaimer>);\n\n}\n", "file_path": "src/traits.rs", "rank": 32, "score": 41678.04402306592 }, { "content": "/// A sealed 
trait for abstracting over different types for valid guard references.\n\n///\n\n/// For guard types implementing only the [`Protect`](crate::Protect) trait,\n\n/// this trait is only implemented for *mutable* references to this type.\n\n/// For guard types that also implement the\n\n/// [`ProtectRegion`](crate::ProtectRegion) trait, this trait is also\n\n/// implemented for *shared* references.\n\npub trait GuardRef<'g> {\n\n type Reclaimer: Reclaim;\n\n\n\n fn load_protected<T, N: Unsigned>(\n\n self,\n\n atomic: &Atomic<T, Self::Reclaimer, N>,\n\n order: Ordering,\n\n ) -> Marked<Shared<'g, T, Self::Reclaimer, N>>;\n\n\n\n fn load_protected_if_equal<T, N: Unsigned>(\n\n self,\n\n atomic: &Atomic<T, Self::Reclaimer, N>,\n\n expected: MarkedPtr<T, N>,\n\n order: Ordering,\n\n ) -> AcquireResult<'g, T, Self::Reclaimer, N>;\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n// Compare (trait)\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "src/internal.rs", "rank": 33, "score": 40109.344112354585 }, { "content": "//! Thin wrapper types for adjusting a type's alignment to increase the number of markable lower\n\n//! bits.\n\n\n\npub use self::Aligned64 as CacheAligned;\n\n\n\nuse core::borrow::{Borrow, BorrowMut};\n\nuse core::ops::{Deref, DerefMut};\n\n\n\nmacro_rules! 
impl_align {\n\n ($(struct align($align:expr) $wrapper:ident;)*) => {\n\n $(\n\n #[doc = \"A thin wrapper type with an alignment of at least $align bytes.\"]\n\n #[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialEq, PartialOrd)]\n\n #[repr(align($align))]\n\n pub struct $wrapper<T>(pub T);\n\n\n\n impl<T> $wrapper<T> {\n\n /// Returns a reference to the inner value.\n\n pub fn get(aligned: &Self) -> &T {\n\n &aligned.0\n", "file_path": "src/align.rs", "rank": 34, "score": 33241.73910572985 }, { "content": " impl<T> BorrowMut<T> for $wrapper<T> {\n\n #[inline]\n\n fn borrow_mut(&mut self) -> &mut T {\n\n &mut self.0\n\n }\n\n }\n\n )*\n\n };\n\n}\n\n\n\nimpl_align! {\n\n struct align(1) Aligned1;\n\n struct align(2) Aligned2;\n\n struct align(4) Aligned4;\n\n struct align(8) Aligned8;\n\n struct align(16) Aligned16;\n\n struct align(32) Aligned32;\n\n struct align(64) Aligned64;\n\n struct align(128) Aligned128;\n\n struct align(256) Aligned256;\n", "file_path": "src/align.rs", "rank": 35, "score": 33225.93193889111 }, { "content": " }\n\n }\n\n\n\n impl<T> Deref for $wrapper<T> {\n\n type Target = T;\n\n\n\n #[inline]\n\n fn deref(&self) -> &Self::Target {\n\n &self.0\n\n }\n\n }\n\n\n\n impl<T> DerefMut for $wrapper<T> {\n\n #[inline]\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.0\n\n }\n\n }\n\n\n\n impl<T> AsRef<T> for $wrapper<T> {\n", "file_path": "src/align.rs", "rank": 36, "score": 33225.75475596845 }, { "content": " #[inline]\n\n fn as_ref(&self) -> &T {\n\n &self.0\n\n }\n\n }\n\n\n\n impl<T> AsMut<T> for $wrapper<T> {\n\n #[inline]\n\n fn as_mut(&mut self) -> &mut T {\n\n &mut self.0\n\n }\n\n }\n\n\n\n impl<T> Borrow<T> for $wrapper<T> {\n\n #[inline]\n\n fn borrow(&self) -> &T {\n\n &self.0\n\n }\n\n }\n\n\n", "file_path": "src/align.rs", "rank": 37, "score": 33222.40531219305 }, { "content": " struct align(0x10000000) Aligned512M;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::mem;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n 
fn alignments() {\n\n assert_eq!(mem::align_of::<Aligned8<u8>>(), 8);\n\n assert_eq!(mem::align_of::<Aligned16<u8>>(), 16);\n\n assert_eq!(mem::align_of::<Aligned32<u8>>(), 32);\n\n assert_eq!(mem::align_of::<Aligned64<u8>>(), 64);\n\n assert_eq!(mem::align_of::<Aligned128<u8>>(), 128);\n\n assert_eq!(mem::align_of::<Aligned256<u8>>(), 256);\n\n assert_eq!(mem::align_of::<Aligned512<u8>>(), 512);\n\n assert_eq!(mem::align_of::<Aligned1024<u8>>(), 1024);\n\n assert_eq!(mem::align_of::<Aligned2048<u8>>(), 2048);\n", "file_path": "src/align.rs", "rank": 38, "score": 33218.09469828699 }, { "content": " struct align(512) Aligned512;\n\n struct align(1024) Aligned1024;\n\n struct align(2048) Aligned2048;\n\n struct align(4096) Aligned4096;\n\n struct align(0x2000) Aligned8k;\n\n struct align(0x4000) Aligned16k;\n\n struct align(0x8000) Aligned32k;\n\n struct align(0x10000) Aligned64k;\n\n struct align(0x20000) Aligned128k;\n\n struct align(0x40000) Aligned256k;\n\n struct align(0x80000) Aligned512k;\n\n struct align(0x100000) Aligned1M;\n\n struct align(0x200000) Aligned2M;\n\n struct align(0x400000) Aligned4M;\n\n struct align(0x800000) Aligned8M;\n\n struct align(0x1000000) Aligned16M;\n\n struct align(0x2000000) Aligned32M;\n\n struct align(0x4000000) Aligned64M;\n\n struct align(0x8000000) Aligned128M;\n\n struct align(0x10000000) Aligned256M;\n", "file_path": "src/align.rs", "rank": 39, "score": 33215.25904980047 }, { "content": " assert_eq!(mem::align_of::<Aligned4096<u8>>(), 4096);\n\n }\n\n\n\n #[test]\n\n fn construct_and_deref() {\n\n let value = Aligned8(255u8);\n\n assert_eq!(*value, 255);\n\n\n\n let value = CacheAligned(1u8);\n\n assert_eq!(*value, 1);\n\n }\n\n}\n", "file_path": "src/align.rs", "rank": 40, "score": 33214.262663497495 }, { "content": "use core::fmt;\n\nuse core::marker::PhantomData;\n\nuse core::sync::atomic::{AtomicUsize, Ordering};\n\n\n\nuse typenum::Unsigned;\n\n\n\nuse crate::pointer::{self, AtomicMarkedPtr, 
MarkedPtr};\n\n\n\n/********** impl Send + Sync **********************************************************************/\n\n\n\nunsafe impl<T, N> Send for AtomicMarkedPtr<T, N> {}\n\nunsafe impl<T, N> Sync for AtomicMarkedPtr<T, N> {}\n\n\n\n/********** impl inherent *************************************************************************/\n\n\n\nimpl<T, N> AtomicMarkedPtr<T, N> {\n\n /// Creates a new & unmarked `null` pointer.\n\n #[inline]\n\n pub const fn null() -> Self {\n\n Self { inner: AtomicUsize::new(0), _marker: PhantomData }\n", "file_path": "src/pointer/atomic.rs", "rank": 44, "score": 29255.477706438738 }, { "content": " }\n\n}\n\n\n\n/********** impl Default **************************************************************************/\n\n\n\nimpl<T, N: Unsigned> Default for AtomicMarkedPtr<T, N> {\n\n #[inline]\n\n fn default() -> Self {\n\n Self::null()\n\n }\n\n}\n\n\n\n/********** impl Debug ****************************************************************************/\n\n\n\nimpl<T, N: Unsigned> fmt::Debug for AtomicMarkedPtr<T, N> {\n\n #[inline]\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let (ptr, tag) = self.load(Ordering::SeqCst).decompose();\n\n f.debug_struct(\"AtomicMarkedPtr\").field(\"ptr\", &ptr).field(\"tag\", &tag).finish()\n\n }\n", "file_path": "src/pointer/atomic.rs", "rank": 48, "score": 29251.99705894602 }, { "content": "}\n\n\n\nimpl<T: MarkedNonNullable + MarkedPointer> Marked<T> {\n\n /// Decomposes the inner marked pointer, returning only the separated tag.\n\n #[inline]\n\n pub fn decompose_tag(&self) -> usize {\n\n match self {\n\n Value(ptr) => ptr.as_marked_ptr().decompose_tag(),\n\n Null(tag) => *tag,\n\n }\n\n }\n\n}\n\n\n\n/********** impl Default **************************************************************************/\n\n\n\nimpl<T: MarkedNonNullable> Default for Marked<T> {\n\n #[inline]\n\n fn default() -> Self {\n\n Null(0)\n\n }\n", "file_path": "src/pointer/marked.rs", "rank": 49, "score": 
29250.21789567574 }, { "content": " }\n\n}\n\n\n\n/********** impl inherent *************************************************************************/\n\n\n\nimpl<T, N: Unsigned> AtomicMarkedPtr<T, N> {\n\n /// The number of available mark bits for this type.\n\n pub const MARK_BITS: usize = N::USIZE;\n\n /// The bitmask for the lower markable bits.\n\n pub const MARK_MASK: usize = pointer::mark_mask::<T>(Self::MARK_BITS);\n\n /// The bitmask for the (higher) pointer bits.\n\n pub const POINTER_MASK: usize = !Self::MARK_MASK;\n\n\n\n /// Creates a new `AtomicMarkedPtr`.\n\n #[inline]\n\n pub fn new(ptr: MarkedPtr<T, N>) -> Self {\n\n Self { inner: AtomicUsize::new(ptr.inner as usize), _marker: PhantomData }\n\n }\n\n\n\n /// Consumes `self` and returns the inner [`MarkedPtr`](crate::pointer::MarkedPtr)\n", "file_path": "src/pointer/atomic.rs", "rank": 51, "score": 29249.742099585248 }, { "content": "\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n// MarkedPtr\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n\n\n/// A raw, unsafe pointer type like `*mut T` in which up to `N` of the pointer's\n\n/// lower bits can be used to store additional information (the *tag*).\n\n///\n\n/// Note, that the upper bound for `N` is dictated by the alignment of `T`.\n\n/// A type with an alignment of `8` (e.g. 
a `usize` on 64-bit architectures) can\n\n/// have up to `3` mark bits.\n\n/// Attempts to use types with insufficient alignment will result in a compile-\n\n/// time error.\n\npub struct MarkedPtr<T, N> {\n\n inner: *mut T,\n\n _marker: PhantomData<N>,\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n// MarkedNonNull\n", "file_path": "src/pointer/mod.rs", "rank": 52, "score": 29249.600393613408 }, { "content": " Null(tag) => Null(tag),\n\n }\n\n }\n\n\n\n /// Applies a function to the contained value (if any), or computes a\n\n /// default value using `func`, if no value is contained.\n\n #[inline]\n\n pub fn map_or_else<U: MarkedNonNullable>(\n\n self,\n\n default: impl FnOnce(usize) -> U,\n\n func: impl FnOnce(T) -> U,\n\n ) -> U {\n\n match self {\n\n Value(ptr) => func(ptr),\n\n Null(tag) => default(tag),\n\n }\n\n }\n\n\n\n /// Converts `self` from `Marked<T>` to [`Option<T>`][Option].\n\n #[inline]\n", "file_path": "src/pointer/marked.rs", "rank": 53, "score": 29248.123609521976 }, { "content": " Value(T),\n\n /// A null pointer that may be marked, in which case the `usize` is\n\n /// non-zero.\n\n Null(usize),\n\n}\n\n\n\n/********** blanket impls *************************************************************************/\n\n\n\nimpl<U, T, N: Unsigned> MarkedPointer for Option<U>\n\nwhere\n\n U: MarkedPointer<Pointer = U, Item = T, MarkBits = N>\n\n + MarkedNonNullable<Item = T, MarkBits = N>,\n\n{\n\n type Pointer = U;\n\n type Item = T;\n\n type MarkBits = N;\n\n\n\n #[inline]\n\n fn as_marked_ptr(&self) -> MarkedPtr<Self::Item, Self::MarkBits> {\n\n match self {\n", "file_path": "src/pointer/mod.rs", "rank": 56, "score": 29247.1060634496 }, { "content": " Null(tag) => tag,\n\n _ => panic!(\"called `Marked::unwrap_tag()` on a `Value`\"),\n\n }\n\n }\n\n\n\n /// Returns the contained value or the result of the given `func`.\n\n #[inline]\n\n pub fn unwrap_value_or_else(self, func: impl 
(FnOnce(usize) -> T)) -> T {\n\n match self {\n\n Value(ptr) => ptr,\n\n Null(tag) => func(tag),\n\n }\n\n }\n\n\n\n /// Maps a `Marked<T>` to `Marked<U>` by applying a function to a contained\n\n /// value.\n\n #[inline]\n\n pub fn map<U: MarkedNonNullable>(self, func: impl (FnOnce(T) -> U)) -> Marked<U> {\n\n match self {\n\n Value(ptr) => Value(func(ptr)),\n", "file_path": "src/pointer/marked.rs", "rank": 57, "score": 29245.328127319754 }, { "content": "/// a [`MarkedPtr`][marked] instead of `*mut T`.\n\n///\n\n/// [atomic]: std::sync::atomic::AtomicPtr\n\n/// [marked]: MarkedPtr\n\npub struct AtomicMarkedPtr<T, N> {\n\n inner: AtomicUsize,\n\n _marker: PhantomData<(*mut T, N)>,\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n// Marked (enum)\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n\n\n/// A value that represents the possible states of a nullable marked pointer.\n\n///\n\n/// This type is similar to [`Option<T>`][Option] but can also express `null`\n\n/// pointers with mark bits.\n\n#[derive(Copy, Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]\n\npub enum Marked<T: MarkedNonNullable> {\n\n /// A marked, non-nullable pointer or reference value.\n", "file_path": "src/pointer/mod.rs", "rank": 59, "score": 29243.98451214297 }, { "content": "\n\n #[inline]\n\n fn as_marked_ptr(&self) -> MarkedPtr<Self::Item, Self::MarkBits> {\n\n match self {\n\n Value(ptr) => Self::Pointer::as_marked_ptr(ptr),\n\n Null(tag) => MarkedPtr::compose(ptr::null_mut(), *tag),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn into_marked_ptr(self) -> MarkedPtr<Self::Item, Self::MarkBits> {\n\n match self {\n\n Value(ptr) => Self::Pointer::into_marked_ptr(ptr),\n\n Null(tag) => MarkedPtr::compose(ptr::null_mut(), tag),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn marked(marked: Self, tag: usize) -> Marked<Self::Pointer> {\n\n match marked {\n", "file_path": "src/pointer/mod.rs", 
"rank": 60, "score": 29243.55981348711 }, { "content": " Some(Self::Pointer::from_marked_non_null(MarkedNonNull::new_unchecked(marked)))\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n #[inline]\n\n unsafe fn from_marked_non_null(marked: MarkedNonNull<Self::Item, Self::MarkBits>) -> Self {\n\n Some(Self::Pointer::from_marked_non_null(marked))\n\n }\n\n}\n\n\n\nimpl<U, T, N: Unsigned> MarkedPointer for Marked<U>\n\nwhere\n\n U: MarkedPointer<Pointer = U, Item = T, MarkBits = N>\n\n + MarkedNonNullable<Item = T, MarkBits = N>,\n\n{\n\n type Pointer = U;\n\n type Item = T;\n\n type MarkBits = N;\n", "file_path": "src/pointer/mod.rs", "rank": 61, "score": 29243.250794020827 }, { "content": "/// non-nullable pointer types.\n\n#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Ord, PartialOrd)]\n\npub struct InvalidNullError;\n\n\n\n/********** impl Display **************************************************************************/\n\n\n\nimpl fmt::Display for InvalidNullError {\n\n #[inline]\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"failed conversion of null pointer to non-nullable type\")\n\n }\n\n}\n\n\n\n/********** impl Error ****************************************************************************/\n\n\n\n#[cfg(feature = \"std\")]\n\nimpl Error for InvalidNullError {}\n\n\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n// NonNullable (trait)\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n\n", "file_path": "src/pointer/mod.rs", "rank": 62, "score": 29243.162271897945 }, { "content": "use core::mem;\n\n\n\nuse crate::pointer::{\n\n Marked::{self, Null, Value},\n\n MarkedNonNullable,\n\n};\n\nuse crate::MarkedPointer;\n\n\n\n/********** impl inherent *************************************************************************/\n\n\n\nimpl<T: MarkedNonNullable> Marked<T> {\n\n /// Returns `true` if the marked value contains 
a [`Value`].\n\n #[inline]\n\n pub fn is_value(&self) -> bool {\n\n match *self {\n\n Value(_) => true,\n\n _ => false,\n\n }\n\n }\n\n\n", "file_path": "src/pointer/marked.rs", "rank": 63, "score": 29242.862910897664 }, { "content": " pub fn as_mut(&mut self) -> Marked<&mut T> {\n\n match self {\n\n Value(value) => Value(value),\n\n Null(tag) => Null(*tag),\n\n }\n\n }\n\n\n\n /// Moves the pointer out of the `Marked` if it is [`Value(ptr)`][Value].\n\n #[inline]\n\n pub fn unwrap_value(self) -> T {\n\n match self {\n\n Value(ptr) => ptr,\n\n _ => panic!(\"called `Marked::unwrap_value()` on a `Null` value\"),\n\n }\n\n }\n\n\n\n /// Extracts the tag out of the `Marked` if it is [`Null(tag)`][Null].\n\n #[inline]\n\n pub fn unwrap_null(self) -> usize {\n\n match self {\n", "file_path": "src/pointer/marked.rs", "rank": 64, "score": 29242.305909577542 }, { "content": "impl<T, N: Unsigned> From<*mut T> for AtomicMarkedPtr<T, N> {\n\n #[inline]\n\n fn from(ptr: *mut T) -> Self {\n\n AtomicMarkedPtr::new(MarkedPtr::from(ptr))\n\n }\n\n}\n\n\n\nimpl<T, N: Unsigned> From<MarkedPtr<T, N>> for AtomicMarkedPtr<T, N> {\n\n #[inline]\n\n fn from(ptr: MarkedPtr<T, N>) -> Self {\n\n AtomicMarkedPtr::new(ptr)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use core::ptr;\n\n use core::sync::atomic::Ordering;\n\n\n\n use typenum::U3;\n", "file_path": "src/pointer/atomic.rs", "rank": 65, "score": 29242.133377424932 }, { "content": "}\n\n\n\n/********** impl Pointer **************************************************************************/\n\n\n\nimpl<T, N: Unsigned> fmt::Pointer for AtomicMarkedPtr<T, N> {\n\n #[inline]\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n fmt::Pointer::fmt(&self.load(Ordering::SeqCst).decompose_ptr(), f)\n\n }\n\n}\n\n\n\n/********** impl From *****************************************************************************/\n\n\n\nimpl<T, N: Unsigned> From<*const T> for AtomicMarkedPtr<T, N> {\n\n #[inline]\n\n fn from(ptr: *const T) -> 
Self {\n\n AtomicMarkedPtr::new(MarkedPtr::from(ptr))\n\n }\n\n}\n\n\n", "file_path": "src/pointer/atomic.rs", "rank": 67, "score": 29241.597804975154 }, { "content": "\n\n #[inline]\n\n fn unmarked(opt: Self) -> Self {\n\n opt.map(Self::Pointer::unmarked)\n\n }\n\n\n\n #[inline]\n\n fn decompose(opt: Self) -> (Self, usize) {\n\n match opt {\n\n Some(ptr) => {\n\n let (ptr, tag) = Self::Pointer::decompose(ptr);\n\n (Some(ptr), tag)\n\n }\n\n None => (None, 0),\n\n }\n\n }\n\n\n\n #[inline]\n\n unsafe fn from_marked_ptr(marked: MarkedPtr<Self::Item, Self::MarkBits>) -> Self {\n\n if !marked.is_null() {\n", "file_path": "src/pointer/mod.rs", "rank": 68, "score": 29241.595145308453 }, { "content": " Value(ptr) => Self::Pointer::marked(ptr, tag),\n\n Null(_) => Null(tag),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn unmarked(marked: Self) -> Self {\n\n match marked {\n\n Value(ptr) => Value(Self::Pointer::unmarked(ptr)),\n\n Null(_) => Null(0),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn decompose(marked: Self) -> (Self, usize) {\n\n match marked {\n\n Value(ptr) => {\n\n let (ptr, tag) = Self::Pointer::decompose(ptr);\n\n (Value(ptr), tag)\n\n }\n", "file_path": "src/pointer/mod.rs", "rank": 71, "score": 29241.068920259666 }, { "content": " /// tag and the separated tag.\n\n fn decompose(_: Self) -> (Self, usize);\n\n\n\n /// Constructs a `Self` from a raw marked pointer.\n\n ///\n\n /// # Safety\n\n ///\n\n /// The caller has to ensure that raw is a valid pointer for the respective\n\n /// `Self` type. 
If `Self` is nullable, a null pointer is a valid value.\n\n /// Otherwise, all values must be valid pointers.\n\n unsafe fn from_marked_ptr(marked: MarkedPtr<Self::Item, Self::MarkBits>) -> Self;\n\n\n\n /// Constructs a `Self` from a raw non-null marked pointer\n\n ///\n\n /// # Safety\n\n ///\n\n /// The same caveats as with [`from_marked_ptr`][MarkedPointer::from_marked_ptr]\n\n /// apply as well.\n\n unsafe fn from_marked_non_null(marked: MarkedNonNull<Self::Item, Self::MarkBits>) -> Self;\n\n}\n", "file_path": "src/pointer/mod.rs", "rank": 72, "score": 29240.845945402132 }, { "content": "\n\nimpl<T> MarkedNonNullable for NonNull<T> {\n\n type Item = T;\n\n type MarkBits = typenum::U0;\n\n\n\n #[inline]\n\n fn into_marked_non_null(self) -> MarkedNonNull<Self::Item, Self::MarkBits> {\n\n MarkedNonNull::from(self)\n\n }\n\n}\n\n\n\n/********** impl Internal *************************************************************************/\n\n\n\nimpl<U, T, N: Unsigned> Internal for Option<U> where\n\n U: MarkedPointer<Item = T, MarkBits = N> + MarkedNonNullable<Item = T, MarkBits = N>\n\n{\n\n}\n\n\n\nimpl<U, T, N: Unsigned> Internal for Marked<U> where\n\n U: MarkedPointer<Item = T, MarkBits = N> + MarkedNonNullable<Item = T, MarkBits = N>\n", "file_path": "src/pointer/mod.rs", "rank": 73, "score": 29240.717793976062 }, { "content": " Some(ptr) => Self::Pointer::as_marked_ptr(ptr),\n\n None => MarkedPtr::null(),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn into_marked_ptr(self) -> MarkedPtr<Self::Item, Self::MarkBits> {\n\n match self {\n\n Some(ptr) => Self::Pointer::into_marked_ptr(ptr),\n\n None => MarkedPtr::null(),\n\n }\n\n }\n\n\n\n #[inline]\n\n fn marked(opt: Self, tag: usize) -> Marked<Self::Pointer> {\n\n match opt {\n\n Some(ptr) => Self::Pointer::marked(ptr, tag),\n\n None => Null(tag),\n\n }\n\n }\n", "file_path": "src/pointer/mod.rs", "rank": 74, "score": 29240.426421113087 }, { "content": " Null(tag) => (Null(0), tag),\n\n }\n\n }\n\n\n\n #[inline]\n\n unsafe fn 
from_marked_ptr(marked: MarkedPtr<Self::Item, Self::MarkBits>) -> Self {\n\n MarkedNonNull::new(marked).map(|ptr| Self::Pointer::from_marked_non_null(ptr))\n\n }\n\n\n\n #[inline]\n\n unsafe fn from_marked_non_null(marked: MarkedNonNull<Self::Item, Self::MarkBits>) -> Self {\n\n Value(Self::Pointer::from_marked_non_null(marked))\n\n }\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n// InvalidNullError\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n\n\n/// An error type for representing failed conversions from nullable to\n", "file_path": "src/pointer/mod.rs", "rank": 76, "score": 29239.481961227928 }, { "content": " /// Returns `true` if the marked value is a [`Null`].\n\n #[inline]\n\n pub fn is_null(&self) -> bool {\n\n match *self {\n\n Null(_) => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n /// Converts from `Marked<T>` to `Marked<&T>`.\n\n #[inline]\n\n pub fn as_ref(&self) -> Marked<&T> {\n\n match self {\n\n Value(value) => Value(value),\n\n Null(tag) => Null(*tag),\n\n }\n\n }\n\n\n\n /// Converts from `Marked<T>` to `Marked<&mut T>`.\n\n #[inline]\n", "file_path": "src/pointer/marked.rs", "rank": 77, "score": 29239.067166578694 }, { "content": " /// Stores a value into the pointer, returning the previous value.\n\n #[inline]\n\n pub fn swap(&self, ptr: MarkedPtr<T, N>, order: Ordering) -> MarkedPtr<T, N> {\n\n MarkedPtr::from_usize(self.inner.swap(ptr.into_usize(), order))\n\n }\n\n\n\n /// Stores a value into the pointer if the current value is the same\n\n /// as `current`.\n\n #[inline]\n\n pub fn compare_and_swap(\n\n &self,\n\n current: MarkedPtr<T, N>,\n\n new: MarkedPtr<T, N>,\n\n order: Ordering,\n\n ) -> MarkedPtr<T, N> {\n\n MarkedPtr::from_usize(self.inner.compare_and_swap(\n\n current.into_usize(),\n\n new.into_usize(),\n\n order,\n\n ))\n", "file_path": "src/pointer/atomic.rs", "rank": 78, "score": 29238.980023650347 }, 
{ "content": " #[inline]\n\n pub fn compare_exchange_weak(\n\n &self,\n\n current: MarkedPtr<T, N>,\n\n new: MarkedPtr<T, N>,\n\n success: Ordering,\n\n failure: Ordering,\n\n ) -> Result<MarkedPtr<T, N>, MarkedPtr<T, N>> {\n\n self.inner\n\n .compare_exchange_weak(current.into_usize(), new.into_usize(), success, failure)\n\n .map(MarkedPtr::from_usize)\n\n .map_err(MarkedPtr::from_usize)\n\n }\n\n\n\n /// Bitwise `and` with the current tag value.\n\n ///\n\n /// Performs a bitwise `and` operation on the current tag and the argument `value` and sets the\n\n /// new value to the result.\n\n ///\n\n /// Returns the [`MarkedPtr`] with the previous tag, the pointer itself can not change.\n", "file_path": "src/pointer/atomic.rs", "rank": 80, "score": 29238.279084451016 }, { "content": "\n\n use crate::align::Aligned8;\n\n\n\n type AtomicMarkedPtr<T> = crate::pointer::AtomicMarkedPtr<T, U3>;\n\n type MarkedPtr<T> = crate::pointer::MarkedPtr<T, U3>;\n\n\n\n #[test]\n\n fn null() {\n\n let ptr: AtomicMarkedPtr<usize> = AtomicMarkedPtr::null();\n\n assert_eq!(ptr.load(Ordering::Relaxed).into_usize(), 0);\n\n assert_eq!(ptr.into_inner().into_usize(), 0);\n\n }\n\n\n\n #[test]\n\n fn new() {\n\n let reference = &Aligned8(1usize);\n\n let marked = AtomicMarkedPtr::new(MarkedPtr::from(reference));\n\n let from = AtomicMarkedPtr::from(reference as *const _ as *mut Aligned8<usize>);\n\n assert_eq!(marked.load(Ordering::Relaxed).into_usize(), reference as *const _ as usize);\n\n assert_eq!(from.load(Ordering::Relaxed).into_usize(), reference as *const _ as usize);\n", "file_path": "src/pointer/atomic.rs", "rank": 81, "score": 29237.046804663136 }, { "content": " /// [seq_cst]: core::sync::atomic::Ordering::SeqCst\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use std::sync::atomic::Ordering;\n\n ///\n\n /// type MarkedPtr<T> = reclaim::MarkedPtr<T, reclaim::typenum::U1>;\n\n /// type AtomicMarkedPtr<T> = reclaim::AtomicMarkedPtr<T, reclaim::typenum::U1>;\n\n ///\n\n /// 
let ptr = &mut 5;\n\n /// let marked = MarkedPtr::compose(ptr, 0b1);\n\n /// let atomic = AtomicMarkedPtr::new(marked);\n\n ///\n\n /// let value = atomic.load(Ordering::Relaxed);\n\n /// assert_eq!((Some(&mut 5), 0b1), unsafe { value.decompose_mut() });\n\n /// ```\n\n #[inline]\n\n pub fn load(&self, order: Ordering) -> MarkedPtr<T, N> {\n\n MarkedPtr::from_usize(self.inner.load(order))\n", "file_path": "src/pointer/atomic.rs", "rank": 82, "score": 29236.549264774814 }, { "content": " }\n\n\n\n /// Stores a value into the pointer if the current value is the same\n\n /// as `current`.\n\n #[inline]\n\n pub fn compare_exchange(\n\n &self,\n\n current: MarkedPtr<T, N>,\n\n new: MarkedPtr<T, N>,\n\n success: Ordering,\n\n failure: Ordering,\n\n ) -> Result<MarkedPtr<T, N>, MarkedPtr<T, N>> {\n\n self.inner\n\n .compare_exchange(current.into_usize(), new.into_usize(), success, failure)\n\n .map(MarkedPtr::from_usize)\n\n .map_err(MarkedPtr::from_usize)\n\n }\n\n\n\n /// Stores a value into the pointer if the current value is the same\n\n /// as `current`.\n", "file_path": "src/pointer/atomic.rs", "rank": 83, "score": 29236.485712458252 }, { "content": " ///\n\n /// Returns the [`MarkedPtr`] with the previous tag, the pointer itself can not change.\n\n /// It `value` is larger than the mask of markable bits of this type it is silently truncated.\n\n ///\n\n /// `fetch_nand` takes an [`Ordering`] argument, which describes the memory ordering of this\n\n /// operation.\n\n /// All orderings modes are possible.\n\n /// Note, that using [`Acquire`][acq] makes the store part of this operation [`Relaxed`][rlx]\n\n /// and using [`Release`][rel] makes the load part [`Relaxed][rlx]\n\n ///\n\n /// [acq]: core::sync::atomic::Ordering::Acquire\n\n /// [rel]: core::sync::atomic::Ordering::Release\n\n /// [rlx]: core::sync::atomic::Ordering::Relaxed\n\n #[inline]\n\n pub fn fetch_nand(&self, value: usize, order: Ordering) -> MarkedPtr<T, N> {\n\n 
MarkedPtr::from_usize(self.inner.fetch_nand(value, order))\n\n }\n\n\n\n /// Bitwise `or` with the current tag value.\n\n ///\n", "file_path": "src/pointer/atomic.rs", "rank": 85, "score": 29236.056850441877 }, { "content": "{\n\n}\n\n\n\n/********** helper functions **********************************************************************/\n\n\n\n/// Decomposes the integer representation of a marked pointer into a\n\n/// raw pointer and its tag.\n\n#[inline]\n\nconst fn decompose<T>(marked: usize, mark_bits: usize) -> (*mut T, usize) {\n\n (decompose_ptr::<T>(marked, mark_bits), decompose_tag::<T>(marked, mark_bits))\n\n}\n\n\n\n/// Decomposes the integer representation of a marked pointer into\n\n/// a raw pointer stripped of its tag.\n\n#[inline]\n\nconst fn decompose_ptr<T>(marked: usize, mark_bits: usize) -> *mut T {\n\n (marked & !mark_mask::<T>(mark_bits)) as *mut _\n\n}\n\n\n\n/// Decomposes the integer representation of a marked pointer into\n", "file_path": "src/pointer/mod.rs", "rank": 86, "score": 29235.803851926023 }, { "content": " /// Performs a bitwise `or` operation on the current tag and the argument `value` and sets the\n\n /// new value to the result.\n\n ///\n\n /// Returns the [`MarkedPtr`] with the previous tag, the pointer itself can not change.\n\n /// It `value` is larger than the mask of markable bits of this type it is silently truncated.\n\n ///\n\n /// `fetch_or` takes an [`Ordering`] argument, which describes the memory ordering of this\n\n /// operation.\n\n /// All orderings modes are possible.\n\n /// Note, that using [`Acquire`][acq] makes the store part of this operation [`Relaxed`][rlx]\n\n /// and using [`Release`][rel] makes the load part [`Relaxed][rlx]\n\n ///\n\n /// [acq]: core::sync::atomic::Ordering::Acquire\n\n /// [rel]: core::sync::atomic::Ordering::Release\n\n /// [rlx]: core::sync::atomic::Ordering::Relaxed\n\n #[inline]\n\n pub fn fetch_or(&self, value: usize, order: Ordering) -> MarkedPtr<T, N> {\n\n 
MarkedPtr::from_usize(self.inner.fetch_or(value, order))\n\n }\n\n\n", "file_path": "src/pointer/atomic.rs", "rank": 88, "score": 29235.64809635747 }, { "content": "////////////////////////////////////////////////////////////////////////////////////////////////////\n\n\n\n/// A non-nullable marked raw pointer type like [`NonNull`](std::ptr::NonNull).\n\n///\n\n/// Note, that unlike [`MarkedPtr`][MarkedPtr] this also **excludes** marked\n\n/// null-pointers.\n\npub struct MarkedNonNull<T, N> {\n\n inner: NonNull<T>,\n\n _marker: PhantomData<N>,\n\n}\n\n\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n// AtomicMarkedPtr\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n\n\n/// A raw pointer type which can be safely shared between threads, which\n\n/// can store additional information in its lower (unused) bits.\n\n///\n\n/// This type has the same in-memory representation as a *mut T. 
It is mostly\n\n/// identical to [`AtomicPtr`][atomic], except that all of its methods involve\n", "file_path": "src/pointer/mod.rs", "rank": 89, "score": 29235.468031590535 }, { "content": " ///\n\n /// ```\n\n /// use std::sync::atomic::Ordering;\n\n ///\n\n /// type MarkedPtr<T> = reclaim::MarkedPtr<T, reclaim::typenum::U0>;\n\n /// type AtomicMarkedPtr<T> = reclaim::AtomicMarkedPtr<T, reclaim::typenum::U0>;\n\n ///\n\n /// let ptr = &mut 5;\n\n /// let marked = MarkedPtr::new(ptr);\n\n /// let atomic = AtomicMarkedPtr::new(marked);\n\n ///\n\n /// let other_marked = MarkedPtr::new(&mut 10);\n\n ///\n\n /// atomic.store(other_marked, Ordering::Relaxed);\n\n /// ```\n\n #[inline]\n\n pub fn store(&self, ptr: MarkedPtr<T, N>, order: Ordering) {\n\n self.inner.store(ptr.into_usize(), order);\n\n }\n\n\n", "file_path": "src/pointer/atomic.rs", "rank": 90, "score": 29235.341156262133 }, { "content": " /// Bitwise `xor` with the current tag value.\n\n ///\n\n /// Performs a bitwise `xor` operation on the current tag and the argument `value` and sets the\n\n /// new value to the result.\n\n ///\n\n /// Returns the [`MarkedPtr`] with the previous tag, the pointer itself can not change.\n\n /// It `value` is larger than the mask of markable bits of this type it is silently truncated.\n\n ///\n\n /// `fetch_xor` takes an [`Ordering`] argument, which describes the memory ordering of this\n\n /// operation.\n\n /// All orderings modes are possible.\n\n /// Note, that using [`Acquire`][acq] makes the store part of this operation [`Relaxed`][rlx]\n\n /// and using [`Release`][rel] makes the load part [`Relaxed][rlx]\n\n ///\n\n /// [acq]: core::sync::atomic::Ordering::Acquire\n\n /// [rel]: core::sync::atomic::Ordering::Release\n\n /// [rlx]: core::sync::atomic::Ordering::Relaxed\n\n #[inline]\n\n pub fn fetch_xor(&self, value: usize, order: Ordering) -> MarkedPtr<T, N> {\n\n MarkedPtr::from_usize(self.inner.fetch_xor(value, order))\n", "file_path": "src/pointer/atomic.rs", 
"rank": 91, "score": 29235.311334425045 }, { "content": " pub fn value(self) -> Option<T> {\n\n match self {\n\n Value(ptr) => Some(ptr),\n\n _ => None,\n\n }\n\n }\n\n\n\n /// Takes the value of the [`Marked`], leaving a [`Null`] variant in its\n\n /// place.\n\n #[inline]\n\n pub fn take(&mut self) -> Self {\n\n mem::replace(self, Null(0))\n\n }\n\n\n\n /// Replaces the actual value in the [`Marked`] with the given `value`,\n\n /// returning the old value.\n\n #[inline]\n\n pub fn replace(&mut self, value: T) -> Self {\n\n mem::replace(self, Value(value))\n\n }\n", "file_path": "src/pointer/marked.rs", "rank": 93, "score": 29235.01763410632 }, { "content": " let composed = super::compose::<_, U3>(&mut aligned, 0b10);\n\n assert_eq!(super::decompose(composed as usize, U3::USIZE), (&mut aligned as *mut _, 0b10));\n\n let composed = super::compose::<_, U3>(&mut aligned, 0b100);\n\n assert_eq!(super::decompose(composed as usize, U3::USIZE), (&mut aligned as *mut _, 0b100));\n\n let composed = super::compose::<_, U3>(&mut aligned, 0b1000);\n\n assert_eq!(super::decompose(composed as usize, U3::USIZE), (&mut aligned as *mut _, 0b0));\n\n }\n\n\n\n #[test]\n\n fn marked_null() {\n\n let ptr: *mut Aligned4<u8> = ptr::null_mut();\n\n let marked = super::compose::<_, U1>(ptr, 1);\n\n assert_eq!(super::decompose::<Aligned4<u8>>(marked as usize, 1), (ptr::null_mut(), 1));\n\n }\n\n}\n", "file_path": "src/pointer/mod.rs", "rank": 94, "score": 29234.583039971258 }, { "content": " let ptr = reference as *mut _ as usize;\n\n\n\n assert_eq!(super::compose::<Aligned8<u8>, U2>(ptr::null_mut(), 0), ptr::null_mut());\n\n assert_eq!(super::compose::<_, U2>(reference, 0), ptr as *mut _);\n\n assert_eq!(super::compose::<_, U2>(reference, 0b11), (ptr | 0b11) as *mut _);\n\n assert_eq!(super::compose::<_, U2>(reference, 0b1111), (ptr | 0b11) as *mut _);\n\n assert_eq!(\n\n super::compose::<Aligned64<u8>, U6>(ptr::null_mut(), 0b11_0101),\n\n 0b11_0101 as *mut Aligned64<u8>\n\n );\n\n 
}\n\n\n\n #[test]\n\n fn decompose() {\n\n let mut aligned = Aligned8(0);\n\n\n\n let composed = super::compose::<_, U3>(&mut aligned, 0b0);\n\n assert_eq!(super::decompose(composed as usize, U3::USIZE), (&mut aligned as *mut _, 0b0));\n\n let composed = super::compose::<_, U3>(&mut aligned, 0b1);\n\n assert_eq!(super::decompose(composed as usize, U3::USIZE), (&mut aligned as *mut _, 0b1));\n", "file_path": "src/pointer/mod.rs", "rank": 95, "score": 29233.909937957105 }, { "content": " /// It `value` is larger than the mask of markable bits of this type it is silently truncated.\n\n ///\n\n /// `fetch_and` takes an [`Ordering`] argument, which describes the memory ordering of this\n\n /// operation.\n\n /// All orderings modes are possible.\n\n /// Note, that using [`Acquire`][acq] makes the store part of this operation [`Relaxed`][rlx]\n\n /// and using [`Release`][rel] makes the load part [`Relaxed][rlx]\n\n ///\n\n /// [acq]: core::sync::atomic::Ordering::Acquire\n\n /// [rel]: core::sync::atomic::Ordering::Release\n\n /// [rlx]: core::sync::atomic::Ordering::Relaxed\n\n #[inline]\n\n pub fn fetch_and(&self, value: usize, order: Ordering) -> MarkedPtr<T, N> {\n\n MarkedPtr::from_usize(self.inner.fetch_and(value, order))\n\n }\n\n\n\n /// Bitwise `nand` with the current tag value.\n\n ///\n\n /// Performs a bitwise `nand` operation on the current tag and the argument `value` and sets the\n\n /// new value to the result.\n", "file_path": "src/pointer/atomic.rs", "rank": 96, "score": 29233.49852000252 }, { "content": "/// *only* the tag.\n\n#[inline]\n\nconst fn decompose_tag<T>(marked: usize, mark_bits: usize) -> usize {\n\n marked & mark_mask::<T>(mark_bits)\n\n}\n\n\n\n/// Gets the number of unused (markable) lower bits in a pointer for\n\n/// type `T`.\n\n#[inline]\n\nconst fn lower_bits<T>() -> usize {\n\n mem::align_of::<T>().trailing_zeros() as usize\n\n}\n\n\n\n/// Gets the integer representation for the bitmask of markable lower\n\n/// bits of a pointer 
for type `T`.\n\n#[deny(const_err)]\n\n#[inline]\n\nconst fn mark_mask<T>(mark_bits: usize) -> usize {\n\n let _assert_sufficient_alignment = lower_bits::<T>() - mark_bits;\n\n (1 << mark_bits) - 1\n\n}\n\n\n\n/// Composes a marked pointer from a raw (i.e. unmarked) pointer and a tag.\n\n///\n\n/// If the size of the tag exceeds the markable bits of `T` the tag is truncated.\n", "file_path": "src/pointer/mod.rs", "rank": 98, "score": 29232.73195477938 }, { "content": "mod atomic;\n\nmod marked;\n\nmod non_null;\n\nmod raw;\n\n\n\n#[cfg(feature = \"std\")]\n\nuse std::error::Error;\n\n\n\nuse core::fmt;\n\nuse core::marker::PhantomData;\n\nuse core::mem;\n\nuse core::ptr::{self, NonNull};\n\nuse core::sync::atomic::AtomicUsize;\n\n\n\nuse typenum::Unsigned;\n\n\n\nuse crate::internal::Internal;\n\n\n\nuse self::Marked::{Null, Value};\n\n\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n// MarkedPointer (trait)\n\n////////////////////////////////////////////////////////////////////////////////////////////////////\n\n\n\n/// Trait for nullable and non-nullable *markable* pointer types.\n", "file_path": "src/pointer/mod.rs", "rank": 99, "score": 29232.724414656837 } ]
Rust
lib/grammers-client/src/types/chat/mod.rs
the-blank-x/grammers
ad3fd4b7facafccc1eeacbf1df11a67b58d52e60
mod channel; mod group; mod user; use grammers_tl_types as tl; use std::fmt; pub use channel::Channel; pub use group::Group; pub use user::{Platform, RestrictionReason, User}; #[derive(Clone, Debug)] pub enum Chat { User(User), Group(Group), Channel(Channel), } #[repr(u8)] #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub(crate) enum PackedType { User = 0b0000_0010, Bot = 0b0000_0011, Chat = 0b0000_0100, Megagroup = 0b0010_1000, Broadcast = 0b0011_0000, Gigagroup = 0b0011_1000, } #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct PackedChat { pub(crate) ty: PackedType, pub(crate) id: i32, pub(crate) access_hash: Option<i64>, } impl PackedChat { pub fn to_bytes(&self) -> Vec<u8> { let mut res = if let Some(access_hash) = self.access_hash { let mut res = vec![0; 14]; res[6..14].copy_from_slice(&access_hash.to_le_bytes()); res } else { vec![0; 6] }; res[0] = self.ty as u8; res[1] = res.len() as u8; res[2..6].copy_from_slice(&self.id.to_le_bytes()); res } pub fn from_bytes(buf: &[u8]) -> Result<Self, ()> { if buf.len() != 6 && buf.len() != 14 { return Err(()); } if buf[1] as usize != buf.len() { return Err(()); } let ty = match buf[0] { 0b0000_0010 => PackedType::User, 0b0000_0011 => PackedType::Bot, 0b0000_0100 => PackedType::Chat, 0b0010_1000 => PackedType::Megagroup, 0b0011_0000 => PackedType::Broadcast, 0b0011_1000 => PackedType::Gigagroup, _ => return Err(()), }; let id = i32::from_le_bytes([buf[2], buf[3], buf[4], buf[5]]); let access_hash = if buf[1] == 14 { Some(i64::from_le_bytes([ buf[6], buf[7], buf[8], buf[9], buf[10], buf[11], buf[12], buf[13], ])) } else { None }; Ok(Self { ty, id, access_hash, }) } pub fn unpack(&self) -> Chat { match self.ty { PackedType::User | PackedType::Bot => { let mut user = User::from_raw(tl::types::UserEmpty { id: self.id }.into()); user.0.access_hash = self.access_hash; Chat::User(user) } PackedType::Chat => { Chat::Group(Group::from_raw(tl::types::ChatEmpty { id: self.id 
}.into())) } PackedType::Megagroup => Chat::Group(Group::from_raw( tl::types::ChannelForbidden { id: self.id, broadcast: false, megagroup: true, access_hash: self.access_hash.unwrap_or(0), title: String::new(), until_date: None, } .into(), )), PackedType::Broadcast | PackedType::Gigagroup => Chat::Channel(Channel::from_raw( tl::types::ChannelForbidden { id: self.id, broadcast: false, megagroup: true, access_hash: self.access_hash.unwrap_or(0), title: String::new(), until_date: None, } .into(), )), } } } impl fmt::Display for PackedType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(match self { Self::User => "User", Self::Bot => "Bot", Self::Chat => "Group", Self::Megagroup => "Supergroup", Self::Broadcast => "Channel", Self::Gigagroup => "BroadcastGroup", }) } } impl fmt::Display for PackedChat { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "PackedChat::{}({})", self.ty, self.id) } } impl Chat { pub(crate) fn from_user(user: tl::enums::User) -> Self { Self::User(User::from_raw(user)) } pub(crate) fn from_chat(chat: tl::enums::Chat) -> Self { use tl::enums::Chat as C; match chat { C::Empty(_) | C::Chat(_) | C::Forbidden(_) => Self::Group(Group::from_raw(chat)), C::Channel(ref channel) => { if channel.broadcast { Self::Channel(Channel::from_raw(chat)) } else { Self::Group(Group::from_raw(chat)) } } C::ChannelForbidden(ref channel) => { if channel.broadcast { Self::Channel(Channel::from_raw(chat)) } else { Self::Group(Group::from_raw(chat)) } } } } pub(crate) fn to_peer(&self) -> tl::enums::Peer { match self { Self::User(user) => user.to_peer(), Self::Group(group) => group.to_peer(), Self::Channel(channel) => channel.to_peer(), } } pub(crate) fn to_input_peer(&self) -> tl::enums::InputPeer { match self { Self::User(user) => user.to_input_peer(), Self::Group(group) => group.to_input_peer(), Self::Channel(channel) => channel.to_input_peer(), } } pub(crate) fn to_input_user(&self) -> Option<tl::enums::InputUser> { match 
self { Self::User(user) => Some(user.to_input()), Self::Group(_) => None, Self::Channel(_) => None, } } pub(crate) fn to_input_channel(&self) -> Option<tl::enums::InputChannel> { match self { Self::User(_) => None, Self::Group(group) => group.to_input_channel(), Self::Channel(channel) => Some(channel.to_input()), } } pub(crate) fn to_chat_id(&self) -> Option<i32> { match self { Self::User(_) => None, Self::Group(group) => group.to_chat_id(), Self::Channel(_) => None, } } pub fn id(&self) -> i32 { match self { Self::User(user) => user.id(), Self::Group(group) => group.id(), Self::Channel(channel) => channel.id(), } } fn access_hash(&self) -> Option<i64> { match self { Self::User(user) => user.access_hash(), Self::Group(group) => group.access_hash(), Self::Channel(channel) => channel.access_hash(), } } pub fn name(&self) -> &str { match self { Self::User(user) => user.first_name(), Self::Group(group) => group.title(), Self::Channel(channel) => channel.title(), } } pub fn pack(&self) -> PackedChat { let ty = match self { Self::User(user) => { if user.is_bot() { PackedType::Bot } else { PackedType::User } } Self::Group(chat) => { if chat.is_megagroup() { PackedType::Megagroup } else { PackedType::Chat } } Self::Channel(channel) => { if channel.0.gigagroup { PackedType::Gigagroup } else { PackedType::Broadcast } } }; PackedChat { ty, id: self.id(), access_hash: self.access_hash(), } } }
mod channel; mod group; mod user; use grammers_tl_types as tl; use std::fmt; pub use channel::Channel; pub use group::Group; pub use user::{Platform, RestrictionReason, User}; #[derive(Clone, Debug)] pub enum Chat { User(User), Group(Group), Channel(Channel), } #[repr(u8)] #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub(crate) enum PackedType { User = 0b0000_0010, Bot = 0b0000_0011, Chat = 0b0000_0100, Megagroup = 0b0010_1000, Broadcast = 0b0011_0000, Gigagroup = 0b0011_1000, } #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct PackedChat { pub(crate) ty: PackedType, pub(crate) id: i32, pub(crate) access_hash: Option<i64>, } impl PackedChat { pub fn to_bytes(&self) -> Vec<u8> { let mut res = if let Some(access_hash) = self.access_hash { let mut res = vec![0; 14]; res[6..14].copy_from_slice(&access_hash.to_le_bytes()); res } else { vec![0; 6] }; res[0] = self.ty as u8; res[1] = res.len() as u8; res[2..6].copy_from_slice(&self.id.to_le_bytes()); res } pub fn from_bytes(buf: &[u8]) -> Result<Self, ()> { if buf.len() != 6 && buf.len() != 14 { return Err(()); } if buf[1] as usize != buf.len() { return Err(()); } let ty = match buf[0] { 0b0000_0010 => PackedType::User, 0b0000_0011 => PackedType::Bot, 0b0000_0100 => PackedType::Chat, 0b0010_1000 => PackedType::Megagroup, 0b0011_0000 => PackedType::Broadcast, 0b0011_1000 => PackedType::Gigagroup, _ => return Err(()), }; let id = i32::from_le_bytes([buf[2], buf[3], buf[4], buf[5]]); let access_hash = if buf[1] == 14 { Some(i64::from_le_bytes([ buf[6], buf[7], buf[8], buf[9], buf[10], buf[11], buf[12], buf[13], ])) } else { None }; Ok(Self { ty, id, access_hash, }) } pub fn unpack(&self) -> Chat { match self.ty { PackedType::User | PackedType::Bot => { let mut user = User::from_raw(tl::types::UserEmpty { id: self.id }.into()); user.0.access_hash = self.access_hash; Chat::User(user) } PackedType::Chat => { Chat::Group(Group::from_raw(tl::types::ChatEmpty { id: self.id 
}.into())) } PackedType::Megagroup => Chat::Group(Group::from_raw( tl::types::ChannelForbidden { id: self.id, broadcast: false, megagroup: true, access_hash: self.access_hash.unwrap_or(0), title: String::new(), until_date: None, } .into(), )), PackedType::Broadcast | PackedType::Gigagroup => Chat::Channel(Channel::from_raw( tl::types::ChannelForbidden { id: self.id, broadcast: false, megagroup: true, access_hash: self.access_hash.unwrap_or(0), title: String::new(), until_date: None, } .into(), )), } } } impl fmt::Display for PackedType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str(match self { Self::User => "User", Self::Bot => "Bot", Self::Chat => "Group", Self::Megagroup => "Supergroup", Self::Broadcast => "Channel", Self::Gigagroup => "BroadcastGroup", }) } } impl fmt::Display for PackedChat { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "PackedChat::{}({})", self.ty, self.id) } } impl Chat { pub(crate) fn from_user(user: tl::enums::User) -> Self { Self::User(User::from_raw(user)) } pub(crate) fn from_chat(chat: tl::enums::Chat) -> Self { use tl::enums::Chat as C; match chat { C::Empty(_) | C::Chat(_) | C::Forbidden(_) => Self::Group(Group::from_raw(chat)), C::Channel(ref channel) => { if channel.broadcast { Self::Channel(Channel::from_raw(chat)) } else { Self::Group(Group::from_raw(chat)) } } C::ChannelForbidden(ref channel) => { if channel.broadcast { Self::Channel(Channel::from_raw(chat)) } else { Self::Group(Group::from_raw(chat)) } } } } pub(crate) fn to_peer(&self) -> tl::enums::Peer { match self { Self::User(user) => user.to_peer(), Self::Group(group) => group.to_peer(), Self::Channel(channel) => channel.to_peer(), } } pub(crate) fn to_input_peer(&self) -> tl::enums::InputPeer { match self { Self::User(user) => user.to_input_peer(), Self::Group(group) => group.to_input_peer(), Self::Channel(channel) => channel.to_input_peer(), } } pub(crate) fn to_input_user(&self) -> Option<tl::enums::InputUser> { match 
self { Self::User(user) => Some(user.to_input()), Self::Group(_) => None, Self::Channel(_) => None, } } pub(crate) fn to_input_channel(&self) -> Option<tl::enums::InputChannel> { match self { Self::User(_) => None, Self::Group(group) => group.to_input_channel(),
lf::User(user) => user.access_hash(), Self::Group(group) => group.access_hash(), Self::Channel(channel) => channel.access_hash(), } } pub fn name(&self) -> &str { match self { Self::User(user) => user.first_name(), Self::Group(group) => group.title(), Self::Channel(channel) => channel.title(), } } pub fn pack(&self) -> PackedChat { let ty = match self { Self::User(user) => { if user.is_bot() { PackedType::Bot } else { PackedType::User } } Self::Group(chat) => { if chat.is_megagroup() { PackedType::Megagroup } else { PackedType::Chat } } Self::Channel(channel) => { if channel.0.gigagroup { PackedType::Gigagroup } else { PackedType::Broadcast } } }; PackedChat { ty, id: self.id(), access_hash: self.access_hash(), } } }
Self::Channel(channel) => Some(channel.to_input()), } } pub(crate) fn to_chat_id(&self) -> Option<i32> { match self { Self::User(_) => None, Self::Group(group) => group.to_chat_id(), Self::Channel(_) => None, } } pub fn id(&self) -> i32 { match self { Self::User(user) => user.id(), Self::Group(group) => group.id(), Self::Channel(channel) => channel.id(), } } fn access_hash(&self) -> Option<i64> { match self { Se
random
[ { "content": "fn message_channel_id(message: &tl::enums::Message) -> Option<i32> {\n\n match message {\n\n tl::enums::Message::Empty(_) => None,\n\n tl::enums::Message::Message(m) => match &m.peer_id {\n\n tl::enums::Peer::Channel(c) => Some(c.channel_id),\n\n _ => None,\n\n },\n\n tl::enums::Message::Service(m) => match &m.peer_id {\n\n tl::enums::Peer::Channel(c) => Some(c.channel_id),\n\n _ => None,\n\n },\n\n }\n\n}\n\n\n\nimpl PtsInfo {\n\n pub(super) fn from_update(update: &tl::enums::Update) -> Option<Self> {\n\n use tl::enums::Update::*;\n\n match update {\n\n NewMessage(u) => {\n\n assert!(!matches!(\n", "file_path": "lib/grammers-session/src/message_box/adaptor.rs", "rank": 0, "score": 290929.802428894 }, { "content": "/// Validation for parameters required for two-factor authentication\n\npub fn check_p_and_g(g: &i32, p: &[u8]) -> bool {\n\n if !check_p_len(p) {\n\n return false;\n\n }\n\n\n\n check_p_prime_and_subgroup(p, g)\n\n}\n\n\n", "file_path": "lib/grammers-crypto/src/two_factor_auth.rs", "rank": 1, "score": 244014.30967634433 }, { "content": "/// Return the name from the `.tl` definition corresponding to the provided definition identifier.\n\npub fn name_for_id(id: u32) -> &'static str {{\n\n match id {{\n\n \"#\n\n )?;\n\n for def in definitions {\n\n writeln!(file, r#\" 0x{:x} => \"{}\",\"#, def.id, def.full_name())?;\n\n }\n\n\n\n writeln!(\n\n file,\n\n r#\"\n\n _ => \"(unknown)\",\n\n }}\n\n}}\n\n \"#,\n\n )?;\n\n }\n\n\n\n let metadata = metadata::Metadata::new(&definitions);\n\n structs::write_category_mod(file, Category::Types, definitions, &metadata, config)?;\n\n structs::write_category_mod(file, Category::Functions, definitions, &metadata, config)?;\n\n enums::write_enums_mod(file, definitions, &metadata, config)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "lib/grammers-tl-gen/src/lib.rs", "rank": 2, "score": 233091.10347797847 }, { "content": "/// Encrypt the given data, prefixing it with a hash before, using RSA.\n\npub fn 
encrypt_hashed(data: &[u8], key: &Key, random_bytes: &[u8; 256]) -> Vec<u8> {\n\n // Sha1::digest's len is always 20, we're left with 255 - 20 - x padding.\n\n let to_encrypt = {\n\n // sha1\n\n let mut buffer = Vec::with_capacity(255);\n\n buffer.extend(&Sha1::from(data).digest().bytes());\n\n\n\n // + data\n\n buffer.extend(data);\n\n\n\n // + padding\n\n let padding_len = 255 - 20 - data.len();\n\n let mut padding = vec![0; padding_len];\n\n padding.copy_from_slice(&random_bytes[..padding_len]);\n\n buffer.extend(&padding);\n\n\n\n buffer\n\n };\n\n\n\n let payload = BigUint::from_bytes_be(&to_encrypt);\n", "file_path": "lib/grammers-crypto/src/rsa.rs", "rank": 3, "score": 228631.16945826882 }, { "content": "/// Parses a file full of [Type Language] definitions.\n\n///\n\n/// # Examples\n\n///\n\n/// ```no_run\n\n/// use std::fs::File;\n\n/// use std::io::{self, Read};\n\n/// use grammers_tl_parser::parse_tl_file;\n\n///\n\n/// fn main() -> std::io::Result<()> {\n\n/// let mut file = File::open(\"api.tl\")?;\n\n/// let mut contents = String::new();\n\n/// file.read_to_string(&mut contents)?;\n\n///\n\n/// for definition in parse_tl_file(&contents) {\n\n/// dbg!(definition);\n\n/// }\n\n///\n\n/// Ok(())\n\n/// }\n\n/// ```\n\n///\n\n/// [Type Language]: https://core.telegram.org/mtproto/TL\n\npub fn parse_tl_file(contents: &str) -> impl Iterator<Item = Result<Definition, ParseError>> {\n\n TlIterator::new(contents)\n\n}\n", "file_path": "lib/grammers-tl-parser/src/lib.rs", "rank": 4, "score": 208190.51581646298 }, { "content": "#[cfg(feature = \"markdown\")]\n\npub fn parse_markdown_message(message: &str) -> (String, Vec<tl::enums::MessageEntity>) {\n\n use pulldown_cmark::{CodeBlockKind, Event, Parser, Tag};\n\n\n\n let mut text = String::with_capacity(message.len());\n\n let mut entities = Vec::new();\n\n\n\n let mut offset = 0;\n\n Parser::new(message).for_each(|event| match event {\n\n // text\n\n Event::Text(string) => {\n\n text.push_str(&string);\n\n 
offset += telegram_string_len(&string);\n\n }\n\n\n\n // `code`\n\n Event::Code(string) => {\n\n text.push_str(&string);\n\n let length = telegram_string_len(&string);\n\n entities.push(tl::types::MessageEntityCode { offset, length }.into());\n\n offset += length;\n", "file_path": "lib/grammers-client/src/parsers.rs", "rank": 5, "score": 203864.37309225695 }, { "content": "#[cfg(feature = \"html\")]\n\npub fn parse_html_message(message: &str) -> (String, Vec<tl::enums::MessageEntity>) {\n\n use html5ever::tendril::StrTendril;\n\n use html5ever::tokenizer::{\n\n BufferQueue, Tag, TagKind, Token, TokenSink, TokenSinkResult, Tokenizer,\n\n };\n\n\n\n // We could also convert the atoms we receive into lowercase strings and\n\n // match against those, but that would defeat the purpose. We do however\n\n // give the atoms we use better names.\n\n use html5ever::{\n\n ATOM_LOCALNAME__61 as TAG_A, ATOM_LOCALNAME__62 as TAG_B,\n\n ATOM_LOCALNAME__62_6C_6F_63_6B_71_75_6F_74_65 as TAG_BLOCKQUOTE,\n\n ATOM_LOCALNAME__63_6C_61_73_73 as ATTR_CLASS, ATOM_LOCALNAME__63_6F_64_65 as TAG_CODE,\n\n ATOM_LOCALNAME__64_65_6C as TAG_DEL, ATOM_LOCALNAME__65_6D as TAG_EM,\n\n ATOM_LOCALNAME__68_72_65_66 as ATTR_HREF, ATOM_LOCALNAME__69 as TAG_I,\n\n ATOM_LOCALNAME__70_72_65 as TAG_PRE, ATOM_LOCALNAME__73 as TAG_S,\n\n ATOM_LOCALNAME__73_74_72_6F_6E_67 as TAG_STRONG, ATOM_LOCALNAME__75 as TAG_U,\n\n };\n\n\n\n struct Sink {\n", "file_path": "lib/grammers-client/src/parsers.rs", "rank": 6, "score": 203864.37309225698 }, { "content": "// SH(data, salt) := H(salt | data | salt)\n\nfn sh(data: impl AsRef<[u8]>, salt: impl AsRef<[u8]>) -> Output<Sha256> {\n\n return h!(&salt, &data, &salt);\n\n}\n\n\n", "file_path": "lib/grammers-crypto/src/two_factor_auth.rs", "rank": 7, "score": 200142.54296393014 }, { "content": "/// Encrypt data using AES-IGE.\n\npub fn encrypt_ige(plaintext: &[u8], key: &[u8; 32], iv: &[u8; 32]) -> Vec<u8> {\n\n let mut padded: Vec<u8>;\n\n let padded_plaintext = if 
plaintext.len() % 16 == 0 {\n\n plaintext\n\n } else {\n\n let pad_len = (16 - (plaintext.len() % 16)) % 16;\n\n padded = Vec::with_capacity(plaintext.len() + pad_len);\n\n padded.extend(plaintext);\n\n\n\n let mut buffer = vec![0; pad_len];\n\n getrandom(&mut buffer).expect(\"failed to generate random padding for encryption\");\n\n padded.extend(&buffer);\n\n\n\n &padded\n\n };\n\n\n\n aes::ige_encrypt(padded_plaintext, key, iv)\n\n}\n\n\n", "file_path": "lib/grammers-crypto/src/lib.rs", "rank": 8, "score": 196113.46705649444 }, { "content": "/// Decrypt the input ciphertext using the AES-IGE mode.\n\npub fn ige_decrypt(ciphertext: &[u8], key: &[u8; 32], iv: &[u8; 32]) -> Vec<u8> {\n\n assert!(ciphertext.len() % 16 == 0);\n\n let mut plaintext = vec![0; ciphertext.len()];\n\n assert!(plaintext.len() % 16 == 0);\n\n\n\n let key = GenericArray::from_slice(key);\n\n let cipher = aes::Aes256::new(&key);\n\n\n\n let mut iv = *iv;\n\n let (iv1, iv2) = iv.split_at_mut(16);\n\n assert!(iv1.len() == 16);\n\n assert!(iv2.len() == 16);\n\n\n\n for (ciphertext_block, plaintext_block) in ciphertext.chunks(16).zip(plaintext.chunks_mut(16)) {\n\n // block = block XOR iv2\n\n let plaintext_block = GenericArray::from_mut_slice(plaintext_block);\n\n plaintext_block\n\n .iter_mut()\n\n .zip(ciphertext_block.iter().zip(iv2.iter()))\n\n .for_each(|(x, (a, b))| *x = a ^ b);\n", "file_path": "lib/grammers-crypto/src/aes.rs", "rank": 9, "score": 196113.40617642296 }, { "content": "/// Encrypt the input plaintext using the AES-IGE mode.\n\npub fn ige_encrypt(plaintext: &[u8], key: &[u8; 32], iv: &[u8; 32]) -> Vec<u8> {\n\n assert!(plaintext.len() % 16 == 0);\n\n let mut ciphertext = vec![0; plaintext.len()];\n\n assert!(ciphertext.len() % 16 == 0);\n\n\n\n let key = GenericArray::from_slice(key);\n\n let cipher = aes::Aes256::new(&key);\n\n\n\n let mut iv = *iv;\n\n let (iv1, iv2) = iv.split_at_mut(16);\n\n assert!(iv1.len() == 16);\n\n assert!(iv2.len() == 16);\n\n\n\n for 
(plaintext_block, ciphertext_block) in plaintext.chunks(16).zip(ciphertext.chunks_mut(16)) {\n\n // block = block XOR iv1\n\n let ciphertext_block = GenericArray::from_mut_slice(ciphertext_block);\n\n ciphertext_block\n\n .iter_mut()\n\n .zip(plaintext_block.iter().zip(iv1.iter()))\n\n .for_each(|(x, (a, b))| *x = a ^ b);\n", "file_path": "lib/grammers-crypto/src/aes.rs", "rank": 10, "score": 196113.40617642296 }, { "content": "/// Decrypt data using AES-IGE. Panics if the plaintext is not padded\n\n/// to 16 bytes.\n\npub fn decrypt_ige(padded_ciphertext: &[u8], key: &[u8; 32], iv: &[u8; 32]) -> Vec<u8> {\n\n aes::ige_decrypt(padded_ciphertext, key, iv)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n fn get_test_auth_key() -> AuthKey {\n\n let mut buffer = [0u8; 256];\n\n buffer\n\n .iter_mut()\n\n .enumerate()\n\n .for_each(|(i, x)| *x = i as u8);\n\n\n\n AuthKey::from_bytes(buffer)\n\n }\n\n\n\n fn get_test_msg_key() -> [u8; 16] {\n\n let mut buffer = [0u8; 16];\n", "file_path": "lib/grammers-crypto/src/lib.rs", "rank": 11, "score": 194600.6894953989 }, { "content": "// PH2(password, salt1, salt2)\n\n// := SH(pbkdf2(sha512, PH1(password, salt1, salt2), salt1, 100000), salt2)\n\nfn ph2(password: impl AsRef<[u8]>, salt1: &[u8], salt2: &[u8]) -> Output<Sha256> {\n\n let hash1 = ph1(password, salt1, salt2);\n\n\n\n // 512-bit derived key\n\n let mut dk = [0u8; 64];\n\n pbkdf2::pbkdf2::<Hmac<Sha512>>(&hash1, salt1, 100000, &mut dk);\n\n\n\n sh(&dk, salt2)\n\n}\n\n\n", "file_path": "lib/grammers-crypto/src/two_factor_auth.rs", "rank": 12, "score": 190523.818045355 }, { "content": "// PH1(password, salt1, salt2) := SH(SH(password, salt1), salt2)\n\nfn ph1(password: impl AsRef<[u8]>, salt1: &[u8], salt2: &[u8]) -> Output<Sha256> {\n\n sh(&sh(password, salt1), salt2)\n\n}\n\n\n", "file_path": "lib/grammers-crypto/src/two_factor_auth.rs", "rank": 13, "score": 190523.818045355 }, { "content": "/// Defines the `impl From` corresponding to the 
definition:\n\n///\n\n/// ```ignore\n\n/// impl From<Name> for Enum {\n\n/// }\n\n/// ```\n\nfn write_impl_from<W: Write>(\n\n file: &mut W,\n\n indent: &str,\n\n ty: &Type,\n\n metadata: &Metadata,\n\n) -> io::Result<()> {\n\n for def in metadata.defs_with_type(ty) {\n\n writeln!(\n\n file,\n\n \"{}impl From<{}> for {} {{\",\n\n indent,\n\n rustifier::definitions::qual_name(def),\n\n rustifier::types::type_name(ty),\n\n )?;\n\n writeln!(\n\n file,\n\n \"{} fn from({}x: {}) -> Self {{\",\n\n indent,\n\n if def.params.is_empty() { \"_\" } else { \"\" },\n\n rustifier::definitions::qual_name(def),\n", "file_path": "lib/grammers-tl-gen/src/enums.rs", "rank": 14, "score": 189188.05733372338 }, { "content": "/// Defines the `impl From` or `impl TryFrom` corresponding to the definition:\n\n///\n\n/// ```ignore\n\n/// impl From<Enum> for Name {\n\n/// }\n\n///\n\n/// impl TryFrom<Enum> for Name {\n\n/// type Error = ();\n\n/// }\n\n/// ```\n\nfn write_impl_from<W: Write>(\n\n file: &mut W,\n\n indent: &str,\n\n def: &Definition,\n\n metadata: &Metadata,\n\n) -> io::Result<()> {\n\n let infallible = metadata.defs_with_type(&def.ty).len() == 1;\n\n let type_name = rustifier::definitions::type_name(&def);\n\n\n\n writeln!(\n\n file,\n\n \"{}impl {}From<{}> for {} {{\",\n\n indent,\n\n if infallible { \"\" } else { \"Try\" },\n\n rustifier::types::qual_name(&def.ty),\n\n type_name,\n\n )?;\n\n if !infallible {\n\n writeln!(file, \"{} type Error = ();\", indent)?;\n\n }\n", "file_path": "lib/grammers-tl-gen/src/structs.rs", "rank": 15, "score": 189137.73944653253 }, { "content": "#[must_use]\n\npub fn encrypt_data_v2(plaintext: &[u8], auth_key: &AuthKey) -> Vec<u8> {\n\n let random_padding = {\n\n let mut buffer = [0; 32];\n\n getrandom(&mut buffer).expect(\"failed to generate a secure padding\");\n\n buffer\n\n };\n\n\n\n do_encrypt_data_v2(plaintext, auth_key, &random_padding)\n\n}\n\n\n", "file_path": "lib/grammers-crypto/src/lib.rs", "rank": 16, "score": 187555.19740616917 
}, { "content": "fn check_p_prime_and_subgroup(p: &[u8], g: &i32) -> bool {\n\n let p = &BigUint::from_bytes_be(p);\n\n\n\n if !safe_prime::check(p) {\n\n return false;\n\n }\n\n\n\n match g {\n\n 2 => p % 8u8 == BigUint::from(7u8),\n\n 3 => p % 3u8 == BigUint::from(2u8),\n\n 4 => true,\n\n 5 => {\n\n let mod_value = p % 5u8;\n\n mod_value == BigUint::from(1u8) || mod_value == BigUint::from(4u8)\n\n }\n\n 6 => {\n\n let mod_value = p % 24u8;\n\n mod_value == BigUint::from(19u8) || mod_value == BigUint::from(23u8)\n\n }\n\n 7 => {\n\n let mod_value = p % 7u8;\n\n mod_value == BigUint::from(3u8)\n\n || mod_value == BigUint::from(5u8)\n\n || mod_value == BigUint::from(6u8)\n\n }\n\n _ => panic!(\"Unexpected g parameter\"),\n\n }\n\n}\n\n\n", "file_path": "lib/grammers-crypto/src/two_factor_auth.rs", "rank": 17, "score": 183689.040544053 }, { "content": "/// The third step of the process to generate an authorization key.\n\npub fn step3(data: Step2, response: &[u8]) -> Result<(Vec<u8>, Step3), Error> {\n\n let random_bytes = {\n\n let mut buffer = [0; 256 + 16];\n\n getrandom(&mut buffer).expect(\"failed to generate secure data for auth key\");\n\n buffer\n\n };\n\n let now = SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"system time is before epoch\")\n\n .as_secs() as i32;\n\n\n\n do_step3(data, response, &random_bytes, now)\n\n}\n\n\n", "file_path": "lib/grammers-mtproto/src/authentication.rs", "rank": 18, "score": 182348.2166536655 }, { "content": "/// The second step of the process to generate an authorization key.\n\npub fn step2(data: Step1, response: &[u8]) -> Result<(Vec<u8>, Step2), Error> {\n\n let random_bytes = {\n\n let mut buffer = [0; 32 + 256];\n\n getrandom(&mut buffer).expect(\"failed to generate secure data for auth key\");\n\n buffer\n\n };\n\n\n\n do_step2(data, response, &random_bytes)\n\n}\n\n\n", "file_path": "lib/grammers-mtproto/src/authentication.rs", "rank": 19, "score": 182348.2166536655 }, { "content": "/// The first step 
of the process to generate an authorization key.\n\npub fn step1() -> Result<(Vec<u8>, Step1), Error> {\n\n let random_bytes = {\n\n let mut buffer = [0; 16];\n\n getrandom(&mut buffer).expect(\"failed to generate secure data for auth key\");\n\n buffer\n\n };\n\n\n\n do_step1(&random_bytes)\n\n}\n\n\n", "file_path": "lib/grammers-mtproto/src/authentication.rs", "rank": 20, "score": 180697.74432966494 }, { "content": "/// This method is the inverse of `encrypt_data_v2`.\n\npub fn decrypt_data_v2(ciphertext: &[u8], auth_key: &AuthKey) -> Result<Vec<u8>, Error> {\n\n // Decryption is done from the server\n\n let side = Side::Server;\n\n let x = side.x();\n\n\n\n if ciphertext.len() < 24 || (ciphertext.len() - 24) % 16 != 0 {\n\n return Err(Error::InvalidBuffer);\n\n }\n\n\n\n // TODO Check salt, session_id and sequence_number\n\n let key_id = &ciphertext[..8];\n\n if auth_key.key_id != *key_id {\n\n return Err(Error::AuthKeyMismatch);\n\n }\n\n\n\n let msg_key = {\n\n let mut buffer = [0; 16];\n\n buffer.copy_from_slice(&ciphertext[8..8 + 16]);\n\n buffer\n\n };\n", "file_path": "lib/grammers-crypto/src/lib.rs", "rank": 21, "score": 179588.81215316098 }, { "content": "fn message_peer(message: &tl::enums::Message) -> Option<tl::enums::Peer> {\n\n match message {\n\n tl::enums::Message::Empty(_) => None,\n\n tl::enums::Message::Message(m) => Some(m.peer_id.clone()),\n\n tl::enums::Message::Service(m) => Some(m.peer_id.clone()),\n\n }\n\n}\n\n\n", "file_path": "lib/grammers-session/src/message_box/adaptor.rs", "rank": 22, "score": 177673.7223109838 }, { "content": "type AdminFutGen<F> = fn(AdminRightsBuilderInner) -> F;\n\n\n\npub(crate) struct AdminRightsBuilderInner {\n\n client: Client,\n\n chat: Chat,\n\n peer: tl::enums::InputPeer,\n\n user: tl::enums::InputUser, // TODO redundant with `peer` (but less annoying to use)\n\n rights: tl::types::ChatAdminRights,\n\n rank: String,\n\n}\n\n\n\nimpl AdminRightsBuilderInner {\n\n // Perform the call.\n\n pub(crate) async 
fn invoke(self) -> Result<(), InvocationError> {\n\n if let Some(chan) = self.chat.to_input_channel() {\n\n self.client\n\n .invoke(&tl::functions::channels::EditAdmin {\n\n channel: chan,\n\n user_id: self.user.clone(),\n\n admin_rights: tl::enums::ChatAdminRights::Rights(self.rights.clone()),\n", "file_path": "lib/grammers-client/src/types/chats.rs", "rank": 23, "score": 176819.76492910556 }, { "content": "type BannedFutGen<F> = fn(BannedRightsBuilderInner) -> F;\n\n\n\npub(crate) struct BannedRightsBuilderInner {\n\n client: Client,\n\n chat: Chat,\n\n peer: tl::enums::InputPeer,\n\n user: tl::enums::InputUser,\n\n rights: tl::types::ChatBannedRights,\n\n}\n\n\n\nimpl BannedRightsBuilderInner {\n\n // Perform the call.\n\n pub(crate) async fn invoke(self) -> Result<(), InvocationError> {\n\n if let Some(chan) = self.chat.to_input_channel() {\n\n self.client\n\n .invoke(&tl::functions::channels::EditBanned {\n\n channel: chan,\n\n participant: self.peer.clone(),\n\n banned_rights: tl::enums::ChatBannedRights::Rights(self.rights.clone()),\n\n })\n", "file_path": "lib/grammers-client/src/types/chats.rs", "rank": 24, "score": 176819.76492910556 }, { "content": "pub fn generate_rust_code(\n\n file: &mut impl Write,\n\n definitions: &[Definition],\n\n layer: i32,\n\n config: &Config,\n\n) -> io::Result<()> {\n\n writeln!(\n\n file,\n\n r#\"\n\n// Copyright 2020 - developers of the `grammers` project.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your\n\n// option. 
This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\n/// The schema layer from which the definitions were generated.\n\npub const LAYER: i32 = {};\n\n\"#,\n\n layer\n\n )?;\n\n\n\n if config.gen_name_for_id {\n\n writeln!(\n\n file,\n\n r#\"\n", "file_path": "lib/grammers-tl-gen/src/lib.rs", "rank": 25, "score": 174750.00611522974 }, { "content": "/// Helper function to avoid the boilerplate of checking for invalid\n\n/// new nonce hash.\n\nfn check_new_nonce_hash(got: &[u8; 16], expected: &[u8; 16]) -> Result<(), Error> {\n\n if got == expected {\n\n Ok(())\n\n } else {\n\n Err(Error::InvalidNewNonceHash {\n\n got: *got,\n\n expected: *expected,\n\n })\n\n }\n\n}\n\n\n", "file_path": "lib/grammers-mtproto/src/authentication.rs", "rank": 26, "score": 170193.6846166205 }, { "content": "/// The last step of the process to generate an authorization key.\n\npub fn create_key(data: Step3, response: &[u8]) -> Result<Finished, Error> {\n\n let Step3 {\n\n nonce,\n\n server_nonce,\n\n new_nonce,\n\n gab,\n\n time_offset,\n\n } = data;\n\n let dh_gen = <tl::functions::SetClientDhParams as RemoteCall>::Return::from_bytes(response)?;\n\n\n\n struct DhGenData {\n\n nonce: [u8; 16],\n\n server_nonce: [u8; 16],\n\n new_nonce_hash: [u8; 16],\n\n nonce_number: u8,\n\n }\n\n\n\n let dh_gen = match dh_gen {\n\n tl::enums::SetClientDhParamsAnswer::DhGenOk(x) => DhGenData {\n\n nonce: x.nonce,\n", "file_path": "lib/grammers-mtproto/src/authentication.rs", "rank": 27, "score": 169628.01270494887 }, { "content": "fn ignore_type(ty: &Type) -> bool {\n\n SPECIAL_CASED_TYPES.iter().any(|&x| x == ty.name)\n\n}\n\n\n", "file_path": "lib/grammers-tl-gen/src/lib.rs", "rank": 28, "score": 169211.33560056056 }, { "content": "/// Find the `// LAYER #` comment, and return its value if it's valid.\n\nfn find_layer(file: &str) -> io::Result<Option<i32>> {\n\n const LAYER_MARK: &str = \"LAYER\";\n\n\n\n 
Ok(BufReader::new(File::open(file)?).lines().find_map(|line| {\n\n let line = line.unwrap();\n\n if line.trim().starts_with(\"//\") {\n\n if let Some(pos) = line.find(LAYER_MARK) {\n\n if let Ok(layer) = line[pos + LAYER_MARK.len()..].trim().parse() {\n\n return Some(layer);\n\n }\n\n }\n\n }\n\n\n\n None\n\n }))\n\n}\n\n\n", "file_path": "lib/grammers-tl-types/build.rs", "rank": 29, "score": 158284.5569001875 }, { "content": "/// Writes an enumeration listing all types such as the following rust code:\n\n///\n\n/// ```ignore\n\n/// pub enum Name {\n\n/// Variant(crate::types::Name),\n\n/// }\n\n/// ```\n\nfn write_enum<W: Write>(\n\n file: &mut W,\n\n indent: &str,\n\n ty: &Type,\n\n metadata: &Metadata,\n\n config: &Config,\n\n) -> io::Result<()> {\n\n if config.impl_debug {\n\n writeln!(file, \"{}#[derive(Debug)]\", indent)?;\n\n }\n\n\n\n writeln!(file, \"{}#[derive(Clone, PartialEq)]\", indent)?;\n\n writeln!(\n\n file,\n\n \"{}pub enum {} {{\",\n\n indent,\n\n rustifier::types::type_name(ty)\n\n )?;\n\n for d in metadata.defs_with_type(ty) {\n\n write!(\n", "file_path": "lib/grammers-tl-gen/src/enums.rs", "rank": 30, "score": 156741.04255841492 }, { "content": "/// Defines the `struct` corresponding to the definition:\n\n///\n\n/// ```ignore\n\n/// pub struct Name {\n\n/// pub field: Type,\n\n/// }\n\n/// ```\n\nfn write_struct<W: Write>(\n\n file: &mut W,\n\n indent: &str,\n\n def: &Definition,\n\n _metadata: &Metadata,\n\n config: &Config,\n\n) -> io::Result<()> {\n\n // Define struct\n\n if config.impl_debug {\n\n writeln!(file, \"{}#[derive(Debug)]\", indent)?;\n\n }\n\n\n\n writeln!(file, \"{}#[derive(Clone, PartialEq)]\", indent)?;\n\n write!(\n\n file,\n\n \"{}pub struct {}{} {{\",\n\n indent,\n\n rustifier::definitions::type_name(def),\n\n get_generic_param_list(def, true),\n\n )?;\n", "file_path": "lib/grammers-tl-gen/src/structs.rs", "rank": 31, "score": 156679.14419506284 }, { "content": "/// An inline button, able to hold some arbitrary binary 
payload.\n\n///\n\n/// Although any combination of bytes can be used (including null, and not just UTF-8), [there is\n\n/// a limit](https://core.telegram.org/bots/api#inlinekeyboardbutton) to how long the payload data\n\n/// can be (see the description for the `callback_data` field for an up-to-date value). If you\n\n/// need to store more data than that, consider storing the real data in some form of database,\n\n/// and a reference to that data's row in the button's payload.\n\n///\n\n/// Both the text and bytes data must be non-empty.\n\npub fn inline<T: Into<String>, B: Into<Vec<u8>>>(text: T, bytes: B) -> Inline {\n\n Inline(\n\n tl::types::KeyboardButtonCallback {\n\n text: text.into(),\n\n data: bytes.into(),\n\n requires_password: false,\n\n }\n\n .into(),\n\n )\n\n}\n\n\n", "file_path": "lib/grammers-client/src/types/button.rs", "rank": 32, "score": 154636.64715776342 }, { "content": "/// Defines the `impl Deserializable` corresponding to the type definitions:\n\n///\n\n/// ```ignore\n\n/// impl crate::Deserializable for Name {\n\n/// fn deserialize(buf: crate::deserialize::Buffer) -> crate::deserialize::Result<Self> {\n\n/// use crate::Identifiable;\n\n/// Ok(match u32::deserialize(buf)? 
{\n\n/// crate::types::Name::CONSTRUCTOR_ID => Self::Variant(crate::types::Name::deserialize(buf)?),\n\n/// _ => return Err(std::io::Error::new(std::io::ErrorKind::InvalidData, ...)),\n\n/// })\n\n/// }\n\n/// }\n\n/// ```\n\nfn write_deserializable<W: Write>(\n\n file: &mut W,\n\n indent: &str,\n\n ty: &Type,\n\n metadata: &Metadata,\n\n) -> io::Result<()> {\n\n writeln!(\n\n file,\n\n \"{}impl crate::Deserializable for {} {{\",\n\n indent,\n\n rustifier::types::type_name(ty)\n\n )?;\n\n writeln!(\n\n file,\n\n \"{} fn deserialize(buf: crate::deserialize::Buffer) -> crate::deserialize::Result<Self> {{\",\n\n indent\n\n )?;\n\n writeln!(file, \"{} use crate::Identifiable;\", indent)?;\n\n writeln!(file, \"{} let id = u32::deserialize(buf)?;\", indent)?;\n\n writeln!(file, \"{} Ok(match id {{\", indent)?;\n", "file_path": "lib/grammers-tl-gen/src/enums.rs", "rank": 33, "score": 146466.26520923918 }, { "content": "/// Defines the `impl Serializable` corresponding to the type definitions:\n\n///\n\n/// ```ignore\n\n/// impl crate::Serializable for Name {\n\n/// fn serialize(&self, buf: crate::serialize::Buffer) {\n\n/// use crate::Identifiable;\n\n/// match self {\n\n/// Self::Variant(x) => {\n\n/// crate::types::Name::CONSTRUCTOR_ID.serialize(buf);\n\n/// x.serialize(buf)\n\n/// },\n\n/// }\n\n/// }\n\n/// }\n\n/// ```\n\nfn write_serializable<W: Write>(\n\n file: &mut W,\n\n indent: &str,\n\n ty: &Type,\n\n metadata: &Metadata,\n\n) -> io::Result<()> {\n\n writeln!(\n\n file,\n\n \"{}impl crate::Serializable for {} {{\",\n\n indent,\n\n rustifier::types::type_name(ty)\n\n )?;\n\n writeln!(\n\n file,\n\n \"{} fn serialize(&self, buf: crate::serialize::Buffer) {{\",\n\n indent\n\n )?;\n\n\n\n writeln!(file, \"{} use crate::Identifiable;\", indent)?;\n\n writeln!(file, \"{} match self {{\", indent)?;\n", "file_path": "lib/grammers-tl-gen/src/enums.rs", "rank": 34, "score": 146460.98696170174 }, { "content": "/// Writes an entire definition as Rust code (`enum` and 
`impl`).\n\nfn write_definition<W: Write>(\n\n file: &mut W,\n\n indent: &str,\n\n ty: &Type,\n\n metadata: &Metadata,\n\n config: &Config,\n\n) -> io::Result<()> {\n\n write_enum(file, indent, ty, metadata, config)?;\n\n write_serializable(file, indent, ty, metadata)?;\n\n write_deserializable(file, indent, ty, metadata)?;\n\n if config.impl_from_type {\n\n write_impl_from(file, indent, ty, metadata)?;\n\n }\n\n Ok(())\n\n}\n\n\n\n/// Write the entire module dedicated to enums.\n\npub(crate) fn write_enums_mod<W: Write>(\n\n mut file: &mut W,\n\n definitions: &[Definition],\n", "file_path": "lib/grammers-tl-gen/src/enums.rs", "rank": 35, "score": 146443.3151517418 }, { "content": "/// Defines the `impl Deserializable` corresponding to the definition:\n\n///\n\n/// ```ignore\n\n/// impl crate::Deserializable for Name {\n\n/// fn deserialize(buf: crate::deserialize::Buffer) -> crate::deserialize::Result<Self> {\n\n/// let field = FieldType::deserialize(buf)?;\n\n/// Ok(Name { field })\n\n/// }\n\n/// }\n\n/// ```\n\nfn write_deserializable<W: Write>(\n\n file: &mut W,\n\n indent: &str,\n\n def: &Definition,\n\n _metadata: &Metadata,\n\n) -> io::Result<()> {\n\n writeln!(\n\n file,\n\n \"{}impl{} crate::Deserializable for {}{} {{\",\n\n indent,\n\n get_generic_param_list(def, true),\n\n rustifier::definitions::type_name(def),\n\n get_generic_param_list(def, false),\n\n )?;\n\n writeln!(\n\n file,\n\n \"{} fn deserialize({}buf: crate::deserialize::Buffer) -> crate::deserialize::Result<Self> {{\",\n\n indent,\n\n if def.params.is_empty() { \"_\" } else { \"\" }\n\n )?;\n", "file_path": "lib/grammers-tl-gen/src/structs.rs", "rank": 36, "score": 146400.56274862555 }, { "content": "/// Defines the `impl Serializable` corresponding to the definition:\n\n///\n\n/// ```ignore\n\n/// impl crate::Serializable for Name {\n\n/// fn serialize(&self, buf: crate::serialize::Buffer) {\n\n/// self.field.serialize(buf);\n\n/// }\n\n/// }\n\n/// ```\n\nfn write_serializable<W: 
Write>(\n\n file: &mut W,\n\n indent: &str,\n\n def: &Definition,\n\n _metadata: &Metadata,\n\n) -> io::Result<()> {\n\n writeln!(\n\n file,\n\n \"{}impl{} crate::Serializable for {}{} {{\",\n\n indent,\n\n get_generic_param_list(def, true),\n\n rustifier::definitions::type_name(def),\n\n get_generic_param_list(def, false),\n\n )?;\n\n writeln!(\n\n file,\n\n \"{} fn serialize(&self, {}buf: crate::serialize::Buffer) {{\",\n\n indent,\n\n if def.category == Category::Types && def.params.is_empty() {\n\n \"_\"\n", "file_path": "lib/grammers-tl-gen/src/structs.rs", "rank": 37, "score": 146397.4882037579 }, { "content": "/// Defines the `impl Identifiable` corresponding to the definition:\n\n///\n\n/// ```ignore\n\n/// impl crate::Identifiable for Name {\n\n/// fn constructor_id() -> u32 { 123 }\n\n/// }\n\n/// ```\n\nfn write_identifiable<W: Write>(\n\n file: &mut W,\n\n indent: &str,\n\n def: &Definition,\n\n _metadata: &Metadata,\n\n) -> io::Result<()> {\n\n writeln!(\n\n file,\n\n \"{}impl{} crate::Identifiable for {}{} {{\",\n\n indent,\n\n get_generic_param_list(def, true),\n\n rustifier::definitions::type_name(def),\n\n get_generic_param_list(def, false),\n\n )?;\n\n writeln!(\n\n file,\n\n \"{} const CONSTRUCTOR_ID: u32 = {};\",\n\n indent, def.id\n\n )?;\n\n writeln!(file, \"{}}}\", indent)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/grammers-tl-gen/src/structs.rs", "rank": 38, "score": 146397.0282604966 }, { "content": "/// Defines the `impl RemoteCall` corresponding to the definition:\n\n///\n\n/// ```ignore\n\n/// impl crate::RemoteCall for Name {\n\n/// type Return = Name;\n\n/// }\n\n/// ```\n\nfn write_rpc<W: Write>(\n\n file: &mut W,\n\n indent: &str,\n\n def: &Definition,\n\n _metadata: &Metadata,\n\n) -> io::Result<()> {\n\n writeln!(\n\n file,\n\n \"{}impl{} crate::RemoteCall for {}{} {{\",\n\n indent,\n\n get_generic_param_list(def, true),\n\n rustifier::definitions::type_name(def),\n\n get_generic_param_list(def, false),\n\n )?;\n\n writeln!(\n\n 
file,\n\n \"{} type Return = {}{};\",\n\n indent,\n\n rustifier::types::qual_name(&def.ty),\n\n if def.ty.generic_ref { \"::Return\" } else { \"\" },\n\n )?;\n\n writeln!(file, \"{}}}\", indent)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/grammers-tl-gen/src/structs.rs", "rank": 39, "score": 146392.31113254948 }, { "content": "/// Writes an entire definition as Rust code (`struct` and `impl`).\n\nfn write_definition<W: Write>(\n\n file: &mut W,\n\n indent: &str,\n\n def: &Definition,\n\n metadata: &Metadata,\n\n config: &Config,\n\n) -> io::Result<()> {\n\n write_struct(file, indent, def, metadata, config)?;\n\n write_identifiable(file, indent, def, metadata)?;\n\n write_serializable(file, indent, def, metadata)?;\n\n if def.category == Category::Types || config.deserializable_functions {\n\n write_deserializable(file, indent, def, metadata)?;\n\n }\n\n if def.category == Category::Functions {\n\n write_rpc(file, indent, def, metadata)?;\n\n }\n\n if def.category == Category::Types && config.impl_from_enum {\n\n write_impl_from(file, indent, def, metadata)?;\n\n }\n\n Ok(())\n", "file_path": "lib/grammers-tl-gen/src/structs.rs", "rank": 40, "score": 146391.98914565655 }, { "content": "fn pad_to_256(data: &[u8]) -> Vec<u8> {\n\n let mut new_vec = vec![0; 256 - data.len()];\n\n new_vec.extend(data);\n\n new_vec\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn check_calculations_1() {\n\n let salt1 = vec![1];\n\n let salt2 = vec![2];\n\n let g = 3;\n\n let p = pad_to_256(&vec![47]);\n\n let g_b = vec![5];\n\n let a = vec![6];\n\n let password = vec![7];\n\n\n", "file_path": "lib/grammers-crypto/src/two_factor_auth.rs", "rank": 41, "score": 141562.41636392137 }, { "content": "#[test]\n\nfn generic_functions_use_generic_parameters() -> io::Result<()> {\n\n let definitions = get_definitions(\n\n \"\n\n ---functions---\n\n invokeWithLayer#da9b0d0d {X:Type} layer:int query:!X = X;\n\n \",\n\n );\n\n let mut file = Vec::new();\n\n 
generate_rust_code(\n\n &mut file,\n\n &definitions,\n\n LAYER,\n\n &Config {\n\n gen_name_for_id: false,\n\n deserializable_functions: true,\n\n impl_debug: true,\n\n impl_from_enum: true,\n\n impl_from_type: true,\n\n },\n\n )?;\n", "file_path": "lib/grammers-tl-gen/tests/lib.rs", "rank": 42, "score": 140360.39921843878 }, { "content": "fn gcd(mut a: u128, mut b: u128) -> u128 {\n\n while b != 0 {\n\n let (na, nb) = (b, a % b);\n\n a = na;\n\n b = nb;\n\n }\n\n a\n\n}\n\n\n", "file_path": "lib/grammers-crypto/src/factorize.rs", "rank": 43, "score": 138610.51046924663 }, { "content": "// Inner body of `encrypt_data_v2`, separated for testing purposes.\n\nfn do_encrypt_data_v2(plaintext: &[u8], auth_key: &AuthKey, random_padding: &[u8; 32]) -> Vec<u8> {\n\n let padded_plaintext = {\n\n // \"Note that MTProto 2.0 requires from 12 to 1024 bytes of padding\"\n\n // \"[...] the resulting message length be divisible by 16 bytes\"\n\n let padding_len = determine_padding_v2_length(plaintext.len());\n\n let mut buffer = Vec::with_capacity(plaintext.len() + padding_len);\n\n buffer.extend(plaintext);\n\n buffer.extend(&random_padding[..padding_len]);\n\n buffer\n\n };\n\n\n\n // Encryption is done by the client\n\n let side = Side::Client;\n\n let x = side.x();\n\n\n\n // msg_key_large = SHA256 (substr (auth_key, 88+x, 32) + plaintext + random_padding);\n\n let msg_key_large = {\n\n let mut hasher = Sha256::new();\n\n hasher.update(&auth_key.data[88 + x..88 + x + 32]);\n\n hasher.update(&padded_plaintext);\n", "file_path": "lib/grammers-crypto/src/lib.rs", "rank": 44, "score": 134950.37958998766 }, { "content": "fn modpow(mut n: u128, mut e: u128, m: u128) -> u128 {\n\n if m == 1 {\n\n return 0;\n\n }\n\n\n\n let mut result = 1;\n\n n %= m;\n\n while e > 0 {\n\n if e % 2 == 1 {\n\n result = (result * n) % m;\n\n }\n\n e >>= 1;\n\n n = (n * n) % m;\n\n }\n\n result\n\n}\n\n\n\n/// Factorize the given number into its two prime factors.\n\n///\n\n/// The algorithm here is a 
faster variant of [Pollard's rho algorithm],\n\n/// published by [Richard Brent], based on\n\n/// https://comeoncodeon.wordpress.com/2010/09/18/pollard-rho-brent-integer-factorization/.\n\n///\n\n/// Pollard's rho algorithm: https://en.wikipedia.org/wiki/Pollard%27s_rho_algorithm\n\n/// Richard Brent: https://maths-people.anu.edu.au/~brent/pd/rpb051i.pdf\n", "file_path": "lib/grammers-crypto/src/factorize.rs", "rank": 45, "score": 134221.33787208467 }, { "content": "/// Prepare the password for sending to telegram for verification.\n\n/// The method returns M1 and g_a parameters that should be sent to the telegram\n\n/// (without a raw password)\n\n///\n\n/// The algorithm is described here: https://core.telegram.org/api/srp\n\npub fn calculate_2fa(\n\n salt1: &[u8],\n\n salt2: &[u8],\n\n g: &i32,\n\n p: &[u8],\n\n g_b: Vec<u8>,\n\n a: Vec<u8>,\n\n password: impl AsRef<[u8]>,\n\n) -> (Vec<u8>, Vec<u8>) {\n\n // Prepare our parameters\n\n let big_p = BigUint::from_bytes_be(&p);\n\n\n\n let g_b = pad_to_256(&g_b);\n\n let a = pad_to_256(&a);\n\n\n\n let g_for_hash = vec![*g as u8];\n\n let g_for_hash = pad_to_256(&g_for_hash);\n\n\n\n let big_g_b = BigUint::from_bytes_be(&g_b);\n\n\n", "file_path": "lib/grammers-crypto/src/two_factor_auth.rs", "rank": 46, "score": 133395.84737009884 }, { "content": "/// Generate the AES key and initialization vector from the server nonce\n\n/// and the new client nonce. 
This is done after the DH exchange.\n\npub fn generate_key_data_from_nonce(\n\n server_nonce: &[u8; 16],\n\n new_nonce: &[u8; 32],\n\n) -> ([u8; 32], [u8; 32]) {\n\n let mut hasher = Sha1::new();\n\n\n\n // hash1 = sha1(new_nonce + server_nonce).digest()\n\n let hash1: [u8; 20] = {\n\n hasher.reset();\n\n hasher.update(new_nonce);\n\n hasher.update(server_nonce);\n\n hasher.digest().bytes()\n\n };\n\n // hash2 = sha1(server_nonce + new_nonce).digest()\n\n let hash2: [u8; 20] = {\n\n hasher.reset();\n\n hasher.update(server_nonce);\n\n hasher.update(new_nonce);\n\n hasher.digest().bytes()\n\n };\n", "file_path": "lib/grammers-crypto/src/lib.rs", "rank": 47, "score": 133391.1956360567 }, { "content": "/// Calculate the key based on Telegram [guidelines for MTProto 2],\n\n/// returning the pair `(key, iv)` for use in AES-IGE mode.\n\n///\n\n/// [guidelines for MTProto 2]: https://core.telegram.org/mtproto/description#defining-aes-key-and-initialization-vector\n\nfn calc_key(auth_key: &AuthKey, msg_key: &[u8; 16], side: Side) -> ([u8; 32], [u8; 32]) {\n\n let x = side.x();\n\n\n\n // sha256_a = SHA256 (msg_key + substr (auth_key, x, 36));\n\n let sha256_a = {\n\n let mut hasher = Sha256::new();\n\n hasher.update(msg_key);\n\n hasher.update(&auth_key.data[x..x + 36]);\n\n hasher.finalize()\n\n };\n\n\n\n // sha256_b = SHA256 (substr (auth_key, 40+x, 36) + msg_key);\n\n let sha256_b = {\n\n let mut hasher = Sha256::new();\n\n hasher.update(&auth_key.data[40 + x..40 + x + 36]);\n\n hasher.update(msg_key);\n\n hasher.finalize()\n\n };\n\n\n\n // aes_key = substr (sha256_a, 0, 8) + substr (sha256_b, 8, 16) + substr (sha256_a, 24, 8);\n", "file_path": "lib/grammers-crypto/src/lib.rs", "rank": 48, "score": 133369.61177485122 }, { "content": "/// Get the list of generic parameters:\n\n///\n\n/// ```ignore\n\n/// <X, Y>\n\n/// ```\n\nfn get_generic_param_list(def: &Definition, declaring: bool) -> String {\n\n let mut result = String::new();\n\n for param in def.params.iter() 
{\n\n match param.ty {\n\n ParameterType::Flags => {}\n\n ParameterType::Normal { ref ty, .. } => {\n\n if ty.generic_ref {\n\n if result.is_empty() {\n\n result.push('<');\n\n } else {\n\n result.push_str(\", \");\n\n }\n\n result.push_str(&ty.name);\n\n if declaring {\n\n result.push_str(\": crate::RemoteCall\");\n\n }\n\n }\n\n }\n\n }\n\n }\n\n if !result.is_empty() {\n\n result.push('>');\n\n }\n\n result\n\n}\n\n\n", "file_path": "lib/grammers-tl-gen/src/structs.rs", "rank": 49, "score": 133235.06487438906 }, { "content": "/// Helper function to avoid the boilerplate of checking for invalid nonce.\n\nfn check_nonce(got: &[u8; 16], expected: &[u8; 16]) -> Result<(), Error> {\n\n if got == expected {\n\n Ok(())\n\n } else {\n\n Err(Error::InvalidNonce {\n\n got: *got,\n\n expected: *expected,\n\n })\n\n }\n\n}\n\n\n", "file_path": "lib/grammers-mtproto/src/authentication.rs", "rank": 50, "score": 133119.1034819603 }, { "content": "/// Generate the inline keyboard reply markup with a few more numbers from the sequence.\n\nfn fib_markup(mut a: u128, mut b: u128) -> reply_markup::Inline {\n\n let mut rows = Vec::with_capacity(NUMBERS_PER_PAGE + 1);\n\n for _ in 0..NUMBERS_PER_PAGE {\n\n let text = a.to_string();\n\n rows.push(vec![button::inline(&text, text.as_bytes())]);\n\n\n\n let bb = b;\n\n b = a + b;\n\n a = bb;\n\n }\n\n\n\n let next = format!(\"{},{}\", a, b);\n\n if next.len() > MAX_PAYLOAD_DATA_LEN {\n\n rows.push(vec![button::inline(\"I'm satisfied!!\", b\"done\".to_vec())]);\n\n } else {\n\n rows.push(vec![\n\n button::inline(\"Restart!\", b\"0,1\".to_vec()),\n\n button::inline(\"More!\", format!(\"{},{}\", a, b).into_bytes()),\n\n ]);\n\n }\n", "file_path": "lib/grammers-client/examples/inline-pagination.rs", "rank": 51, "score": 132231.90320367945 }, { "content": "/// Helper function to avoid the boilerplate of checking for invalid\n\n/// server nonce.\n\nfn check_server_nonce(got: &[u8; 16], expected: &[u8; 16]) -> Result<(), Error> {\n\n if got == 
expected {\n\n Ok(())\n\n } else {\n\n Err(Error::InvalidServerNonce {\n\n got: *got,\n\n expected: *expected,\n\n })\n\n }\n\n}\n\n\n", "file_path": "lib/grammers-mtproto/src/authentication.rs", "rank": 52, "score": 131952.11430310266 }, { "content": "// n.b.: the `do_step` functions are pure so that they can be tested.\n\nfn do_step1(random_bytes: &[u8; 16]) -> Result<(Vec<u8>, Step1), Error> {\n\n // Step 1. Generates a secure random nonce.\n\n let nonce = *random_bytes;\n\n Ok((\n\n tl::functions::ReqPqMulti { nonce }.to_bytes(),\n\n Step1 { nonce },\n\n ))\n\n}\n\n\n", "file_path": "lib/grammers-mtproto/src/authentication.rs", "rank": 53, "score": 131952.11430310266 }, { "content": "/// Hide a previously-sent keyboard.\n\n///\n\n/// See the return type for further configuration options.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # async fn f(client: &mut grammers_client::Client, chat: &grammers_client::types::Chat) -> Result<(), Box<dyn std::error::Error>> {\n\n/// use grammers_client::{InputMessage, reply_markup};\n\n///\n\n/// client.send_message(chat, InputMessage::text(\"Bot keyboards removed.\").reply_markup(&reply_markup::hide())).await?;\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn hide() -> Hide {\n\n Hide(tl::types::ReplyKeyboardHide { selective: false })\n\n}\n\n\n", "file_path": "lib/grammers-client/src/types/reply_markup.rs", "rank": 54, "score": 130368.70814900019 }, { "content": "/// The length of a string, according to Telegram.\n\n///\n\n/// Telegram considers the length of the string with surrogate pairs.\n\nfn telegram_string_len(string: &str) -> i32 {\n\n // https://en.wikipedia.org/wiki/Plane_(Unicode)#Overview\n\n string.encode_utf16().count() as i32\n\n}\n\n\n\n/// Pushes a new `MessageEntity` instance with zero-length to the specified vector.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// let mut vec = Vec::new();\n\n/// push_entity!(MessageEntityBold(1) => vec);\n\n/// push_entity!(MessageEntityPre(2, language = 
\"rust\".to_string()) => vec);\n\n/// ```\n\nmacro_rules! push_entity {\n\n ( $ty:ident($offset:expr) => $vector:expr ) => {\n\n $vector.push(\n\n tl::types::$ty {\n\n offset: $offset,\n\n length: 0,\n", "file_path": "lib/grammers-client/src/parsers.rs", "rank": 55, "score": 128407.56408167635 }, { "content": "/// \"Forces\" the user to send a reply.\n\n///\n\n/// This will cause the user's application to automatically select the message for replying to it,\n\n/// although the user is still able to dismiss the reply and send a normal message.\n\n///\n\n/// See the return type for further configuration options.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # async fn f(client: &mut grammers_client::Client, chat: &grammers_client::types::Chat) -> Result<(), Box<dyn std::error::Error>> {\n\n/// use grammers_client::{InputMessage, reply_markup};\n\n///\n\n/// let markup = reply_markup::force_reply().single_use();\n\n/// client.send_message(chat, InputMessage::text(\"Reply me!\").reply_markup(&markup)).await?;\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn force_reply() -> ForceReply {\n\n ForceReply(tl::types::ReplyKeyboardForceReply {\n\n single_use: false,\n\n selective: false,\n\n })\n\n}\n\n\n\nimpl Keyboard {\n\n /// Requests clients to resize the keyboard vertically for optimal fit (e.g., make the\n\n /// keyboard smaller if there are just two rows of buttons). 
Otherwise, the custom keyboard\n\n /// is always of the same height as the virtual keyboard.\n\n pub fn fit_size(mut self) -> Self {\n\n self.0.resize = true;\n\n self\n\n }\n\n\n\n /// Requests clients to hide the keyboard as soon as it's been used.\n\n ///\n\n /// The keyboard will still be available, but clients will automatically display the usual\n\n /// letter-keyboard in the chat – the user can press a special button in the input field to\n", "file_path": "lib/grammers-client/src/types/reply_markup.rs", "rank": 56, "score": 127564.78131564433 }, { "content": "fn check_p_len(p: &[u8]) -> bool {\n\n p.len() == 256\n\n}\n\n\n", "file_path": "lib/grammers-crypto/src/two_factor_auth.rs", "rank": 57, "score": 127224.55950497456 }, { "content": "#[allow(clippy::many_single_char_names)]\n\npub fn factorize(pq: u64) -> (u64, u64) {\n\n if pq % 2 == 0 {\n\n return (2, pq);\n\n }\n\n\n\n let pq = pq as u128;\n\n fn abs_sub(a: u128, b: u128) -> u128 {\n\n a.max(b) - a.min(b)\n\n }\n\n\n\n // Random values in the range of 1..pq, chosen by fair dice roll.\n\n let mut y = pq / 4;\n\n let c = 2 * pq / 4;\n\n let m = 3 * pq / 4;\n\n let mut g = 1u128;\n\n let mut r = 1u128;\n\n let mut q = 1u128;\n\n let mut x = 0u128;\n\n let mut ys = 0u128;\n\n\n", "file_path": "lib/grammers-crypto/src/factorize.rs", "rank": 58, "score": 121704.25649788363 }, { "content": "/// A keyboard button in its simplest form.\n\n///\n\n/// When pressed, the button's text will be sent as a normal message, as if the user had typed it.\n\npub fn text<T: Into<String>>(text: T) -> Keyboard {\n\n Keyboard(tl::types::KeyboardButton { text: text.into() }.into())\n\n}\n\n\n", "file_path": "lib/grammers-client/src/types/button.rs", "rank": 59, "score": 115332.8846807141 }, { "content": "/// The trait used by the transports to create instances of themselves.\n\npub trait Transport {\n\n /// Packs and writes `input` into `output`.\n\n ///\n\n /// Previous contents in `output` are not cleared before this 
operation.\n\n ///\n\n /// Panics if `input.len()` is not divisible by 4.\n\n fn pack(&mut self, input: &[u8], output: &mut BytesMut);\n\n\n\n /// Unpacks the content from `input` into `output`.\n\n ///\n\n /// Previous contents in `output` are not cleared before this operation.\n\n ///\n\n /// If successful, returns how many bytes of `input` were used.\n\n fn unpack(&mut self, input: &[u8], output: &mut BytesMut) -> Result<usize, Error>;\n\n}\n", "file_path": "lib/grammers-mtproto/src/transport/mod.rs", "rank": 60, "score": 114954.9036515874 }, { "content": "/// The trait used by the [Mobile Transport Protocol] to serialize outgoing\n\n/// messages and deserialize incoming ones into proper responses.\n\n///\n\n/// [Mobile Transport Protocol]: https://core.telegram.org/mtproto/description\n\npub trait Mtp {\n\n /// Serializes one request to the internal buffer, which can be later retrieved by calling\n\n /// `finalize` after one or more `push` have been made.\n\n ///\n\n /// Returns the message ID assigned the request if it was serialized, or `None` if the buffer\n\n /// is full and cannot hold more requests.\n\n ///\n\n /// # Panics\n\n ///\n\n /// The method panics if the body length is not padded to 4 bytes. The\n\n /// serialization of requests will always be correctly padded, so adding\n\n /// an error case for this rare case (impossible with the expected inputs)\n\n /// would simply be unnecessary.\n\n ///\n\n /// The method also panics if the body length is too large for similar\n\n /// reasons. 
It is not reasonable to construct huge requests (although\n\n /// possible) because they would likely fail with a RPC error anyway,\n\n /// so we avoid another error case by simply panicking.\n\n ///\n\n /// The definition of \"too large\" is roughly 1MB, so as long as the\n", "file_path": "lib/grammers-mtproto/src/mtp/mod.rs", "rank": 61, "score": 114954.43034608578 }, { "content": "/// A keyboard button that will direct the user to create and send a poll when pressed.\n\n///\n\n/// This is only available in direct chats with the user.\n\npub fn request_poll<T: Into<String>>(text: T) -> Keyboard {\n\n Keyboard(\n\n tl::types::KeyboardButtonRequestPoll {\n\n text: text.into(),\n\n quiz: None,\n\n }\n\n .into(),\n\n )\n\n}\n\n\n", "file_path": "lib/grammers-client/src/types/button.rs", "rank": 62, "score": 114045.79811110385 }, { "content": "/// A keyboard button identical to [`request_poll`], except the poll requested must be a quiz.\n\n///\n\n/// This is only available in direct chats with the user.\n\npub fn request_quiz<T: Into<String>>(text: T) -> Keyboard {\n\n Keyboard(\n\n tl::types::KeyboardButtonRequestPoll {\n\n text: text.into(),\n\n quiz: Some(true),\n\n }\n\n .into(),\n\n )\n\n}\n\n\n\n/*\n\nTODO implement other buttons\n\n(with password) keyboardButtonCallback#35bbdb6b flags:# requires_password:flags.0?true text:string data:bytes = KeyboardButton;\n\nkeyboardButtonUrlAuth#10b78d29 flags:# text:string fwd_text:flags.0?string url:string button_id:int = KeyboardButton;\n\nkeyboardButtonGame#50f41ccf text:string = KeyboardButton;\n\nkeyboardButtonBuy#afd93fbb text:string = KeyboardButton;\n\ninputKeyboardButtonUrlAuth#d02e7fd4 flags:# request_write_access:flags.0?true text:string fwd_text:flags.1?string url:string bot:InputUser = KeyboardButton;\n\n*/\n", "file_path": "lib/grammers-client/src/types/button.rs", "rank": 63, "score": 114044.92666365214 }, { "content": "/// A keyboard button to request the user's current geo-location.\n\npub fn request_geo<T: 
Into<String>>(text: T) -> Keyboard {\n\n Keyboard(tl::types::KeyboardButtonRequestGeoLocation { text: text.into() }.into())\n\n}\n\n\n", "file_path": "lib/grammers-client/src/types/button.rs", "rank": 64, "score": 114040.39949742601 }, { "content": "/// A keyboard button to request the user's contact information (including the phone).\n\npub fn request_phone<T: Into<String>>(text: T) -> Keyboard {\n\n Keyboard(tl::types::KeyboardButtonRequestPhone { text: text.into() }.into())\n\n}\n\n\n", "file_path": "lib/grammers-client/src/types/button.rs", "rank": 65, "score": 114040.36878301886 }, { "content": "/// Anything implementing this trait is identifiable by both ends (client-server)\n\n/// when performing Remote Procedure Calls (RPC) and transmission of objects.\n\npub trait Identifiable {\n\n /// The unique identifier for the type.\n\n const CONSTRUCTOR_ID: u32;\n\n}\n\n\n", "file_path": "lib/grammers-tl-types/src/lib.rs", "rank": 66, "score": 113973.4421338794 }, { "content": "/// This trait allows for data serialized according to the\n\n/// [Binary Data Serialization] to be deserialized into concrete instances.\n\n///\n\n/// [Binary Data Serialization]: https://core.telegram.org/mtproto/serialize\n\npub trait Deserializable {\n\n /// Deserializes an instance of the type from a given buffer.\n\n fn deserialize(buf: Buffer) -> Result<Self>\n\n where\n\n Self: std::marker::Sized;\n\n\n\n /// Convenience function to deserialize an instance from a given buffer.\n\n ///\n\n /// # Examples\n\n ///\n\n /// ```\n\n /// use grammers_tl_types::Deserializable;\n\n ///\n\n /// assert_eq!(bool::from_bytes(&[0x37, 0x97, 0x79, 0xbc]).unwrap(), false);\n\n /// ```\n\n fn from_bytes(buf: &[u8]) -> Result<Self>\n\n where\n\n Self: std::marker::Sized,\n\n {\n\n Self::deserialize(&mut Cursor::from_slice(buf))\n", "file_path": "lib/grammers-tl-types/src/deserialize.rs", "rank": 67, "score": 113973.4421338794 }, { "content": "/// This trait allows for concrete instances to be serialized 
into\n\n/// binary data as specified by the [Binary Data Serialization].\n\n///\n\n/// [Binary Data Serialization]: https://core.telegram.org/mtproto/serialize\n\npub trait Serializable {\n\n /// Serializes the instance into the given buffer.\n\n fn serialize(&self, buf: Buffer);\n\n\n\n /// Convenience function to serialize the object into a new buffer\n\n /// and return its bytes. It is more efficient to reuse a existing\n\n /// buffer with [`Serializable::serialize`].\n\n fn to_bytes(&self) -> Vec<u8> {\n\n let mut buffer = Vec::new();\n\n self.serialize(&mut buffer);\n\n buffer\n\n }\n\n}\n\n\n\nimpl Serializable for bool {\n\n /// Serializes the boolean according to the following definitions:\n\n ///\n\n /// * `false` is serialized as `boolFalse#bc799737 = Bool;`.\n\n /// * `true` is serialized as `boolTrue#997275b5 = Bool;`.\n\n ///\n", "file_path": "lib/grammers-tl-types/src/serialize.rs", "rank": 68, "score": 113973.4421338794 }, { "content": "/// Determines the padding length needed for a plaintext of a certain length,\n\n/// according to the following citation:\n\n///\n\n/// > Note that MTProto 2.0 requires from 12 to 1024 bytes of padding\n\n/// > [...] 
the resulting message length be divisible by 16 bytes\n\nfn determine_padding_v2_length(len: usize) -> usize {\n\n 16 + (16 - (len % 16))\n\n}\n\n\n", "file_path": "lib/grammers-crypto/src/lib.rs", "rank": 69, "score": 111807.08072086304 }, { "content": "fn xor(left: &Output<Sha256>, right: &Output<Sha256>) -> Vec<u8> {\n\n return left\n\n .iter()\n\n .zip(right.iter())\n\n .map(|(&x1, &x2)| x1 ^ x2)\n\n .collect();\n\n}\n\n\n", "file_path": "lib/grammers-crypto/src/two_factor_auth.rs", "rank": 70, "score": 110772.49935442465 }, { "content": "/// Structures implementing this trait indicate that they are suitable for\n\n/// use to perform Remote Procedure Calls (RPC), and know what the type of\n\n/// the response will be.\n\npub trait RemoteCall: Serializable {\n\n /// The type of the \"return\" value coming from the other end of the\n\n /// connection.\n\n type Return: Deserializable;\n\n}\n", "file_path": "lib/grammers-tl-types/src/lib.rs", "rank": 71, "score": 109303.29642369258 }, { "content": "fn map_random_ids_to_messages(\n\n client: &Client,\n\n random_ids: &[i64],\n\n updates: tl::enums::Updates,\n\n) -> Vec<Option<Message>> {\n\n match updates {\n\n tl::enums::Updates::Updates(tl::types::Updates {\n\n updates,\n\n users,\n\n chats,\n\n date: _,\n\n seq: _,\n\n }) => {\n\n let chats = ChatMap::new(users, chats);\n\n\n\n let rnd_to_id = updates\n\n .iter()\n\n .filter_map(|update| match update {\n\n tl::enums::Update::MessageId(u) => Some((u.random_id, u.id)),\n\n _ => None,\n", "file_path": "lib/grammers-client/src/client/messages.rs", "rank": 72, "score": 108054.91679518807 }, { "content": "/// Define a custom keyboard, replacing the user's own virtual keyboard.\n\n///\n\n/// This will be displayed below the input message field for users, and on mobile devices, this\n\n/// also hides the virtual keyboard (effectively \"replacing\" it).\n\n///\n\n/// You cannot add images to the buttons, but you can use emoji (simply copy-paste them into your\n\n/// code, 
or use the correct escape sequence, or using any other input methods you like).\n\n///\n\n/// You will need to provide a matrix of [`button::Inline`], that is, a vector that contains the\n\n/// rows from top to bottom, where the rows consist of a vector of buttons from left to right.\n\n/// See the [`button`] module to learn what buttons are available.\n\n///\n\n/// See the return type for further configuration options.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # async fn f(client: &mut grammers_client::Client, chat: &grammers_client::types::Chat) -> Result<(), Box<dyn std::error::Error>> {\n\n/// use grammers_client::{InputMessage, reply_markup, button};\n\n///\n\n/// client.send_message(chat, InputMessage::text(\"What do you want to do?\").reply_markup(&reply_markup::keyboard(vec![\n\n/// vec![button::text(\"Accept\")],\n\n/// vec![button::text(\"Cancel\"), button::text(\"Try something else\")],\n\n/// ]))).await?;\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn keyboard<B: Into<Vec<Vec<button::Keyboard>>>>(buttons: B) -> Keyboard {\n\n Keyboard(tl::types::ReplyKeyboardMarkup {\n\n resize: false,\n\n single_use: false,\n\n selective: false,\n\n rows: buttons\n\n .into()\n\n .into_iter()\n\n .map(|row| {\n\n tl::types::KeyboardButtonRow {\n\n buttons: row.into_iter().map(|button| button.0).collect(),\n\n }\n\n .into()\n\n })\n\n .collect(),\n\n })\n\n}\n\n\n", "file_path": "lib/grammers-client/src/types/reply_markup.rs", "rank": 73, "score": 106075.90651304678 }, { "content": "/// Define inline buttons for a message.\n\n///\n\n/// These will display right under the message.\n\n///\n\n/// You cannot add images to the buttons, but you can use emoji (simply copy-paste them into your\n\n/// code, or use the correct escape sequence, or using any other input methods you like).\n\n///\n\n/// You will need to provide a matrix of [`button::Inline`], that is, a vector that contains the\n\n/// rows from top to bottom, where the rows consist of a vector of buttons 
from left to right.\n\n/// See the [`button`] module to learn what buttons are available.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # async fn f(client: &mut grammers_client::Client, chat: &grammers_client::types::Chat) -> Result<(), Box<dyn std::error::Error>> {\n\n/// use grammers_client::{InputMessage, reply_markup, button};\n\n///\n\n/// let artist = \"Krewella\";\n\n/// client.send_message(chat, InputMessage::text(\"Select song\").reply_markup(&reply_markup::keyboard(vec![\n\n/// vec![button::text(format!(\"Song by {}\", artist))],\n\n/// vec![button::text(\"Previous\"), button::text(\"Next\")],\n\n/// ]))).await?;\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub fn inline<B: Into<Vec<Vec<button::Inline>>>>(buttons: B) -> Inline {\n\n Inline(tl::types::ReplyInlineMarkup {\n\n rows: buttons\n\n .into()\n\n .into_iter()\n\n .map(|row| {\n\n tl::types::KeyboardButtonRow {\n\n buttons: row.into_iter().map(|button| button.0).collect(),\n\n }\n\n .into()\n\n })\n\n .collect(),\n\n })\n\n}\n\n\n", "file_path": "lib/grammers-client/src/types/reply_markup.rs", "rank": 74, "score": 106072.74585206748 }, { "content": "/// Load the type language definitions from a certain file.\n\n/// Parse errors will be printed to `stderr`, and only the\n\n/// valid results will be returned.\n\nfn load_tl(file: &str) -> io::Result<Vec<Definition>> {\n\n let mut file = File::open(file)?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n Ok(parse_tl_file(&contents)\n\n .into_iter()\n\n .filter_map(|d| match d {\n\n Ok(d) => Some(d),\n\n Err(e) => {\n\n eprintln!(\"TL: parse error: {:?}\", e);\n\n None\n\n }\n\n })\n\n .collect())\n\n}\n\n\n", "file_path": "lib/grammers-tl-types/build.rs", "rank": 75, "score": 106057.28677805187 }, { "content": "fn next_updates_deadline() -> Instant {\n\n Instant::now() + defs::NO_UPDATES_TIMEOUT\n\n}\n\n\n\n/// Creation, querying, and setting base state.\n\nimpl MessageBox {\n\n /// Create a new, empty 
[`MessageBox`].\n\n ///\n\n /// This is the only way it may return `true` from [`MessageBox::is_empty`].\n\n pub fn new() -> Self {\n\n Self {\n\n map: HashMap::new(),\n\n date: 1,\n\n seq: 0,\n\n next_deadline: None,\n\n possible_gaps: HashMap::new(),\n\n getting_diff_for: HashSet::new(),\n\n reset_deadlines_for: HashSet::new(),\n\n }\n\n }\n", "file_path": "lib/grammers-session/src/message_box/mod.rs", "rank": 76, "score": 104630.4692547319 }, { "content": "fn main() -> std::io::Result<()> {\n\n let layer = match find_layer(\"tl/api.tl\")? {\n\n Some(x) => x,\n\n None => panic!(\"no layer information found in api.tl\"),\n\n };\n\n\n\n let definitions = {\n\n let mut definitions = Vec::new();\n\n if cfg!(feature = \"tl-api\") {\n\n definitions.extend(load_tl(\"tl/api.tl\")?);\n\n }\n\n if cfg!(feature = \"tl-mtproto\") {\n\n definitions.extend(load_tl(\"tl/mtproto.tl\")?);\n\n }\n\n definitions\n\n };\n\n\n\n let mut file = BufWriter::new(File::create(\n\n Path::new(&env::var(\"OUT_DIR\").unwrap()).join(\"generated.rs\"),\n\n )?);\n", "file_path": "lib/grammers-tl-types/build.rs", "rank": 77, "score": 104030.53743567162 }, { "content": "/// An inline button that when clicked will ask the user if they want to open the specified URL.\n\n///\n\n/// The URL will be visible to the user before it's opened unless it's trusted (such as Telegram's\n\n/// domain).\n\npub fn url<T: Into<String>, U: Into<String>>(text: T, url: U) -> Inline {\n\n Inline(\n\n tl::types::KeyboardButtonUrl {\n\n text: text.into(),\n\n url: url.into(),\n\n }\n\n .into(),\n\n )\n\n}\n\n\n", "file_path": "lib/grammers-client/src/types/button.rs", "rank": 78, "score": 103718.25060788335 }, { "content": "/// An inline button to force the user to switch to inline mode (perform inline queries).\n\n///\n\n/// Pressing the button will insert the bot's username and the specified inline query in the input\n\n/// field.\n\npub fn switch_inline<T: Into<String>, Q: Into<String>>(text: T, query: Q) -> Inline 
{\n\n Inline(\n\n tl::types::KeyboardButtonSwitchInline {\n\n text: text.into(),\n\n query: query.into(),\n\n same_peer: true,\n\n }\n\n .into(),\n\n )\n\n}\n\n\n", "file_path": "lib/grammers-client/src/types/button.rs", "rank": 79, "score": 102581.51440509254 }, { "content": "/// An inline button identical to [`switch_inline`], except the user will be prompted to select a\n\n/// different chat.\n\n///\n\n/// Pressing the button will prompt the user to select one of their chats, open that chat and\n\n/// insert the bot's username and the specified inline query in the input field.\n\npub fn switch_inline_elsewhere<T: Into<String>, Q: Into<String>>(text: T, query: Q) -> Inline {\n\n Inline(\n\n tl::types::KeyboardButtonSwitchInline {\n\n text: text.into(),\n\n query: query.into(),\n\n same_peer: false,\n\n }\n\n .into(),\n\n )\n\n}\n\n\n", "file_path": "lib/grammers-client/src/types/button.rs", "rank": 80, "score": 101481.03937930238 }, { "content": "/// Get the rusty type name for a certain definition, excluding namespace.\n\n///\n\n/// For example, transforms `\"ns.some_OK_name\"` into `\"SomeOkName\"`.\n\nfn rusty_type_name(name: &str) -> String {\n\n enum Casing {\n\n Upper,\n\n Lower,\n\n Preserve,\n\n }\n\n\n\n let name = if let Some(pos) = name.rfind('.') {\n\n &name[pos + 1..]\n\n } else {\n\n name\n\n };\n\n\n\n let mut result = String::with_capacity(name.len());\n\n\n\n name.chars().fold(Casing::Upper, |casing, c| {\n\n if c == '_' {\n\n return Casing::Upper;\n\n }\n\n\n", "file_path": "lib/grammers-tl-gen/src/rustifier.rs", "rank": 81, "score": 99804.80829445775 }, { "content": "fn get_definitions(contents: &str) -> Vec<Definition> {\n\n parse_tl_file(&contents)\n\n .into_iter()\n\n .map(|d| d.unwrap())\n\n .collect()\n\n}\n\n\n", "file_path": "lib/grammers-tl-gen/tests/lib.rs", "rank": 82, "score": 98748.24389312732 }, { "content": "impl FromStr for Type {\n\n type Err = ParamParseError;\n\n\n\n /// Parses a type.\n\n ///\n\n /// # Examples\n\n ///\n\n 
/// ```\n\n /// use grammers_tl_parser::tl::Type;\n\n ///\n\n /// assert!(\"vector<int>\".parse::<Type>().is_ok());\n\n /// ```\n\n fn from_str(ty: &str) -> Result<Self, Self::Err> {\n\n // Parse `!type`\n\n let (ty, generic_ref) = if let Some(ty) = ty.strip_prefix('!') {\n\n (ty, true)\n\n } else {\n\n (ty, false)\n\n };\n\n\n", "file_path": "lib/grammers-tl-parser/src/tl/ty.rs", "rank": 83, "score": 98076.69720191523 }, { "content": "// Copyright 2020 - developers of the `grammers` project.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\nuse std::fmt;\n\nuse std::str::FromStr;\n\n\n\nuse crate::errors::ParamParseError;\n\n\n\n/// The type of a definition or a parameter.\n\n#[derive(Debug, PartialEq)]\n\npub struct Type {\n\n /// The namespace components of the type.\n\n pub namespace: Vec<String>,\n\n\n\n /// The name of the type.\n\n pub name: String,\n", "file_path": "lib/grammers-tl-parser/src/tl/ty.rs", "rank": 84, "score": 98064.2322139866 }, { "content": " assert_eq!(Type::from_str(\".\"), Err(ParamParseError::Empty));\n\n assert_eq!(Type::from_str(\"..\"), Err(ParamParseError::Empty));\n\n assert_eq!(Type::from_str(\".foo\"), Err(ParamParseError::Empty));\n\n assert_eq!(Type::from_str(\"foo.\"), Err(ParamParseError::Empty));\n\n assert_eq!(Type::from_str(\"foo..foo\"), Err(ParamParseError::Empty));\n\n assert_eq!(Type::from_str(\".foo.\"), Err(ParamParseError::Empty));\n\n }\n\n\n\n #[test]\n\n fn check_namespaced() {\n\n assert_eq!(\n\n Type::from_str(\"foo.bar.baz\"),\n\n Ok(Type {\n\n namespace: vec![\"foo\".into(), \"bar\".into()],\n\n name: \"baz\".into(),\n\n bare: true,\n\n generic_ref: false,\n\n generic_arg: None,\n\n })\n\n );\n", "file_path": 
"lib/grammers-tl-parser/src/tl/ty.rs", "rank": 85, "score": 98061.52213148009 }, { "content": " // Parse `type<generic_arg>`\n\n let (ty, generic_arg) = if let Some(pos) = ty.find('<') {\n\n if !ty.ends_with('>') {\n\n return Err(ParamParseError::InvalidGeneric);\n\n }\n\n (\n\n &ty[..pos],\n\n Some(Box::new(Type::from_str(&ty[pos + 1..ty.len() - 1])?)),\n\n )\n\n } else {\n\n (ty, None)\n\n };\n\n\n\n // Parse `ns1.ns2.name`\n\n let mut namespace: Vec<String> = ty.split('.').map(|part| part.to_string()).collect();\n\n if namespace.iter().any(|part| part.is_empty()) {\n\n return Err(ParamParseError::Empty);\n\n }\n\n\n\n // Safe to unwrap because split() will always yield at least one.\n", "file_path": "lib/grammers-tl-parser/src/tl/ty.rs", "rank": 86, "score": 98061.3954365375 }, { "content": " fn check_empty_simple() {\n\n assert_eq!(Type::from_str(\"\"), Err(ParamParseError::Empty));\n\n }\n\n\n\n #[test]\n\n fn check_simple() {\n\n assert_eq!(\n\n Type::from_str(\"foo\"),\n\n Ok(Type {\n\n namespace: vec![],\n\n name: \"foo\".into(),\n\n bare: true,\n\n generic_ref: false,\n\n generic_arg: None,\n\n })\n\n );\n\n }\n\n\n\n #[test]\n\n fn check_empty_namespaced() {\n", "file_path": "lib/grammers-tl-parser/src/tl/ty.rs", "rank": 87, "score": 98060.80482964593 }, { "content": " }\n\n\n\n #[test]\n\n fn check_generic_arg() {\n\n assert!(match Type::from_str(\"foo.bar\") {\n\n Ok(Type {\n\n generic_arg: None, ..\n\n }) => true,\n\n _ => false,\n\n });\n\n assert!(match Type::from_str(\"foo<bar>\") {\n\n Ok(Type {\n\n generic_arg: Some(x),\n\n ..\n\n }) => *x == \"bar\".parse().unwrap(),\n\n _ => false,\n\n });\n\n assert!(match Type::from_str(\"foo<bar.Baz>\") {\n\n Ok(Type {\n\n generic_arg: Some(x),\n", "file_path": "lib/grammers-tl-parser/src/tl/ty.rs", "rank": 88, "score": 98056.74585016712 }, { "content": "\n\n /// Whether this type is bare or boxed.\n\n pub bare: bool,\n\n\n\n /// Whether the type name refers to a generic definition.\n\n pub generic_ref: 
bool,\n\n\n\n /// If the type has a generic argument, which is its type.\n\n pub generic_arg: Option<Box<Type>>,\n\n}\n\n\n\nimpl fmt::Display for Type {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n for ns in self.namespace.iter() {\n\n write!(f, \"{}.\", ns)?;\n\n }\n\n if self.generic_ref {\n\n write!(f, \"!\")?;\n\n }\n\n write!(f, \"{}\", self.name)?;\n", "file_path": "lib/grammers-tl-parser/src/tl/ty.rs", "rank": 89, "score": 98056.24987484168 }, { "content": " let name = namespace.pop().unwrap();\n\n\n\n // Safe to unwrap because we just checked is not empty\n\n let bare = name.chars().next().unwrap().is_ascii_lowercase();\n\n\n\n Ok(Self {\n\n namespace,\n\n name,\n\n bare,\n\n generic_ref,\n\n generic_arg,\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "lib/grammers-tl-parser/src/tl/ty.rs", "rank": 90, "score": 98055.80789977215 }, { "content": " if let Some(generic_arg) = &self.generic_arg {\n\n write!(f, \"<{}>\", generic_arg)?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Type {\n\n /// Find all the nested generic references in this type, and appends them\n\n /// to the input vector.Box\n\n pub(crate) fn find_generic_refs<'a>(&'a self, output: &mut Vec<&'a str>) {\n\n if self.generic_ref {\n\n output.push(&self.name);\n\n }\n\n if let Some(generic_arg) = &self.generic_arg {\n\n generic_arg.find_generic_refs(output);\n\n }\n\n }\n\n}\n\n\n", "file_path": "lib/grammers-tl-parser/src/tl/ty.rs", "rank": 91, "score": 98054.89454429653 }, { "content": " }\n\n\n\n #[test]\n\n fn check_bare() {\n\n assert!(match Type::from_str(\"foo\") {\n\n Ok(Type { bare: true, .. }) => true,\n\n _ => false,\n\n });\n\n assert!(match Type::from_str(\"Foo\") {\n\n Ok(Type { bare: false, .. }) => true,\n\n _ => false,\n\n });\n\n assert!(match Type::from_str(\"Foo.bar\") {\n\n Ok(Type { bare: true, .. 
}) => true,\n\n _ => false,\n\n });\n\n assert!(match Type::from_str(\"Foo.Bar\") {\n\n Ok(Type { bare: false, .. }) => true,\n\n _ => false,\n\n });\n", "file_path": "lib/grammers-tl-parser/src/tl/ty.rs", "rank": 92, "score": 98054.81875461178 }, { "content": " _ => false,\n\n });\n\n assert!(match Type::from_str(\"!f\") {\n\n Ok(Type {\n\n generic_ref: true, ..\n\n }) => true,\n\n _ => false,\n\n });\n\n assert!(match Type::from_str(\"!Foo\") {\n\n Ok(Type {\n\n generic_ref: true, ..\n\n }) => true,\n\n _ => false,\n\n });\n\n assert!(match Type::from_str(\"!X\") {\n\n Ok(Type {\n\n generic_ref: true, ..\n\n }) => true,\n\n _ => false,\n\n });\n", "file_path": "lib/grammers-tl-parser/src/tl/ty.rs", "rank": 93, "score": 98054.81216174958 }, { "content": " assert!(match Type::from_str(\"foo.Bar\") {\n\n Ok(Type { bare: false, .. }) => true,\n\n _ => false,\n\n });\n\n assert!(match Type::from_str(\"!bar\") {\n\n Ok(Type { bare: true, .. }) => true,\n\n _ => false,\n\n });\n\n assert!(match Type::from_str(\"!foo.Bar\") {\n\n Ok(Type { bare: false, .. 
}) => true,\n\n _ => false,\n\n });\n\n }\n\n\n\n #[test]\n\n fn check_generic_ref() {\n\n assert!(match Type::from_str(\"f\") {\n\n Ok(Type {\n\n generic_ref: false, ..\n\n }) => true,\n", "file_path": "lib/grammers-tl-parser/src/tl/ty.rs", "rank": 94, "score": 98054.6905123935 }, { "content": " ..\n\n }) => *x == \"bar.Baz\".parse().unwrap(),\n\n _ => false,\n\n });\n\n assert!(match Type::from_str(\"foo<!bar.baz>\") {\n\n Ok(Type {\n\n generic_arg: Some(x),\n\n ..\n\n }) => *x == \"!bar.baz\".parse().unwrap(),\n\n _ => false,\n\n });\n\n assert!(match Type::from_str(\"foo<bar<baz>>\") {\n\n Ok(Type {\n\n generic_arg: Some(x),\n\n ..\n\n }) => *x == \"bar<baz>\".parse().unwrap(),\n\n _ => false,\n\n });\n\n }\n\n}\n", "file_path": "lib/grammers-tl-parser/src/tl/ty.rs", "rank": 95, "score": 98048.10373336189 }, { "content": "// Copyright 2020 - developers of the `grammers` project.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\n//! This module contains all the different structures representing the\n\n//! various terms of the [Type Language].\n\n//!\n\n//! 
[Type Language]: https://core.telegram.org/mtproto/TL\n\nmod category;\n\nmod definition;\n\nmod flag;\n\nmod parameter;\n\nmod parameter_type;\n\nmod ty;\n\n\n\npub use category::Category;\n\npub use definition::Definition;\n\npub use flag::Flag;\n\npub use parameter::Parameter;\n\npub use parameter_type::ParameterType;\n\npub use ty::Type;\n", "file_path": "lib/grammers-tl-parser/src/tl/mod.rs", "rank": 96, "score": 97652.59883953507 }, { "content": " sorted_keys.sort();\n\n for key in sorted_keys.into_iter() {\n\n // Begin possibly inner mod\n\n let indent = if let Some(ns) = key {\n\n writeln!(file, \" #[allow(clippy::large_enum_variant)]\")?;\n\n writeln!(file, \" pub mod {} {{\", ns)?;\n\n \" \"\n\n } else {\n\n \" \"\n\n };\n\n\n\n for ty in grouped[key].iter().filter(|ty| !ignore_type(*ty)) {\n\n write_definition(&mut file, indent, ty, metadata, config)?;\n\n }\n\n\n\n // End possibly inner mod\n\n if key.is_some() {\n\n writeln!(file, \" }}\")?;\n\n }\n\n }\n\n\n\n // End outermost mod\n\n writeln!(file, \"}}\")\n\n}\n", "file_path": "lib/grammers-tl-gen/src/enums.rs", "rank": 97, "score": 90235.40320817217 }, { "content": " metadata: &Metadata,\n\n config: &Config,\n\n) -> io::Result<()> {\n\n // Begin outermost mod\n\n write!(\n\n file,\n\n \"\\\n\n /// This module contains all of the boxed types, each\\n\\\n\n /// represented by a `enum`. 
All of them implement\\n\\\n\n /// [`Serializable`] and [`Deserializable`].\\n\\\n\n ///\\n\\\n\n /// [`Serializable`]: /grammers_tl_types/trait.Serializable.html\\n\\\n\n /// [`Deserializable`]: /grammers_tl_types/trait.Deserializable.html\\n\\\n\n #[allow(clippy::large_enum_variant)]\\n\\\n\n pub mod enums {{\\n\\\n\n \"\n\n )?;\n\n\n\n let grouped = grouper::group_types_by_ns(definitions);\n\n let mut sorted_keys: Vec<&Option<String>> = grouped.keys().collect();\n", "file_path": "lib/grammers-tl-gen/src/enums.rs", "rank": 98, "score": 90235.2584258563 }, { "content": "// Copyright 2020 - developers of the `grammers` project.\n\n//\n\n// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or\n\n// https://www.apache.org/licenses/LICENSE-2.0> or the MIT license\n\n// <LICENSE-MIT or https://opensource.org/licenses/MIT>, at your\n\n// option. This file may not be copied, modified, or distributed\n\n// except according to those terms.\n\n\n\n//! Code to generate Rust's `enum`'s from TL definitions.\n\n\n\nuse crate::grouper;\n\nuse crate::metadata::Metadata;\n\nuse crate::rustifier;\n\nuse crate::{ignore_type, Config};\n\nuse grammers_tl_parser::tl::{Definition, Type};\n\nuse std::io::{self, Write};\n\n\n\n/// Writes an enumeration listing all types such as the following rust code:\n\n///\n\n/// ```ignore\n\n/// pub enum Name {\n\n/// Variant(crate::types::Name),\n\n/// }\n\n/// ```\n", "file_path": "lib/grammers-tl-gen/src/enums.rs", "rank": 99, "score": 90225.96802249434 } ]
Rust
edgelet/edgelet-http-workload/src/edge_ca.rs
brotherneeru/iotedge
830259ad2f748db58f7271101ed58fdf5fc7bf3f
#[cfg(not(test))] use aziot_cert_client_async::Client as CertClient; #[cfg(not(test))] use aziot_key_client_async::Client as KeyClient; #[cfg(not(test))] use aziot_key_openssl_engine as KeyEngine; #[cfg(test)] use test_common::client::CertClient; #[cfg(test)] use test_common::client::KeyClient; #[cfg(test)] use test_common::client::KeyEngine; pub(crate) struct EdgeCaRenewal { rotate_key: bool, temp_cert: String, cert_client: std::sync::Arc<futures_util::lock::Mutex<CertClient>>, key_client: std::sync::Arc<futures_util::lock::Mutex<KeyClient>>, key_connector: http_common::Connector, renewal_tx: tokio::sync::mpsc::UnboundedSender<edgelet_core::WatchdogAction>, } impl EdgeCaRenewal { pub fn new( rotate_key: bool, config: &crate::WorkloadConfig, cert_client: std::sync::Arc<futures_util::lock::Mutex<CertClient>>, key_client: std::sync::Arc<futures_util::lock::Mutex<KeyClient>>, key_connector: http_common::Connector, renewal_tx: tokio::sync::mpsc::UnboundedSender<edgelet_core::WatchdogAction>, ) -> Self { let temp_cert = format!("{}-temp", config.edge_ca_cert); EdgeCaRenewal { rotate_key, temp_cert, cert_client, key_client, key_connector, renewal_tx, } } } #[async_trait::async_trait] impl cert_renewal::CertInterface for EdgeCaRenewal { type NewKey = String; async fn get_cert( &mut self, cert_id: &str, ) -> Result<Vec<openssl::x509::X509>, cert_renewal::Error> { let cert_client = self.cert_client.lock().await; let cert = cert_client .get_cert(cert_id) .await .map_err(|_| cert_renewal::Error::retryable_error("failed to retrieve edge CA cert"))?; let cert_chain = openssl::x509::X509::stack_from_pem(&cert) .map_err(|_| cert_renewal::Error::fatal_error("failed to parse edge CA cert"))?; if cert_chain.is_empty() { Err(cert_renewal::Error::fatal_error("no certs in chain")) } else { Ok(cert_chain) } } async fn get_key( &mut self, key_id: &str, ) -> Result<openssl::pkey::PKey<openssl::pkey::Private>, cert_renewal::Error> { let key_client = self.key_client.lock().await; let 
key_handle = key_client .load_key_pair(key_id) .await .map_err(|_| cert_renewal::Error::retryable_error("failed to get identity cert key"))?; let (private_key, _) = keys(self.key_connector.clone(), &key_handle) .map_err(cert_renewal::Error::retryable_error)?; Ok(private_key) } async fn renew_cert( &mut self, old_cert_chain: &[openssl::x509::X509], key_id: &str, ) -> Result<(Vec<openssl::x509::X509>, Self::NewKey), cert_renewal::Error> { let (key_id, key_handle) = { let key_client = self.key_client.lock().await; if self.rotate_key { let key_id = format!("{}-temp", key_id); if let Ok(key_handle) = key_client.load_key_pair(&key_id).await { key_client.delete_key_pair(&key_handle).await.map_err(|_| { cert_renewal::Error::retryable_error("failed to clear temp key") })?; } let key_handle = key_client .create_key_pair_if_not_exists(&key_id, Some("rsa-2048:*")) .await .map_err(|_| { cert_renewal::Error::retryable_error("failed to generate temp key") })?; (key_id, key_handle) } else { let key_handle = key_client.load_key_pair(key_id).await.map_err(|_| { cert_renewal::Error::retryable_error("failed to get identity cert key") })?; (key_id.to_string(), key_handle) } }; let keys = keys(self.key_connector.clone(), &key_handle) .map_err(cert_renewal::Error::retryable_error)?; let extensions = extensions().map_err(|_| { cert_renewal::Error::fatal_error("failed to generate edge CA extensions") })?; let csr = crate::module::cert::new_csr( old_cert_chain[0].subject_name(), keys, Vec::new(), extensions, ) .map_err(|_| cert_renewal::Error::retryable_error("failed to create csr"))?; let new_cert = { let cert_client = self.cert_client.lock().await; let new_cert = cert_client .create_cert(&self.temp_cert, &csr, None) .await .map_err(|_| cert_renewal::Error::retryable_error("failed to create new cert"))?; if let Err(err) = cert_client.delete_cert(&self.temp_cert).await { log::warn!( "Failed to delete temporary certificate created by cert renewal: {}", err ); } new_cert }; let new_cert_chain 
= openssl::x509::X509::stack_from_pem(&new_cert) .map_err(|_| cert_renewal::Error::retryable_error("failed to parse new cert"))?; if new_cert_chain.is_empty() { Err(cert_renewal::Error::retryable_error("no certs in chain")) } else { Ok((new_cert_chain, key_id)) } } async fn write_credentials( &mut self, old_cert_chain: &[openssl::x509::X509], new_cert_chain: (&str, &[openssl::x509::X509]), key: (&str, Self::NewKey), ) -> Result<(), cert_renewal::Error> { let (cert_id, new_cert_chain) = (new_cert_chain.0, new_cert_chain.1); let (old_key, new_key) = (key.0, key.1); if old_cert_chain.is_empty() || new_cert_chain.is_empty() { return Err(cert_renewal::Error::retryable_error("no certs in chain")); } let mut new_cert_chain_pem = Vec::new(); for cert in new_cert_chain { let mut cert = cert .to_pem() .map_err(|_| cert_renewal::Error::retryable_error("bad cert"))?; new_cert_chain_pem.append(&mut cert); } let mut old_cert_chain_pem = Vec::new(); for cert in old_cert_chain { let mut cert = cert .to_pem() .map_err(|_| cert_renewal::Error::retryable_error("bad cert"))?; old_cert_chain_pem.append(&mut cert); } { let cert_client = self.cert_client.lock().await; cert_client .import_cert(cert_id, &new_cert_chain_pem) .await .map_err(|_| cert_renewal::Error::retryable_error("failed to import new cert"))?; } if old_key != new_key { let res = { let key_client = self.key_client.lock().await; key_client.move_key_pair(&new_key, old_key).await }; if res.is_err() { let cert_client = self.cert_client.lock().await; cert_client .import_cert(cert_id, &old_cert_chain_pem) .await .map_err(|_| { cert_renewal::Error::retryable_error("failed to restore old cert") })?; } } log::info!("Edge CA was renewed"); if let Err(err) = self .renewal_tx .send(edgelet_core::WatchdogAction::EdgeCaRenewal) { log::warn!("Failed to request module restart: {}", err); } Ok(()) } } pub(crate) fn keys( key_connector: http_common::Connector, key_handle: &aziot_key_common::KeyHandle, ) -> Result< ( 
openssl::pkey::PKey<openssl::pkey::Private>, openssl::pkey::PKey<openssl::pkey::Public>, ), String, > { let key_client = aziot_key_client::Client::new( aziot_key_common_http::ApiVersion::V2021_05_01, key_connector, ); let key_client = std::sync::Arc::new(key_client); let key_handle = std::ffi::CString::new(key_handle.0.clone()).expect("key handle contained null"); let mut engine = KeyEngine::load(key_client).map_err(|_| "failed to load openssl key engine".to_string())?; let private_key = engine .load_private_key(&key_handle) .map_err(|_| "failed to load edge ca private key".to_string())?; let public_key = engine .load_public_key(&key_handle) .map_err(|_| "failed to load edge ca public key".to_string())?; Ok((private_key, public_key)) } pub(crate) fn extensions( ) -> Result<openssl::stack::Stack<openssl::x509::X509Extension>, openssl::error::ErrorStack> { let mut csr_extensions = openssl::stack::Stack::new()?; let mut key_usage = openssl::x509::extension::KeyUsage::new(); key_usage.critical().digital_signature().key_cert_sign(); let mut basic_constraints = openssl::x509::extension::BasicConstraints::new(); basic_constraints.ca().critical().pathlen(0); let key_usage = key_usage.build()?; let basic_constraints = basic_constraints.build()?; csr_extensions.push(key_usage)?; csr_extensions.push(basic_constraints)?; Ok(csr_extensions) } #[cfg(test)] mod tests { use super::EdgeCaRenewal; use super::{CertClient, KeyClient}; use cert_renewal::CertInterface; fn new_renewal(rotate_key: bool) -> EdgeCaRenewal { let settings = edgelet_test_utils::Settings::default(); let device_info = aziot_identity_common::AzureIoTSpec { hub_name: "test-hub.test.net".to_string(), gateway_host: "gateway-host.test.net".to_string(), device_id: aziot_identity_common::DeviceId("test-device".to_string()), module_id: None, gen_id: None, auth: None, }; let config = crate::WorkloadConfig::new(&settings, &device_info); let cert_client = CertClient::default(); let cert_client = 
std::sync::Arc::new(futures_util::lock::Mutex::new(cert_client)); let key_client = KeyClient::default(); let key_client = std::sync::Arc::new(futures_util::lock::Mutex::new(key_client)); let key_connector = url::Url::parse("unix:///tmp/test.sock").unwrap(); let key_connector = http_common::Connector::new(&key_connector).unwrap(); let (renewal_tx, _) = tokio::sync::mpsc::unbounded_channel::<edgelet_core::WatchdogAction>(); EdgeCaRenewal::new( rotate_key, &config, cert_client, key_client, key_connector, renewal_tx, ) } #[tokio::test] async fn get_cert() { let mut renewal = new_renewal(true); let (cert_1, _) = test_common::credential::test_certificate("test-cert-1"); let (cert_2, _) = test_common::credential::test_certificate("test-cert-2"); let mut cert_1_pem = cert_1.to_pem().unwrap(); let mut cert_2_pem = cert_2.to_pem().unwrap(); cert_1_pem.append(&mut cert_2_pem); let test_cert_chain = vec![cert_1, cert_2]; { let cert_client = renewal.cert_client.lock().await; cert_client.import_cert("empty-cert", &[]).await.unwrap(); cert_client .import_cert("test-cert", &cert_1_pem) .await .unwrap(); } renewal.get_cert("empty-cert").await.unwrap_err(); renewal.get_cert("does-not-exist").await.unwrap_err(); let cert_chain = renewal.get_cert("test-cert").await.unwrap(); assert_eq!(2, cert_chain.len()); assert_eq!( test_cert_chain[0].to_pem().unwrap(), cert_chain[0].to_pem().unwrap() ); assert_eq!( test_cert_chain[1].to_pem().unwrap(), cert_chain[1].to_pem().unwrap() ); } #[tokio::test] async fn get_key() { let mut renewal = new_renewal(true); renewal.get_key("test-key").await.unwrap(); { let mut key_client = renewal.key_client.lock().await; key_client.load_key_pair_ok = false; } renewal.get_key("test-key").await.unwrap_err(); } }
#[cfg(not(test))] use aziot_cert_client_async::Client as CertClient; #[cfg(not(test))] use aziot_key_client_async::Client as KeyClient; #[cfg(not(test))] use aziot_key_openssl_engine as KeyEngine; #[cfg(test)] use test_common::client::CertClient; #[cfg(test)] use test_common::client::KeyClient; #[cfg(test)] use test_common::client::KeyEngine; pub(crate) struct EdgeCaRenewal { rotate_key: bool, temp_cert: String, cert_client: std::sync::Arc<futures_util::lock::Mutex<CertClient>>, key_client: std::sync::Arc<futures_util::lock::Mutex<KeyClient>>, key_connector: http_common::Connector, renewal_tx: tokio::sync::mpsc::UnboundedSender<edgelet_core::WatchdogAction>, } impl EdgeCaRenewal { pub fn new( rotate_key: bool, config: &crate::WorkloadConfig, cert_client: std::sync::Arc<futures_util::lock::Mutex<CertClient>>, key_client: std::sync::Arc<futures_util::lock::Mutex<KeyClient>>, key_connector: http_common::Connector, renewal_tx: tokio::sync::mpsc::UnboundedSender<edgelet_core::WatchdogAction>, ) -> Self { let temp_cert = format!("{}-temp", config.edge_ca_cert); EdgeCaRenewal { rotate_key, temp_cert, cert_client, key_client, key_connector, renewal_tx, } } } #[async_trait::async_trait] impl cert_renewal::CertInterface for EdgeCaRenewal { type NewKey = String; async fn get_cert( &mut self, cert_id: &str, ) -> Result<Vec<openssl::x509::X509>, cert_renewal::Error> { let cert_client = self.cert_client.lock().await; let cert = cert_client .get_cert(cert_id) .await .map_err(|_| cert_renewal::Error::retryable_error("failed to retrieve edge CA cert"))?; let cert_chain = openssl::x509::X509::stack_from_pem(&cert) .map_err(|_| cert_renewal::Error::fatal_error("failed to parse edge CA cert"))?; if cert_chain.is_empty() { Err(cert_renewal::Error::fatal_error("no certs in chain")) } else { Ok(cert_chain) } }
async fn renew_cert( &mut self, old_cert_chain: &[openssl::x509::X509], key_id: &str, ) -> Result<(Vec<openssl::x509::X509>, Self::NewKey), cert_renewal::Error> { let (key_id, key_handle) = { let key_client = self.key_client.lock().await; if self.rotate_key { let key_id = format!("{}-temp", key_id); if let Ok(key_handle) = key_client.load_key_pair(&key_id).await { key_client.delete_key_pair(&key_handle).await.map_err(|_| { cert_renewal::Error::retryable_error("failed to clear temp key") })?; } let key_handle = key_client .create_key_pair_if_not_exists(&key_id, Some("rsa-2048:*")) .await .map_err(|_| { cert_renewal::Error::retryable_error("failed to generate temp key") })?; (key_id, key_handle) } else { let key_handle = key_client.load_key_pair(key_id).await.map_err(|_| { cert_renewal::Error::retryable_error("failed to get identity cert key") })?; (key_id.to_string(), key_handle) } }; let keys = keys(self.key_connector.clone(), &key_handle) .map_err(cert_renewal::Error::retryable_error)?; let extensions = extensions().map_err(|_| { cert_renewal::Error::fatal_error("failed to generate edge CA extensions") })?; let csr = crate::module::cert::new_csr( old_cert_chain[0].subject_name(), keys, Vec::new(), extensions, ) .map_err(|_| cert_renewal::Error::retryable_error("failed to create csr"))?; let new_cert = { let cert_client = self.cert_client.lock().await; let new_cert = cert_client .create_cert(&self.temp_cert, &csr, None) .await .map_err(|_| cert_renewal::Error::retryable_error("failed to create new cert"))?; if let Err(err) = cert_client.delete_cert(&self.temp_cert).await { log::warn!( "Failed to delete temporary certificate created by cert renewal: {}", err ); } new_cert }; let new_cert_chain = openssl::x509::X509::stack_from_pem(&new_cert) .map_err(|_| cert_renewal::Error::retryable_error("failed to parse new cert"))?; if new_cert_chain.is_empty() { Err(cert_renewal::Error::retryable_error("no certs in chain")) } else { Ok((new_cert_chain, key_id)) } } async fn 
write_credentials( &mut self, old_cert_chain: &[openssl::x509::X509], new_cert_chain: (&str, &[openssl::x509::X509]), key: (&str, Self::NewKey), ) -> Result<(), cert_renewal::Error> { let (cert_id, new_cert_chain) = (new_cert_chain.0, new_cert_chain.1); let (old_key, new_key) = (key.0, key.1); if old_cert_chain.is_empty() || new_cert_chain.is_empty() { return Err(cert_renewal::Error::retryable_error("no certs in chain")); } let mut new_cert_chain_pem = Vec::new(); for cert in new_cert_chain { let mut cert = cert .to_pem() .map_err(|_| cert_renewal::Error::retryable_error("bad cert"))?; new_cert_chain_pem.append(&mut cert); } let mut old_cert_chain_pem = Vec::new(); for cert in old_cert_chain { let mut cert = cert .to_pem() .map_err(|_| cert_renewal::Error::retryable_error("bad cert"))?; old_cert_chain_pem.append(&mut cert); } { let cert_client = self.cert_client.lock().await; cert_client .import_cert(cert_id, &new_cert_chain_pem) .await .map_err(|_| cert_renewal::Error::retryable_error("failed to import new cert"))?; } if old_key != new_key { let res = { let key_client = self.key_client.lock().await; key_client.move_key_pair(&new_key, old_key).await }; if res.is_err() { let cert_client = self.cert_client.lock().await; cert_client .import_cert(cert_id, &old_cert_chain_pem) .await .map_err(|_| { cert_renewal::Error::retryable_error("failed to restore old cert") })?; } } log::info!("Edge CA was renewed"); if let Err(err) = self .renewal_tx .send(edgelet_core::WatchdogAction::EdgeCaRenewal) { log::warn!("Failed to request module restart: {}", err); } Ok(()) } } pub(crate) fn keys( key_connector: http_common::Connector, key_handle: &aziot_key_common::KeyHandle, ) -> Result< ( openssl::pkey::PKey<openssl::pkey::Private>, openssl::pkey::PKey<openssl::pkey::Public>, ), String, > { let key_client = aziot_key_client::Client::new( aziot_key_common_http::ApiVersion::V2021_05_01, key_connector, ); let key_client = std::sync::Arc::new(key_client); let key_handle = 
std::ffi::CString::new(key_handle.0.clone()).expect("key handle contained null"); let mut engine = KeyEngine::load(key_client).map_err(|_| "failed to load openssl key engine".to_string())?; let private_key = engine .load_private_key(&key_handle) .map_err(|_| "failed to load edge ca private key".to_string())?; let public_key = engine .load_public_key(&key_handle) .map_err(|_| "failed to load edge ca public key".to_string())?; Ok((private_key, public_key)) } pub(crate) fn extensions( ) -> Result<openssl::stack::Stack<openssl::x509::X509Extension>, openssl::error::ErrorStack> { let mut csr_extensions = openssl::stack::Stack::new()?; let mut key_usage = openssl::x509::extension::KeyUsage::new(); key_usage.critical().digital_signature().key_cert_sign(); let mut basic_constraints = openssl::x509::extension::BasicConstraints::new(); basic_constraints.ca().critical().pathlen(0); let key_usage = key_usage.build()?; let basic_constraints = basic_constraints.build()?; csr_extensions.push(key_usage)?; csr_extensions.push(basic_constraints)?; Ok(csr_extensions) } #[cfg(test)] mod tests { use super::EdgeCaRenewal; use super::{CertClient, KeyClient}; use cert_renewal::CertInterface; fn new_renewal(rotate_key: bool) -> EdgeCaRenewal { let settings = edgelet_test_utils::Settings::default(); let device_info = aziot_identity_common::AzureIoTSpec { hub_name: "test-hub.test.net".to_string(), gateway_host: "gateway-host.test.net".to_string(), device_id: aziot_identity_common::DeviceId("test-device".to_string()), module_id: None, gen_id: None, auth: None, }; let config = crate::WorkloadConfig::new(&settings, &device_info); let cert_client = CertClient::default(); let cert_client = std::sync::Arc::new(futures_util::lock::Mutex::new(cert_client)); let key_client = KeyClient::default(); let key_client = std::sync::Arc::new(futures_util::lock::Mutex::new(key_client)); let key_connector = url::Url::parse("unix:///tmp/test.sock").unwrap(); let key_connector = 
http_common::Connector::new(&key_connector).unwrap(); let (renewal_tx, _) = tokio::sync::mpsc::unbounded_channel::<edgelet_core::WatchdogAction>(); EdgeCaRenewal::new( rotate_key, &config, cert_client, key_client, key_connector, renewal_tx, ) } #[tokio::test] async fn get_cert() { let mut renewal = new_renewal(true); let (cert_1, _) = test_common::credential::test_certificate("test-cert-1"); let (cert_2, _) = test_common::credential::test_certificate("test-cert-2"); let mut cert_1_pem = cert_1.to_pem().unwrap(); let mut cert_2_pem = cert_2.to_pem().unwrap(); cert_1_pem.append(&mut cert_2_pem); let test_cert_chain = vec![cert_1, cert_2]; { let cert_client = renewal.cert_client.lock().await; cert_client.import_cert("empty-cert", &[]).await.unwrap(); cert_client .import_cert("test-cert", &cert_1_pem) .await .unwrap(); } renewal.get_cert("empty-cert").await.unwrap_err(); renewal.get_cert("does-not-exist").await.unwrap_err(); let cert_chain = renewal.get_cert("test-cert").await.unwrap(); assert_eq!(2, cert_chain.len()); assert_eq!( test_cert_chain[0].to_pem().unwrap(), cert_chain[0].to_pem().unwrap() ); assert_eq!( test_cert_chain[1].to_pem().unwrap(), cert_chain[1].to_pem().unwrap() ); } #[tokio::test] async fn get_key() { let mut renewal = new_renewal(true); renewal.get_key("test-key").await.unwrap(); { let mut key_client = renewal.key_client.lock().await; key_client.load_key_pair_ok = false; } renewal.get_key("test-key").await.unwrap_err(); } }
async fn get_key( &mut self, key_id: &str, ) -> Result<openssl::pkey::PKey<openssl::pkey::Private>, cert_renewal::Error> { let key_client = self.key_client.lock().await; let key_handle = key_client .load_key_pair(key_id) .await .map_err(|_| cert_renewal::Error::retryable_error("failed to get identity cert key"))?; let (private_key, _) = keys(self.key_connector.clone(), &key_handle) .map_err(cert_renewal::Error::retryable_error)?; Ok(private_key) }
function_block-full_function
[ { "content": "pub fn prepare_cert_uri_module(hub_name: &str, device_id: &str, module_id: &str) -> String {\n\n format!(\"URI: azureiot://{hub_name}/devices/{device_id}/modules/{module_id}\")\n\n}\n", "file_path": "edgelet/edgelet-utils/src/lib.rs", "rank": 0, "score": 326233.4091490721 }, { "content": "fn parse_boolean_expression(expression: &str) -> String {\n\n let expression = replace_env_var_with_boolean(expression);\n\n match solve_boolean_expression(&expression) {\n\n Ok(x) => x,\n\n Err(e) => {\n\n error!(\"{}\", e);\n\n expression\n\n }\n\n }\n\n}\n\n\n", "file_path": "edge-modules/api-proxy-module/src/monitors/config_parser.rs", "rank": 1, "score": 309738.3848851409 }, { "content": "// Comma in env var name is forbidden because of that\n\n// We use this instead of simpler regex because we cannot match overlapping regular expression with regex.\n\nfn replace_env_var_with_boolean(expression: &str) -> String {\n\n let mut fifo = Vec::new();\n\n let mut flush_fifo = Vec::new();\n\n\n\n for c in expression.chars() {\n\n match c {\n\n '(' | '!' 
| '&' | '|' => fifo.push(c),\n\n ')' | ',' => {\n\n if flush_fifo.len() == 1 && Some('0') == flush_fifo.pop() {\n\n fifo.push('0');\n\n } else if flush_fifo.len() > 1 {\n\n fifo.push('1');\n\n };\n\n fifo.push(c);\n\n flush_fifo.clear();\n\n }\n\n _ => flush_fifo.push(c),\n\n }\n\n }\n\n\n\n fifo.iter().collect()\n\n}\n\n\n", "file_path": "edge-modules/api-proxy-module/src/monitors/config_parser.rs", "rank": 2, "score": 305226.82643397804 }, { "content": "// The resolution of the regular expression is done by using a stack.\n\n// For example: &(!(0),1,1)\n\n// Stack fills up: Stack = ['&', '(', '!', '(','0']\n\n// When \")\" is encounter, the deepest boolean expression is solved:\n\n// Stack result Stack = ['&', '(', '1']\n\n// Stack fills up: Stack = ['&', '(', '1','0','0']\n\n// When \")\" is encounter, the last boolean expression is solved:\n\n// First all the value are load in temporary stack: tmp_fifo = ['1','0','0']\n\n// Then the operator '&' is extracted and the expression is solved\n\nfn solve_boolean_expression(expression: &str) -> Result<String, Error> {\n\n let mut fifo = Vec::new();\n\n\n\n for c in expression.chars() {\n\n match c {\n\n '(' | ',' | ' ' => (),\n\n '!' | '|' | '&' => fifo.push(Expr::Operator(c)),\n\n '1' => fifo.push(Expr::Value(true)),\n\n '0' => fifo.push(Expr::Value(false)),\n\n ')' => {\n\n let mut tmp_fifo = vec![];\n\n while let Some(Expr::Value(v)) = fifo.last() {\n\n tmp_fifo.push(*v);\n\n fifo.pop();\n\n }\n\n\n\n if let Some(Expr::Operator(val)) = fifo.pop() {\n\n let result = match val {\n\n '!' 
=> {\n\n if tmp_fifo.len() > 1 {\n", "file_path": "edge-modules/api-proxy-module/src/monitors/config_parser.rs", "rank": 3, "score": 288501.19757594133 }, { "content": "fn get_expiration(cert: &str) -> Result<String, http_common::server::Error> {\n\n let cert = openssl::x509::X509::from_pem(cert.as_bytes())\n\n .map_err(|_| edgelet_http::error::server_error(\"failed to parse cert\"))?;\n\n\n\n // openssl::asn1::Asn1TimeRef does not expose any way to convert the ASN1_TIME to a Rust-friendly type\n\n //\n\n // Its Display impl uses ASN1_TIME_print, so we convert it into a String and parse it back\n\n // into a chrono::DateTime<chrono::Utc>\n\n let expiration = cert.not_after().to_string();\n\n let expiration = chrono::NaiveDateTime::parse_from_str(&expiration, \"%b %e %H:%M:%S %Y GMT\")\n\n .expect(\"cert not_after should parse\");\n\n let expiration = chrono::DateTime::<chrono::Utc>::from_utc(expiration, chrono::Utc);\n\n\n\n Ok(expiration.to_rfc3339())\n\n}\n\n\n", "file_path": "edgelet/edgelet-http-workload/src/module/cert/mod.rs", "rank": 4, "score": 267751.5714366719 }, { "content": "#[allow(clippy::module_name_repetitions, clippy::needless_pass_by_value)]\n\npub fn server_error(error: impl ToString) -> Error {\n\n Error {\n\n status_code: http::StatusCode::INTERNAL_SERVER_ERROR,\n\n message: error.to_string().into(),\n\n }\n\n}\n", "file_path": "edgelet/edgelet-http/src/error.rs", "rank": 5, "score": 267018.0956548427 }, { "content": "//Check for expiry of certificates. 
If certificates are expired: rotate.\n\npub fn start(\n\n notify_server_cert_reload_api_proxy: Arc<Notify>,\n\n notify_trust_bundle_reload_api_proxy: Arc<Notify>,\n\n) -> Result<(JoinHandle<Result<()>>, ShutdownHandle), Error> {\n\n info!(\"Initializing certs monitoring loop\");\n\n\n\n let shutdown_signal = Arc::new(Notify::new());\n\n let shutdown_handle = ShutdownHandle(shutdown_signal.clone());\n\n\n\n let module_id =\n\n env::var(\"IOTEDGE_MODULEID\").context(format!(\"Missing env var {}\", \"IOTEDGE_MODULEID\"))?;\n\n let generation_id = env::var(\"IOTEDGE_MODULEGENERATIONID\")\n\n .context(format!(\"Missing env var {}\", \"IOTEDGE_MODULEGENERATIONID\"))?;\n\n let gateway_hostname = env::var(\"IOTEDGE_GATEWAYHOSTNAME\")\n\n .context(format!(\"Missing env var {}\", \"IOTEDGE_GATEWAYHOSTNAME\"))?;\n\n let workload_url = env::var(\"IOTEDGE_WORKLOADURI\")\n\n .context(format!(\"Missing env var {}\", \"IOTEDGE_WORKLOADURI\"))?;\n\n let mut cert_monitor = CertificateMonitor::new(\n\n module_id,\n\n generation_id,\n", "file_path": "edge-modules/api-proxy-module/src/monitors/certs_monitor.rs", "rank": 6, "score": 265876.3434969189 }, { "content": "pub fn start(\n\n mut client: Client,\n\n notify_received_config: Arc<Notify>,\n\n) -> Result<(JoinHandle<Result<()>>, ShutdownHandle), Error> {\n\n let shutdown_signal = Arc::new(Notify::new());\n\n let shutdown_handle = ShutdownHandle(shutdown_signal.clone());\n\n\n\n info!(\"Initializing config monitoring loop\");\n\n let mut config_parser = ConfigParser::new()?;\n\n parse_config(&mut config_parser)?;\n\n\n\n info!(\"Starting config monitoring loop\");\n\n //Config is ready, send notification.\n\n notify_received_config.notify_one();\n\n\n\n let monitor_loop: JoinHandle<Result<()>> = tokio::spawn(async move {\n\n loop {\n\n let wait_shutdown = shutdown_signal.notified();\n\n pin_mut!(wait_shutdown);\n\n\n", "file_path": "edge-modules/api-proxy-module/src/monitors/config_monitor.rs", "rank": 7, "score": 265533.4693358352 
}, { "content": "fn parse_config(parse_config: &mut ConfigParser) -> Result<()> {\n\n //Read \"raw configuration\". Contains environment variables and sections.\n\n //Extract IO calls from core function for mocking\n\n let str = file::get_string_from_file(PROXY_CONFIG_PATH_RAW)?;\n\n\n\n let str = parse_config.get_parsed_config(&str)?;\n\n //Extract IO calls from core function for mocking\n\n file::write_binary_to_file(str.as_bytes(), PROXY_CONFIG_PATH_PARSED)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "edge-modules/api-proxy-module/src/monitors/config_monitor.rs", "rank": 8, "score": 253956.9130992008 }, { "content": "pub fn parse_query(query: &str) -> HashMap<&str, &str> {\n\n query\n\n .split('&')\n\n .filter_map(|seg| {\n\n if seg.is_empty() {\n\n None\n\n } else {\n\n let mut tokens = seg.splitn(2, '=');\n\n tokens.next().map(|key| {\n\n let val = tokens.next().unwrap_or(\"\");\n\n (key, val)\n\n })\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "edgelet/edgelet-utils/src/lib.rs", "rank": 9, "score": 243560.38085463847 }, { "content": "pub fn version_with_source_version() -> String {\n\n VERSION_WITH_SOURCE_VERSION.to_string()\n\n}\n\n\n", "file_path": "edgelet/edgelet-core/src/lib.rs", "rank": 10, "score": 241823.52817673038 }, { "content": "pub fn version() -> &'static str {\n\n &VERSION\n\n}\n\n\n", "file_path": "edgelet/edgelet-core/src/lib.rs", "rank": 11, "score": 236980.17523996445 }, { "content": "fn to_shell_var(val: impl Into<String>) -> String {\n\n let dollar = String::from(\"$\");\n\n let val_str = val.into();\n\n dollar + &val_str\n\n}\n\n\n", "file_path": "edgelet/iotedge/src/check/checks/container_connect_upstream.rs", "rank": 12, "score": 234426.12276659397 }, { "content": "fn to_serde_enum(val: impl Into<String>) -> String {\n\n format!(\"{:?}\", val.into())\n\n}\n\n\n", "file_path": "edgelet/iotedge/src/check/checks/container_connect_upstream.rs", "rank": 13, "score": 234426.12276659397 }, { "content": "fn save_raw_config(twin: 
&HashMap<String, Value>) -> Result<()> {\n\n let config = twin\n\n .get(PROXY_CONFIG_TAG)\n\n .ok_or_else(|| anyhow!(\"Key {} not found in twin\", PROXY_CONFIG_TAG))?;\n\n\n\n let config = config\n\n .as_str()\n\n .context(\"Cannot extract json as base64 string\")?;\n\n\n\n let bytes =\n\n base64::decode(config).map_err(|err| anyhow!(\"Cannot decode base64. Caused by {}\", err))?;\n\n\n\n file::write_binary_to_file(&bytes, PROXY_CONFIG_PATH_RAW).map_err(|err| {\n\n anyhow!(\n\n \"Cannot write config file to path: {}. Caused by {}\",\n\n PROXY_CONFIG_PATH_RAW,\n\n err\n\n )\n\n })?;\n\n Ok(())\n\n}\n\n\n", "file_path": "edge-modules/api-proxy-module/src/monitors/config_monitor.rs", "rank": 14, "score": 234279.46453541936 }, { "content": "pub fn execute(\n\n connection_string: String,\n\n out_config_file: &Path,\n\n force: bool,\n\n) -> Result<(), std::borrow::Cow<'static, str>> {\n\n if !force && out_config_file.exists() {\n\n return Err(format!(\n\n \"\\\n\nFile {} already exists. Azure IoT Edge has already been configured.\n\n\n\nTo have the configuration take effect, run:\n\n\n\n sudo iotedge config apply\n\n\n\nTo reconfigure IoT Edge, run:\n\n\n\n sudo iotedge config mp --force\n\n\",\n\n out_config_file.display()\n\n )\n", "file_path": "edgelet/iotedge/src/config/mp.rs", "rank": 15, "score": 233752.37363760156 }, { "content": "pub fn execute(\n\n old_config_file: &Path,\n\n new_config_file: &Path,\n\n force: bool,\n\n) -> Result<(), std::borrow::Cow<'static, str>> {\n\n // In production, the command needs to run as root. But it's convenient for developers to run as the current user.\n\n //\n\n // So if this is a debug build, use the current user. 
Otherwise, tell the user to re-run as root.\n\n let root_user = {\n\n let current_uid = nix::unistd::Uid::current();\n\n if current_uid.is_root() {\n\n let root_user = nix::unistd::User::from_uid(current_uid)\n\n .map_err(|err| format!(\"could not query root user information: {err}\"))?\n\n .ok_or(\"could not query root user information\")?;\n\n\n\n root_user\n\n } else if cfg!(debug_assertions) {\n\n let current_user = nix::unistd::User::from_uid(nix::unistd::Uid::current())\n\n .map_err(|err| format!(\"could not query current user information: {err}\"))?\n\n .ok_or(\"could not query current user information\")?;\n", "file_path": "edgelet/iotedge/src/config/import/mod.rs", "rank": 16, "score": 229883.40812626874 }, { "content": "pub fn get_sdk_client() -> Result<Client, Error> {\n\n let client = match Client::new_for_edge_module(\n\n Tcp,\n\n None,\n\n TWIN_CONFIG_MAX_BACK_OFF,\n\n TWIN_CONFIG_KEEP_ALIVE,\n\n ) {\n\n Ok(client) => client,\n\n Err(err) => return Err(anyhow::anyhow!(\"Could not create client: {}\", err)),\n\n };\n\n\n\n Ok(client)\n\n}\n\n\n", "file_path": "edge-modules/api-proxy-module/src/monitors/config_monitor.rs", "rank": 17, "score": 228815.6811465619 }, { "content": "pub fn get_string_from_file<P: AsRef<Path>>(path: P) -> Result<String, anyhow::Error> {\n\n let str = fs::read_to_string(path).context(\"Unable to read file\")?;\n\n Ok(str)\n\n}\n", "file_path": "edge-modules/api-proxy-module/src/utils/file.rs", "rank": 18, "score": 227033.1530420794 }, { "content": "pub fn init() {\n\n env_logger::Builder::new()\n\n .format(|fmt, record| {\n\n let level = match record.level() {\n\n Level::Trace => \"TRCE\",\n\n Level::Debug => \"DBUG\",\n\n Level::Info => \"INFO\",\n\n Level::Warn => \"WARN\",\n\n Level::Error => \"ERR!\",\n\n };\n\n let timestamp = fmt.timestamp();\n\n\n\n if record.level() >= Level::Debug {\n\n writeln!(\n\n fmt,\n\n \"<{}>{} [{}] - [{}] {}\",\n\n syslog_level(record.level()),\n\n timestamp,\n\n level,\n\n 
record.target(),\n", "file_path": "edge-modules/edgehub-proxy/src/logging.rs", "rank": 19, "score": 222410.2565520082 }, { "content": "pub fn is_virtualized_env() -> anyhow::Result<Option<bool>> {\n\n if cfg!(target_os = \"linux\") {\n\n let status = Command::new(\"systemd-detect-virt\")\n\n .status()\n\n .context(Error::GetVirtualizationStatus)?;\n\n\n\n Ok(Some(status.success()))\n\n } else {\n\n Ok(None)\n\n }\n\n}\n", "file_path": "edgelet/edgelet-core/src/virtualization.rs", "rank": 20, "score": 214422.05363405892 }, { "content": "#[inline]\n\npub fn ensure_not_empty(value: &str) -> Result<(), Error> {\n\n if value.trim().is_empty() {\n\n return Err(Error::ArgumentEmpty);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "edgelet/edgelet-utils/src/lib.rs", "rank": 21, "score": 214391.42863056852 }, { "content": "fn merge_env(cur_env: Option<&[String]>, new_env: &BTreeMap<String, String>) -> Vec<String> {\n\n // build a new merged map containing string slices for keys and values\n\n // pointing into String instances in new_env\n\n let mut merged_env = BTreeMap::new();\n\n merged_env.extend(new_env.iter().map(|(k, v)| (k.as_str(), v.as_str())));\n\n\n\n if let Some(env) = cur_env {\n\n // extend merged_env with variables in cur_env (again, these are\n\n // only string slices pointing into strings inside cur_env)\n\n merged_env.extend(env.iter().filter_map(|s| {\n\n let mut tokens = s.splitn(2, '=');\n\n tokens\n\n .next()\n\n .map(|key| (key, tokens.next().unwrap_or_default()))\n\n }));\n\n }\n\n\n\n // finally build a new Vec<String>; we alloc new strings here\n\n merged_env\n\n .iter()\n", "file_path": "edgelet/edgelet-docker/src/runtime.rs", "rank": 22, "score": 213194.84454197634 }, { "content": "pub fn nginx_controller_start(\n\n notify_config_reload_api_proxy: Arc<Notify>,\n\n notify_server_cert_reload_api_proxy: Arc<Notify>,\n\n notify_trust_bundle_reload_api_proxy: Arc<Notify>,\n\n) -> Result<(JoinHandle<Result<()>>, ShutdownHandle), Error> {\n\n let 
program_path = \"/usr/sbin/nginx\";\n\n let proxy_name = \"nginx\";\n\n let stop_proxy_args = vec![\"-s\".to_string(), \"stop\".to_string()];\n\n let reload_proxy_args = vec![\"-s\".to_string(), \"reload\".to_string()];\n\n let start_proxy_args = vec![\n\n \"-c\".to_string(),\n\n \"/app/nginx_config.conf\".to_string(),\n\n \"-g\".to_string(),\n\n \"daemon off;\".to_string(),\n\n ];\n\n\n\n let shutdown_signal = Arc::new(Notify::new());\n\n let shutdown_handle = ShutdownHandle(shutdown_signal.clone());\n\n\n\n let monitor_loop: JoinHandle<Result<()>> = tokio::spawn(async move {\n", "file_path": "edge-modules/api-proxy-module/src/main.rs", "rank": 23, "score": 212229.80122842244 }, { "content": "fn get_local_service_proxy_setting(svc_name: &str) -> Option<String> {\n\n const PROXY_KEY: &str = \"https_proxy\";\n\n let output = Command::new(\"sh\")\n\n .arg(\"-c\")\n\n .arg(\"systemctl show --property=Environment \".to_owned() + svc_name)\n\n .output()\n\n .expect(\"failed to execute process\");\n\n let stdout = String::from_utf8_lossy(&output.stdout);\n\n\n\n let mut svc_proxy = None;\n\n let vars = stdout.trim_start_matches(\"Environment=\");\n\n for var in vars.split(' ') {\n\n let mut parts = var.split('=');\n\n if let Some(PROXY_KEY) = parts.next() {\n\n svc_proxy = parts.next().map(String::from);\n\n\n\n let Some(mut s) = svc_proxy else {\n\n return svc_proxy;\n\n };\n\n\n", "file_path": "edgelet/iotedge/src/check/mod.rs", "rank": 24, "score": 210967.26028291738 }, { "content": "pub fn workload(url: &str) -> Result<WorkloadClient, Error> {\n\n let url = Url::parse(url).map_err(|e| Error::ParseUrl(url.to_string(), e))?;\n\n\n\n let (connector, scheme) = match url.scheme() {\n\n #[cfg(unix)]\n\n \"unix\" => (\n\n Connector::Unix(UnixConnector),\n\n Scheme::Unix(url.path().to_string()),\n\n ),\n\n \"http\" => (\n\n Connector::Http(HttpConnector::new()),\n\n Scheme::Http(url.to_string()),\n\n ),\n\n _ => return Err(Error::UnrecognizedUrlScheme(url.to_string())),\n\n 
};\n\n\n\n let client = Client::builder().build(connector);\n\n Ok(WorkloadClient::new(client, scheme))\n\n}\n\n\n", "file_path": "mqtt/edgelet-client/src/lib.rs", "rank": 25, "score": 206596.62684733822 }, { "content": "fn set_quickstart_ca(\n\n keyd_config: &mut aziot_keyd_config::Config,\n\n certd_config: &mut aziot_certd_config::Config,\n\n aziotcs_uid: nix::unistd::Uid,\n\n expiry_days: Option<u32>,\n\n subject: Option<aziot_certd_config::CertSubject>,\n\n) {\n\n let issuance = aziot_certd_config::CertIssuanceOptions {\n\n method: aziot_certd_config::CertIssuanceMethod::SelfSigned,\n\n expiry_days,\n\n subject,\n\n };\n\n\n\n certd_config.cert_issuance.certs.insert(\n\n edgelet_settings::AZIOT_EDGED_CA_ALIAS.to_owned(),\n\n issuance.clone(),\n\n );\n\n\n\n let mut certd_keys = vec![edgelet_settings::AZIOT_EDGED_CA_ALIAS.to_owned()];\n\n\n", "file_path": "edgelet/iotedge/src/config/apply.rs", "rank": 26, "score": 205061.83858887412 }, { "content": "pub fn default_agent() -> edgelet_settings::ModuleSpec<edgelet_settings::DockerConfig> {\n\n edgelet_settings::ModuleSpec::new(\n\n /* image */ \"edgeAgent\".to_owned(),\n\n /* type */ \"docker\".to_owned(),\n\n /* config */\n\n edgelet_settings::DockerConfig::new(\n\n /* image */ \"mcr.microsoft.com/azureiotedge-agent:1.4\".to_owned(),\n\n /* create_options */ docker::models::ContainerCreateBody::new(),\n\n /* digest */ None,\n\n /* auth */ None,\n\n /* allow_elevated_docker_permissions */ true,\n\n )\n\n .expect(\"image is never empty\"),\n\n /* env */ Default::default(),\n\n /* image pull policy */ Default::default(),\n\n )\n\n .expect(\"name and type are never empty\")\n\n}\n\n\n\n#[derive(Debug, serde::Deserialize, serde::Serialize)]\n", "file_path": "edgelet/iotedge/src/config/super_config.rs", "rank": 27, "score": 204196.3692933126 }, { "content": "fn encode_utf8_str<B>(item: &str, dst: &mut B) -> Result<(), EncodeError>\n\nwhere\n\n B: ByteBuf,\n\n{\n\n let len = item.len();\n\n dst.put_u16_bytes(\n\n 
len.try_into()\n\n .map_err(|_| EncodeError::StringTooLarge(len))?,\n\n );\n\n\n\n dst.put_slice_bytes(item.as_bytes());\n\n\n\n Ok(())\n\n}\n\n\n\n/// A tokio decoder for MQTT-format \"remaining length\" numbers.\n\n///\n\n/// These numbers are encoded with a variable-length scheme that uses the MSB of each byte as a continuation bit.\n\n///\n\n/// Ref: 2.2.3 Remaining Length\n", "file_path": "mqtt/mqtt3/src/proto/mod.rs", "rank": 28, "score": 203882.74941487287 }, { "content": "pub fn parse_since(since: &str) -> anyhow::Result<i32> {\n\n if let Ok(datetime) = DateTime::parse_from_rfc3339(since) {\n\n let temp: Result<i32, _> = datetime.timestamp().try_into();\n\n Ok(temp.context(Error::ParseSince)?)\n\n } else if let Ok(epoch) = since.parse() {\n\n Ok(epoch)\n\n } else if let Ok(duration) = parse_duration(since) {\n\n let nano: Result<i64, _> = duration.as_nanos().try_into();\n\n let nano = nano.context(Error::ParseSince)?;\n\n\n\n let temp: Result<i32, _> = (Local::now() - Duration::nanoseconds(nano))\n\n .timestamp()\n\n .try_into();\n\n Ok(temp.context(Error::ParseSince)?)\n\n } else {\n\n Err(Error::ParseSince.into())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "edgelet/edgelet-core/src/parse_since.rs", "rank": 29, "score": 203581.25044925464 }, { "content": "pub fn log_error(error: &Error) {\n\n log::error!(\"{}\", error);\n\n}\n", "file_path": "edge-modules/edgehub-proxy/src/logging.rs", "rank": 30, "score": 199442.20946486114 }, { "content": " /// <summary>\n\n /// Provides a wrapper for a non-generic <see cref=\"T:System.Threading.Tasks.Task\" /> and calls into the pipeline\n\n /// to retry only the generic version of the <see cref=\"T:System.Threading.Tasks.Task\" />.\n\n /// </summary>\n\n class AsyncExecution : AsyncExecution<bool>\n\n {\n\n static Task<bool> cachedBoolTask;\n\n\n\n public AsyncExecution(Func<Task> taskAction, ShouldRetry shouldRetry, Func<Exception, bool> isTransient, Action<int, Exception, TimeSpan> onRetrying, bool 
fastFirstRetry, CancellationToken cancellationToken)\n\n : base(() => StartAsGenericTask(taskAction), shouldRetry, isTransient, onRetrying, fastFirstRetry, cancellationToken)\n\n {\n\n }\n\n\n\n /// <summary>\n\n /// Wraps the non-generic <see cref=\"T:System.Threading.Tasks.Task\" /> into a generic <see cref=\"T:System.Threading.Tasks.Task\" />.\n\n /// </summary>\n\n /// <param name=\"taskAction\">The task to wrap.</param>\n\n /// <returns>A <see cref=\"T:System.Threading.Tasks.Task\" /> that wraps the non-generic <see cref=\"T:System.Threading.Tasks.Task\" />.</returns>\n\n static Task<bool> StartAsGenericTask(Func<Task> taskAction)\n\n {\n\n Task task = taskAction();\n\n if (task == null)\n\n {\n\n throw new ArgumentException(\n", "file_path": "edge-util/src/Microsoft.Azure.Devices.Edge.Util/transientFaultHandling/AsyncExecution.cs", "rank": 31, "score": 198100.77494917237 }, { "content": "fn new_keys() -> Result<\n\n (\n\n openssl::pkey::PKey<openssl::pkey::Private>,\n\n openssl::pkey::PKey<openssl::pkey::Public>,\n\n ),\n\n openssl::error::ErrorStack,\n\n> {\n\n let rsa = openssl::rsa::Rsa::generate(2048)?;\n\n let private_key = openssl::pkey::PKey::from_rsa(rsa)?;\n\n\n\n let public_key = private_key.public_key_to_pem()?;\n\n let public_key = openssl::pkey::PKey::public_key_from_pem(&public_key)?;\n\n\n\n Ok((private_key, public_key))\n\n}\n\n\n\npub(crate) fn new_csr(\n\n subject: &openssl::x509::X509NameRef,\n\n keys: (\n\n openssl::pkey::PKey<openssl::pkey::Private>,\n", "file_path": "edgelet/edgelet-http-workload/src/module/cert/mod.rs", "rank": 32, "score": 190059.30411881392 }, { "content": " /// <summary>\n\n /// Provides a wrapper for a non-generic <see cref=\"T:System.Threading.Tasks.Task\" /> and calls into the pipeline\n\n /// to retry only the generic version of the <see cref=\"T:System.Threading.Tasks.Task\" />.\n\n /// </summary>\n\n class AsyncExecution : AsyncExecution<bool>\n\n {\n\n static Task<bool> cachedBoolTask;\n\n\n\n public 
AsyncExecution(Func<Task> taskAction, ShouldRetry shouldRetry, Func<Exception, bool> isTransient, Action<int, Exception, TimeSpan> onRetrying, bool fastFirstRetry, CancellationToken cancellationToken)\n\n : base(() => StartAsGenericTask(taskAction), shouldRetry, isTransient, onRetrying, fastFirstRetry, cancellationToken)\n\n {\n\n }\n\n\n\n /// <summary>\n\n /// Wraps the non-generic <see cref=\"T:System.Threading.Tasks.Task\" /> into a generic <see cref=\"T:System.Threading.Tasks.Task\" />.\n\n /// </summary>\n\n /// <param name=\"taskAction\">The task to wrap.</param>\n\n /// <returns>A <see cref=\"T:System.Threading.Tasks.Task\" /> that wraps the non-generic <see cref=\"T:System.Threading.Tasks.Task\" />.</returns>\n\n static Task<bool> StartAsGenericTask(Func<Task> taskAction)\n\n {\n\n Task task = taskAction();\n\n if (task == null)\n\n {\n\n throw new ArgumentException(\n", "file_path": "edge-modules/functions/binding/src/Microsoft.Azure.WebJobs.Extensions.EdgeHub/transientFaultHandling/AsyncExecution.cs", "rank": 33, "score": 189926.94411044038 }, { "content": "struct CertificateMonitor {\n\n module_id: String,\n\n generation_id: String,\n\n hostname: String,\n\n bundle_of_trust_hash: String,\n\n work_load_api_client: edgelet_client::WorkloadClient,\n\n server_cert_expiration_date: Option<DateTime<Utc>>,\n\n validity_days: Duration,\n\n}\n\n\n\nimpl CertificateMonitor {\n\n pub fn new(\n\n module_id: String,\n\n generation_id: String,\n\n hostname: String,\n\n workload_url: &str,\n\n validity_days: Duration,\n\n ) -> Result<Self, Error> {\n\n //Create expiry date in the past so cert has to be rotated now.\n\n let server_cert_expiration_date = None;\n", "file_path": "edge-modules/api-proxy-module/src/monitors/certs_monitor.rs", "rank": 34, "score": 188059.4508869404 }, { "content": "fn unwrap_certificate_response(\n\n resp: &CertificateResponse,\n\n) -> Result<((String, String), DateTime<Utc>), anyhow::Error> {\n\n let server_crt = 
resp.certificate().to_string();\n\n let private_key_raw = resp.private_key();\n\n let private_key = match private_key_raw.bytes() {\n\n Some(val) => val.to_string(),\n\n None => return Err(anyhow::anyhow!(\"Private key field is empty\")),\n\n };\n\n\n\n let datetime = DateTime::parse_from_rfc3339(resp.expiration())\n\n .context(\"Error parsing certificate expiration date\")?;\n\n // convert the string into DateTime<Utc> or other timezone\n\n let expiration_date = datetime.with_timezone(&Utc);\n\n\n\n Ok(((server_crt, private_key), expiration_date))\n\n}\n\n\n\n#[cfg(test)]\n\n#[allow(clippy::semicolon_if_nothing_returned)]\n", "file_path": "edge-modules/api-proxy-module/src/monitors/certs_monitor.rs", "rank": 35, "score": 184269.5677250881 }, { "content": "fn device_digest(device: &aziot_identity_common::AzureIoTSpec) -> String {\n\n let json = serde_json::json!({\n\n \"device_id\": device.device_id.0,\n\n \"gateway_host_name\": device.gateway_host,\n\n \"hub_name\": device.hub_name,\n\n })\n\n .to_string();\n\n\n\n let digest = sha2::Sha256::digest(json.as_bytes());\n\n\n\n let engine = base64::engine::general_purpose::STANDARD;\n\n base64::Engine::encode(&engine, digest)\n\n}\n", "file_path": "edgelet/aziot-edged/src/provision.rs", "rank": 36, "score": 180571.75921169162 }, { "content": "fn client_new(\n\n iothub_hostname: String,\n\n\n\n device_id: &str,\n\n module_id: Option<&str>,\n\n\n\n authentication: crate::Authentication,\n\n transport: crate::Transport,\n\n\n\n will: Option<bytes::Bytes>,\n\n\n\n max_back_off: std::time::Duration,\n\n keep_alive: std::time::Duration,\n\n) -> Result<(mqtt3::Client<crate::IoSource>, usize), crate::CreateClientError> {\n\n let client_id = if let Some(module_id) = &module_id {\n\n format!(\"{}/{}\", device_id, module_id)\n\n } else {\n\n device_id.to_string()\n\n };\n\n\n", "file_path": "edge-modules/api-proxy-module/rust-sdk/azure-iot-mqtt/src/lib.rs", "rank": 37, "score": 178433.46487319595 }, { "content": 
"#[cfg(unix)]\n\nfn parse_os_release_line(line: &str) -> Option<(&str, &str)> {\n\n let line = line.trim();\n\n\n\n let mut parts = line.split('=');\n\n\n\n let key = parts\n\n .next()\n\n .expect(\"split line will have at least one part\");\n\n\n\n let value = parts.next()?;\n\n\n\n // The value is essentially a shell string, so it can be quoted in single or double quotes, and can have escaped sequences using backslash.\n\n // For simplicitly, just trim the quotes instead of implementing a full shell string grammar.\n\n let value = if (value.starts_with('\\'') && value.ends_with('\\''))\n\n || (value.starts_with('\"') && value.ends_with('\"'))\n\n {\n\n &value[1..(value.len() - 1)]\n\n } else {\n\n value\n\n };\n\n\n\n Some((key, value))\n\n}\n", "file_path": "edgelet/iotedge/src/check/additional_info.rs", "rank": 38, "score": 177461.310921776 }, { "content": "pub fn get_token_client() -> Result<TokenClient, Error> {\n\n let device_id =\n\n env::var(\"IOTEDGE_DEVICEID\").context(format!(\"Missing env var {}\", \"IOTEDGE_DEVICEID\"))?;\n\n let module_id =\n\n env::var(\"IOTEDGE_MODULEID\").context(format!(\"Missing env var {}\", \"IOTEDGE_MODULEID\"))?;\n\n let generation_id = env::var(\"IOTEDGE_MODULEGENERATIONID\")\n\n .context(format!(\"Missing env var {}\", \"IOTEDGE_MODULEGENERATIONID\"))?;\n\n let iothub_hostname = env::var(\"IOTEDGE_IOTHUBHOSTNAME\")\n\n .context(format!(\"Missing env var {}\", \"IOTEDGE_IOTHUBHOSTNAME\"))?;\n\n let workload_url = env::var(\"IOTEDGE_WORKLOADURI\")\n\n .context(format!(\"Missing env var {}\", \"IOTEDGE_WORKLOADURI\"))?;\n\n\n\n let work_load_api_client =\n\n edgelet_client::workload(&workload_url).context(\"Could not get workload client\")?;\n\n\n\n Ok(TokenClient::new(\n\n device_id,\n\n module_id,\n\n generation_id,\n\n iothub_hostname,\n", "file_path": "edge-modules/api-proxy-module/src/token_service/token_server.rs", "rank": 39, "score": 175478.57738087722 }, { "content": "/// Search a query string for the provided 
key.\n\npub fn find_query(\n\n key: &str,\n\n query: &[(std::borrow::Cow<'_, str>, std::borrow::Cow<'_, str>)],\n\n) -> Option<String> {\n\n query.iter().find_map(|q| {\n\n if q.0 == key {\n\n let value = percent_encoding::percent_decode_str(&q.1)\n\n .decode_utf8()\n\n .ok()?\n\n .to_string();\n\n\n\n Some(value)\n\n } else {\n\n None\n\n }\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "edgelet/edgelet-http/src/lib.rs", "rank": 40, "score": 173053.3369409478 }, { "content": "pub fn runtime_state(\n\n id: Option<&str>,\n\n response_state: Option<&InlineResponse200State>,\n\n) -> ModuleRuntimeState {\n\n response_state.map_or_else(ModuleRuntimeState::default, |state| {\n\n let status = state\n\n .status()\n\n .and_then(|status| match status {\n\n \"created\" | \"paused\" | \"restarting\" => Some(ModuleStatus::Stopped),\n\n \"removing\" | \"exited\" => status_from_exit_code(state.exit_code()),\n\n \"dead\" => Some(ModuleStatus::Dead),\n\n \"running\" => Some(ModuleStatus::Running),\n\n _ => None,\n\n })\n\n .unwrap_or_default();\n\n ModuleRuntimeState::default()\n\n .with_status(status)\n\n .with_exit_code(state.exit_code())\n\n .with_started_at(\n\n state\n", "file_path": "edgelet/edgelet-docker/src/module.rs", "rank": 41, "score": 173046.95836947727 }, { "content": "fn default_enabled() -> bool {\n\n true\n\n}\n\n\n", "file_path": "edgelet/edgelet-settings/src/base/image.rs", "rank": 42, "score": 170761.72683081683 }, { "content": "/// Identical to https://github.com/mehcode/config-rs/blob/0.8.0/src/file/format/yaml.rs#L32-L68\n\n/// except that it does not lower-case hash keys.\n\n///\n\n/// Unfortunately the `ValueKind` enum used by the `Value` constructor is not exported from the crate.\n\n/// It does however impl `From` for the various corresponding standard types, so this code uses those.\n\n/// The only difference is the fallback `_` case at the end.\n\nfn from_yaml_value(uri: Option<&String>, value: Yaml) -> Result<Value, ConfigError> {\n\n 
match value {\n\n Yaml::String(value) => Ok(Value::new(uri, value)),\n\n Yaml::Real(value) => {\n\n // TODO: Figure out in what cases this can fail?\n\n Ok(Value::new(\n\n uri,\n\n value\n\n .parse::<f64>()\n\n .map_err(|err| ConfigError::Foreign(Box::new(err)))?,\n\n ))\n\n }\n\n Yaml::Integer(value) => Ok(Value::new(uri, value)),\n\n Yaml::Boolean(value) => Ok(Value::new(uri, value)),\n\n Yaml::Hash(table) => {\n\n let mut m = HashMap::new();\n\n for (key, value) in table {\n\n if let Yaml::String(key) = key {\n\n m.insert(key, from_yaml_value(uri, value)?);\n\n }\n", "file_path": "edgelet/edgelet-utils/src/yaml_file_source.rs", "rank": 43, "score": 169993.45787740234 }, { "content": "fn default_managed_by() -> String {\n\n \"iotedge\".to_string()\n\n}\n", "file_path": "edgelet/edgelet-http-mgmt/src/identity/mod.rs", "rank": 44, "score": 168285.72611134147 }, { "content": "fn key_to_pem(key: &openssl::pkey::PKey<openssl::pkey::Private>) -> String {\n\n // The key parameter is always generated by this library. It should be valid.\n\n let key_pem = key.private_key_to_pem_pkcs8().expect(\"key is invalid\");\n\n\n\n let key_pem = std::str::from_utf8(&key_pem)\n\n .expect(\"key is invalid\")\n\n .to_string();\n\n\n\n key_pem\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n fn test_api() -> super::CertApi {\n\n let key_client = super::KeyClient::default();\n\n let key_client = std::sync::Arc::new(tokio::sync::Mutex::new(key_client));\n\n\n\n let cert_client = super::CertClient::default();\n\n let cert_client = std::sync::Arc::new(tokio::sync::Mutex::new(cert_client));\n\n\n", "file_path": "edgelet/edgelet-http-workload/src/module/cert/mod.rs", "rank": 45, "score": 167772.57924552797 }, { "content": "fn get_proxy_uri(arg: Option<String>) -> Option<String> {\n\n // If proxy address was passed in as command line argument, we are good\n\n if arg.is_some() {\n\n return arg;\n\n }\n\n // Proxy_address wasn't passed in on the command line. 
Pull it from the aziot-edged settings\n\n // for Edge Agent's environment variables.\n\n if let Ok(settings) = Settings::new() {\n\n if let Some(agent_proxy_uri) = settings.base.agent().env().get(\"https_proxy\") {\n\n return Some(agent_proxy_uri.clone());\n\n }\n\n }\n\n // Otherwise, pull it from the environment\n\n std::env::var(\"HTTPS_PROXY\")\n\n .ok()\n\n .or_else(|| std::env::var(\"https_proxy\").ok())\n\n}\n\n\n", "file_path": "edgelet/iotedge/src/check/mod.rs", "rank": 46, "score": 166655.9918523394 }, { "content": "pub fn start() -> Result<(JoinHandle<Result<()>>, ShutdownHandle), Error> {\n\n let shutdown_signal = Arc::new(Notify::new());\n\n let shutdown_handle = ShutdownHandle(shutdown_signal.clone());\n\n\n\n let token_server: JoinHandle<Result<()>> = tokio::spawn(async move {\n\n let token_client = get_token_client()?;\n\n let token_client = Arc::new(token_client);\n\n\n\n loop {\n\n let wait_shutdown = shutdown_signal.notified();\n\n let local_token_client = token_client.clone();\n\n\n\n let make_svc = make_service_fn(move |_conn| {\n\n let token_client_clone = local_token_client.clone();\n\n async move {\n\n Ok::<_, Error>(service_fn(move |req| {\n\n server_callback(req, token_client_clone.clone())\n\n }))\n\n }\n\n });\n", "file_path": "edge-modules/api-proxy-module/src/token_service/token_server.rs", "rank": 47, "score": 166301.77289132858 }, { "content": "struct CertApi {\n\n key_client: std::sync::Arc<tokio::sync::Mutex<KeyClient>>,\n\n cert_client: std::sync::Arc<tokio::sync::Mutex<CertClient>>,\n\n\n\n edge_ca_cert: String,\n\n edge_ca_key: String,\n\n}\n\n\n\nimpl CertApi {\n\n pub fn new(\n\n key_client: std::sync::Arc<tokio::sync::Mutex<KeyClient>>,\n\n cert_client: std::sync::Arc<tokio::sync::Mutex<CertClient>>,\n\n config: &crate::WorkloadConfig,\n\n ) -> Self {\n\n CertApi {\n\n key_client,\n\n cert_client,\n\n edge_ca_cert: config.edge_ca_cert.clone(),\n\n edge_ca_key: config.edge_ca_key.clone(),\n\n }\n", "file_path": 
"edgelet/edgelet-http-workload/src/module/cert/mod.rs", "rank": 48, "score": 158864.61343324598 }, { "content": "pub fn write_binary_to_file<P: AsRef<Path>>(content: &[u8], path: P) -> Result<()> {\n\n let mut f = File::create(path).context(\"Cannot create file\")?;\n\n f.write_all(content)\n\n .context(\"File: Cannot write to file \")?;\n\n f.sync_data().context(\"File: cannot sync data\")?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "edge-modules/api-proxy-module/src/utils/file.rs", "rank": 49, "score": 157170.42426605744 }, { "content": "// Copyright (c) Microsoft. All rights reserved.\n\nnamespace Microsoft.Azure.Devices.Edge.Test.Common.Certs\n\n{\n\n using System.Collections.Generic;\n\n using System.IO;\n\n using System.Security.Cryptography.X509Certificates;\n\n using Microsoft.Azure.Devices.Edge.Util;\n\n\n\n public class CaCertificates : IdCertificates\n\n {\n\n public string TrustedCertificatesPath { get; }\n\n public Option<Dictionary<string, string>> ContentTrustInputs { get; }\n\n\n\n public IEnumerable<X509Certificate2> TrustedCertificates =>\n\n new[]\n\n {\n\n new X509Certificate2(X509Certificate.CreateFromCertFile(this.TrustedCertificatesPath))\n\n };\n\n\n\n string[] GetEdgeCertFileLocation(string deviceId)\n", "file_path": "test/Microsoft.Azure.Devices.Edge.Test.Common/certs/CaCertificates.cs", "rank": 50, "score": 156896.2311377535 }, { "content": " {\n\n return new[]\n\n {\n\n FixedPaths.DeviceCaCert.Cert(deviceId),\n\n FixedPaths.DeviceCaCert.Key(deviceId),\n\n FixedPaths.DeviceCaCert.TrustCert\n\n };\n\n }\n\n\n\n public CaCertificates(string deviceId, string scriptPath)\n\n {\n\n var location = this.GetEdgeCertFileLocation(deviceId);\n\n var files = OsPlatform.NormalizeFiles(location, scriptPath);\n\n this.CertificatePath = files[0];\n\n this.KeyPath = files[1];\n\n this.TrustedCertificatesPath = files[2];\n\n }\n\n\n\n public CaCertificates(string certificatePath, string keyPath, string trustedCertsPath)\n\n {\n", "file_path": 
"test/Microsoft.Azure.Devices.Edge.Test.Common/certs/CaCertificates.cs", "rank": 51, "score": 156890.36111396426 }, { "content": " Preconditions.CheckArgument(File.Exists(certificatePath));\n\n Preconditions.CheckArgument(File.Exists(keyPath));\n\n Preconditions.CheckArgument(File.Exists(trustedCertsPath));\n\n this.CertificatePath = certificatePath;\n\n this.KeyPath = keyPath;\n\n this.TrustedCertificatesPath = trustedCertsPath;\n\n }\n\n }\n\n}\n", "file_path": "test/Microsoft.Azure.Devices.Edge.Test.Common/certs/CaCertificates.cs", "rank": 52, "score": 156866.22819480073 }, { "content": "fn identity_cert_extensions(\n\n) -> Result<openssl::stack::Stack<openssl::x509::X509Extension>, openssl::error::ErrorStack> {\n\n let mut csr_extensions = openssl::stack::Stack::new()?;\n\n\n\n let mut ext_key_usage = openssl::x509::extension::ExtendedKeyUsage::new();\n\n ext_key_usage.client_auth();\n\n\n\n let ext_key_usage = ext_key_usage.build()?;\n\n csr_extensions.push(ext_key_usage)?;\n\n\n\n Ok(csr_extensions)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use http_common::server::Route;\n\n\n\n use edgelet_test_utils::{test_route_err, test_route_ok};\n\n\n\n const TEST_PATH: &str = \"/modules/testModule/certificate/identity\";\n", "file_path": "edgelet/edgelet-http-workload/src/module/cert/identity.rs", "rank": 53, "score": 156133.24937554315 }, { "content": "fn server_cert_extensions(\n\n) -> Result<openssl::stack::Stack<openssl::x509::X509Extension>, openssl::error::ErrorStack> {\n\n let mut csr_extensions = openssl::stack::Stack::new()?;\n\n\n\n let mut ext_key_usage = openssl::x509::extension::ExtendedKeyUsage::new();\n\n ext_key_usage.server_auth();\n\n\n\n let ext_key_usage = ext_key_usage.build()?;\n\n csr_extensions.push(ext_key_usage)?;\n\n\n\n Ok(csr_extensions)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::module::cert::CertificateResponse;\n\n use http_common::server::Route;\n\n\n\n use edgelet_test_utils::{test_route_err, test_route_ok};\n\n\n", 
"file_path": "edgelet/edgelet-http-workload/src/module/cert/server.rs", "rank": 54, "score": 156133.24937554315 }, { "content": "fn humanize_status(status: &ModuleStatus) -> String {\n\n let status_enum = status.runtime_status.status.parse().unwrap_or_default();\n\n match status_enum {\n\n ModuleStatusEnum::Unknown => \"Unknown\".to_string(),\n\n ModuleStatusEnum::Stopped | ModuleStatusEnum::Dead => {\n\n if let Some(exit_status) = &status.exit_status {\n\n if let Ok(time) = DateTime::parse_from_rfc3339(&exit_status.exit_time) {\n\n return format!(\"Stopped {}\", format_time(time, Tense::Past));\n\n }\n\n }\n\n\n\n \"Stopped\".to_string()\n\n }\n\n ModuleStatusEnum::Failed => {\n\n if let Some(exit_status) = &status.exit_status {\n\n if let Ok(time) = DateTime::parse_from_rfc3339(&exit_status.exit_time) {\n\n return format!(\n\n \"Failed ({}) {}\",\n\n exit_status.status_code,\n\n format_time(time, Tense::Past)\n", "file_path": "edgelet/iotedge/src/list.rs", "rank": 55, "score": 155479.76293525426 }, { "content": "// Constructs the http::Extensions containing this process ID, similar to what\n\n// http-common does.\n\npub fn extensions() -> http::Extensions {\n\n let pid = nix::unistd::getpid();\n\n\n\n let mut extensions = http::Extensions::new();\n\n assert!(extensions.insert(Some(pid.as_raw())).is_none());\n\n\n\n extensions\n\n}\n", "file_path": "edgelet/edgelet-test-utils/src/route.rs", "rank": 56, "score": 155417.59522101464 }, { "content": "#[cfg(unix)]\n\nfn pretty_kbyte(bytes: u64) -> String {\n\n #[allow(clippy::cast_precision_loss)]\n\n match Byte::from_unit(bytes as f64, ByteUnit::B) {\n\n Ok(b) => b.get_appropriate_unit(true).format(2),\n\n Err(err) => format!(\"could not parse bytes value: {err:?}\"),\n\n }\n\n}\n", "file_path": "edgelet/iotedge/src/check/additional_info.rs", "rank": 57, "score": 153209.42848985211 }, { "content": "#[derive(Debug)]\n\nstruct RunOutput {\n\n certd_config: String,\n\n identityd_config: String,\n\n keyd_config: 
String,\n\n tpmd_config: String,\n\n edged_config: String,\n\n preloaded_device_id_pk_bytes: Option<Vec<u8>>,\n\n preloaded_master_encryption_key_bytes: Option<Vec<u8>>,\n\n}\n\n\n\nasync fn execute_inner(\n\n config: &std::path::Path,\n\n aziotcs_uid: nix::unistd::Uid,\n\n aziotid_uid: nix::unistd::Uid,\n\n iotedge_uid: nix::unistd::Uid,\n\n) -> Result<RunOutput, std::borrow::Cow<'static, str>> {\n\n let config = std::fs::read(config)\n\n .map_err(|err| format!(\"could not read config file {}: {err}\", config.display()))?;\n\n let config =\n\n std::str::from_utf8(&config).map_err(|err| format!(\"error parsing config: {err}\"))?;\n", "file_path": "edgelet/iotedge/src/config/apply.rs", "rank": 58, "score": 151088.79643475427 }, { "content": "fn duration_from_millis_str(\n\n s: &str,\n\n) -> Result<std::time::Duration, <u64 as std::str::FromStr>::Err> {\n\n Ok(std::time::Duration::from_millis(s.parse()?))\n\n}\n", "file_path": "mqtt/mqtt3/examples/publisher.rs", "rank": 59, "score": 150821.22906381945 }, { "content": "#[derive(Clone, Debug)]\n\n#[cfg_attr(test, derive(PartialEq))]\n\nstruct WorkloadConfig {\n\n hub_name: String,\n\n device_id: String,\n\n\n\n trust_bundle: String,\n\n manifest_trust_bundle: String,\n\n\n\n edge_ca_cert: String,\n\n edge_ca_key: String,\n\n edge_ca_auto_renew: Option<cert_renewal::AutoRenewConfig>,\n\n edge_ca_subject: aziot_certd_config::CertSubject,\n\n}\n\n\n\nimpl WorkloadConfig {\n\n pub fn new(\n\n settings: &impl edgelet_settings::RuntimeSettings,\n\n device_info: &aziot_identity_common::AzureIoTSpec,\n\n ) -> Self {\n\n let trust_bundle = settings\n\n .trust_bundle_cert()\n", "file_path": "edgelet/edgelet-http-workload/src/lib.rs", "rank": 60, "score": 148511.80741927208 }, { "content": "fn execute_inner(\n\n old_config_file: &Path,\n\n old_master_encryption_key_path: Option<PathBuf>,\n\n) -> Result<String, std::borrow::Cow<'static, str>> {\n\n let old_config_file_display = old_config_file.display();\n\n\n\n let 
old_config_contents = match std::fs::read_to_string(old_config_file) {\n\n Ok(old_config) => old_config,\n\n Err(err) => match err.kind() {\n\n std::io::ErrorKind::NotFound => {\n\n return Err(format!(\n\n \"there is no old config at {old_config_file_display} available to migrate\"\n\n )\n\n .into())\n\n }\n\n _ => return Err(format!(\"could not open {old_config_file_display}: {err}\").into()),\n\n },\n\n };\n\n\n\n let old_config: old_config::Config = {\n", "file_path": "edgelet/iotedge/src/config/import/mod.rs", "rank": 61, "score": 147837.10062543637 }, { "content": " class StringKeyComparer : IEqualityComparer<string>\n\n {\n\n internal static readonly StringKeyComparer DefaultStringKeyComparer = new StringKeyComparer(s => s.Split(new[] { '=' }, 2)[0]);\n\n readonly Func<string, string> keySelector;\n\n\n\n internal StringKeyComparer(Func<string, string> keySelector)\n\n {\n\n Preconditions.CheckNotNull(keySelector, nameof(keySelector));\n\n this.keySelector = s => s == null ? null : keySelector(s);\n\n }\n\n\n\n public bool Equals(string x, string y) => this.keySelector(x) == this.keySelector(y);\n\n\n\n public int GetHashCode(string obj) => this.keySelector(obj)?.GetHashCode() ?? 
0;\n\n }\n\n}\n", "file_path": "edge-util/src/Microsoft.Azure.Devices.Edge.Util/LinqEx.cs", "rank": 62, "score": 146020.84248914965 }, { "content": "#[derive(serde::Deserialize)]\n\nstruct DaemonConfig {\n\n ipv6: Option<bool>,\n\n}\n", "file_path": "edgelet/iotedge/src/check/checks/container_engine_ipv6.rs", "rank": 63, "score": 143684.96293647873 }, { "content": "#[derive(serde::Deserialize)]\n\nstruct DaemonConfig {\n\n dns: Option<Vec<String>>,\n\n}\n", "file_path": "edgelet/iotedge/src/check/checks/container_engine_dns.rs", "rank": 64, "score": 143684.96293647873 }, { "content": "#[derive(serde::Deserialize, serde::Serialize, Clone)]\n\nstruct DaemonConfig {\n\n #[serde(rename = \"log-driver\")]\n\n log_driver: Option<String>,\n\n\n\n #[serde(rename = \"log-opts\")]\n\n log_opts: Option<DaemonConfigLogOpts>,\n\n}\n\n\n", "file_path": "edgelet/iotedge/src/check/checks/container_engine_logrotate.rs", "rank": 65, "score": 143684.96293647873 }, { "content": "fn agent_env(\n\n gen_id: String,\n\n settings: &edgelet_settings::docker::Settings,\n\n device_info: &aziot_identity_common::AzureIoTSpec,\n\n) -> std::collections::BTreeMap<String, String> {\n\n let mut env = std::collections::BTreeMap::new();\n\n\n\n env.insert(\n\n \"EdgeDeviceHostName\".to_string(),\n\n settings.hostname().to_string(),\n\n );\n\n\n\n env.insert(\n\n \"IOTEDGE_APIVERSION\".to_string(),\n\n edgelet_http::ApiVersion::V2022_08_03.to_string(),\n\n );\n\n\n\n env.insert(\"IOTEDGE_AUTHSCHEME\".to_string(), \"sasToken\".to_string());\n\n\n\n env.insert(\n", "file_path": "edgelet/aziot-edged/src/watchdog.rs", "rank": 66, "score": 142771.26887121136 }, { "content": "fn agent_labels(settings: &mut crate::docker::Settings) {\n\n let create_options = settings.agent().config().create_options().clone();\n\n\n\n let mut labels = create_options.labels().cloned().unwrap_or_default();\n\n\n\n // IoT Edge reserves the label prefix \"net.azure-devices.edge\" for its own purposes\n\n // so we'll simply 
overwrite any matching labels created by the user.\n\n labels.insert(\n\n \"net.azure-devices.edge.create-options\".to_string(),\n\n \"{}\".to_string(),\n\n );\n\n labels.insert(\"net.azure-devices.edge.env\".to_string(), \"{}\".to_string());\n\n labels.insert(\n\n \"net.azure-devices.edge.original-image\".to_string(),\n\n settings.agent().config().image().to_string(),\n\n );\n\n labels.insert(\n\n \"net.azure-devices.edge.owner\".to_string(),\n\n \"Microsoft.Azure.Devices.Edge.Agent\".to_string(),\n\n );\n\n\n\n let create_options = create_options.with_labels(labels);\n\n\n\n settings\n\n .agent_mut()\n\n .config_mut()\n\n .set_create_options(create_options);\n\n}\n", "file_path": "edgelet/edgelet-settings/src/docker/init.rs", "rank": 67, "score": 140799.58329883812 }, { "content": "fn agent_env(settings: &mut crate::docker::Settings) {\n\n let network_id = settings.moby_runtime().network().name().to_string();\n\n settings\n\n .agent_mut()\n\n .env_mut()\n\n .insert(\"NetworkId\".to_string(), network_id);\n\n}\n\n\n", "file_path": "edgelet/edgelet-settings/src/docker/init.rs", "rank": 68, "score": 140799.58329883812 }, { "content": "fn agent_networking(settings: &mut crate::docker::Settings) {\n\n let network_id = settings.moby_runtime().network().name().to_string();\n\n\n\n let create_options = settings.agent().config().create_options().clone();\n\n\n\n let mut network_config = create_options\n\n .networking_config()\n\n .cloned()\n\n .unwrap_or_else(docker::models::ContainerCreateBodyNetworkingConfig::new);\n\n\n\n let mut endpoints_config = network_config\n\n .endpoints_config()\n\n .cloned()\n\n .unwrap_or_default();\n\n\n\n if !endpoints_config.contains_key(network_id.as_str()) {\n\n endpoints_config.insert(network_id, docker::models::EndpointSettings::new());\n\n network_config = network_config.with_endpoints_config(endpoints_config);\n\n let create_options = create_options.with_networking_config(network_config);\n\n\n\n settings\n\n .agent_mut()\n\n 
.config_mut()\n\n .set_create_options(create_options);\n\n }\n\n}\n\n\n", "file_path": "edgelet/edgelet-settings/src/docker/init.rs", "rank": 69, "score": 140799.58329883812 }, { "content": "fn print_verbose<S>(message: S, verbose: bool)\n\nwhere\n\n S: std::fmt::Display,\n\n{\n\n if verbose {\n\n println!(\"{message}\");\n\n }\n\n}\n", "file_path": "edgelet/support-bundle/src/shell_util.rs", "rank": 70, "score": 140792.71970425895 }, { "content": "fn set_signal_handlers(\n\n shutdown_tx: tokio::sync::mpsc::UnboundedSender<edgelet_core::WatchdogAction>,\n\n) {\n\n // Set the signal handler to listen for CTRL+C (SIGINT).\n\n let sigint_sender = shutdown_tx.clone();\n\n\n\n tokio::spawn(async move {\n\n tokio::signal::ctrl_c()\n\n .await\n\n .expect(\"cannot fail to set signal handler\");\n\n\n\n // Failure to send the shutdown signal means that the mpsc queue is closed.\n\n // Ignore this Result, as the process will be shutting down anyways.\n\n let _ = sigint_sender.send(edgelet_core::WatchdogAction::Signal);\n\n });\n\n\n\n // Set the signal handler to listen for systemctl stop (SIGTERM).\n\n let mut sigterm_stream =\n\n tokio::signal::unix::signal(tokio::signal::unix::SignalKind::terminate())\n\n .expect(\"cannot fail to set signal handler\");\n", "file_path": "edgelet/aziot-edged/src/main.rs", "rank": 71, "score": 140363.94905117588 }, { "content": "fn check_agent_image_version_nested(agent_image: &str) -> CheckResult {\n\n // We don't match the repo mcr.microsoft.com because in nested edge we expect the repo to be $upstream:443\n\n //\n\n // If the image spec doesn't match what we expected, it's a custom image, and we can't make\n\n // any determination of whether it's the right version or not. 
In that case we assume it is right.\n\n\n\n let re = Regex::new(r\".*?/azureiotedge-agent:(?P<Major>\\d+)\\.(?P<Minor>\\d+).*\")\n\n .expect(\"hard-coded regex cannot fail to parse\");\n\n\n\n if let Some(caps) = re.captures(agent_image) {\n\n let major = caps\n\n .name(\"Major\")\n\n .and_then(|version| version.as_str().parse::<u32>().ok());\n\n let minor = caps\n\n .name(\"Minor\")\n\n .and_then(|version| version.as_str().parse::<u32>().ok());\n\n\n\n if let (Some(major), Some(minor)) = (major, minor) {\n\n if major < 1 || (major == 1) && (minor < 2) {\n\n return CheckResult::Failed(anyhow::anyhow!(\n", "file_path": "edgelet/iotedge/src/check/checks/check_agent_image.rs", "rank": 72, "score": 139665.22755832959 }, { "content": "#[derive(serde::Deserialize, serde::Serialize, Clone)]\n\nstruct DaemonConfigLogOpts {\n\n #[serde(rename = \"max-file\")]\n\n max_file: Option<String>,\n\n\n\n #[serde(rename = \"max-size\")]\n\n max_size: Option<String>,\n\n}\n", "file_path": "edgelet/iotedge/src/check/checks/container_engine_logrotate.rs", "rank": 73, "score": 139250.0278729301 }, { "content": "fn nginx_command(\n\n proxy_name: &str,\n\n program_path: &str,\n\n proxy_command: &[String],\n\n command_name: &str,\n\n) -> Result<Child, Error> {\n\n Command::new(program_path)\n\n .args(proxy_command)\n\n .stdout(Stdio::inherit())\n\n .spawn()\n\n .with_context(|| format!(\"Failed to {} {}\", command_name, proxy_name))\n\n .context(\"Cannot execute command\")\n\n}\n", "file_path": "edge-modules/api-proxy-module/src/main.rs", "rank": 74, "score": 138060.69602369357 }, { "content": "fn get_ipv6_settings(network_configuration: &MobyNetwork) -> (bool, Option<Ipam>) {\n\n if let MobyNetwork::Network(network) = network_configuration {\n\n let ipv6 = network.ipv6().unwrap_or_default();\n\n network.ipam().and_then(CoreIpam::config).map_or_else(\n\n || (ipv6, None),\n\n |ipam_config| {\n\n let config = ipam_config\n\n .iter()\n\n .map(|ipam_config| {\n\n let mut config_map = 
HashMap::new();\n\n if let Some(gateway_config) = ipam_config.gateway() {\n\n config_map.insert(\"Gateway\".to_string(), gateway_config.to_string());\n\n };\n\n\n\n if let Some(subnet_config) = ipam_config.subnet() {\n\n config_map.insert(\"Subnet\".to_string(), subnet_config.to_string());\n\n };\n\n\n\n if let Some(ip_range_config) = ipam_config.ip_range() {\n\n config_map.insert(\"IPRange\".to_string(), ip_range_config.to_string());\n", "file_path": "edgelet/edgelet-docker/src/runtime.rs", "rank": 75, "score": 136849.3698838108 }, { "content": "fn should_skip_instead(upp: UpstreamProtocolPort, up: UpstreamProtocol) -> bool {\n\n match upp {\n\n UpstreamProtocolPort::Amqp => matches!(up, UpstreamProtocol::Mqtt),\n\n UpstreamProtocolPort::Https => {\n\n matches!(up, UpstreamProtocol::Amqp | UpstreamProtocol::Mqtt)\n\n }\n\n UpstreamProtocolPort::Mqtt => matches!(up, UpstreamProtocol::Amqp),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\n#[allow(clippy::bool_assert_comparison)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn should_skip_instead_is_true_if_testing_amqp_and_protocol_is_mqtt() {\n\n assert_eq!(\n\n should_skip_instead(UpstreamProtocolPort::Amqp, UpstreamProtocol::Mqtt),\n\n true\n", "file_path": "edgelet/iotedge/src/check/checks/container_connect_upstream.rs", "rank": 76, "score": 133196.6941379681 }, { "content": "fn format_time<Tz>(time: DateTime<Tz>, tense: Tense) -> String\n\nwhere\n\n Tz: TimeZone,\n\n{\n\n let ht = HumanTime::from(Utc::now().signed_duration_since(time));\n\n if ht <= HumanTime::from(Duration::seconds(20)) {\n\n ht.to_text_en(Accuracy::Precise, tense)\n\n } else {\n\n ht.to_text_en(Accuracy::Rough, tense)\n\n }\n\n}\n", "file_path": "edgelet/iotedge/src/list.rs", "rank": 77, "score": 132987.21779473405 }, { "content": " class EdgeHubExtensionConfigProvider : IExtensionConfigProvider\n\n {\n\n public void Initialize(ExtensionConfigContext context)\n\n {\n\n if (context == null)\n\n {\n\n throw new 
ArgumentNullException(nameof(context));\n\n }\n\n\n\n var bindingProvider = new EdgeHubTriggerBindingProvider();\n\n var rule = context.AddBindingRule<EdgeHubTriggerAttribute>();\n\n rule.AddConverter<Message, string>(ConvertMessageToString);\n\n rule.AddConverter<Message, byte[]>(ConvertMessageToBytes);\n\n rule.BindToTrigger<Message>(bindingProvider);\n\n\n\n var rule2 = context.AddBindingRule<EdgeHubAttribute>();\n\n rule2.BindToCollector<Message>(typeof(EdgeHubCollectorBuilder));\n\n rule2.AddConverter<string, Message>(ConvertStringToMessage);\n\n rule2.AddConverter<byte[], Message>(ConvertBytesToMessage);\n\n rule2.AddOpenConverter<OpenType.Poco, Message>(this.ConvertPocoToMessage);\n", "file_path": "edge-modules/functions/binding/src/Microsoft.Azure.WebJobs.Extensions.EdgeHub/config/EdgeHubExtensionConfigProvider.cs", "rank": 78, "score": 132935.2940706186 }, { "content": "// Copyright (c) Microsoft. All rights reserved.\n\nnamespace Microsoft.Azure.Devices.Edge.Hub.Core.Config\n\n{\n\n using System;\n\n using System.Collections.Generic;\n\n using Microsoft.Azure.Devices.Edge.Util;\n\n using Microsoft.Azure.Devices.Edge.Util.Json;\n\n using Newtonsoft.Json;\n\n\n\n /// <summary>\n\n /// Domain object that represents EdgeHub configuration.\n\n ///\n\n /// This object is being constructed from the EdgeHub twin's desired properties.\n\n /// See <see cref=\"EdgeHubDesiredProperties\"/> for DTO.\n\n /// </summary>\n\n public class EdgeHubConfig : IEquatable<EdgeHubConfig>\n\n {\n\n public EdgeHubConfig(\n\n string schemaVersion,\n\n IReadOnlyDictionary<string, RouteConfig> routes,\n", "file_path": "edge-hub/core/src/Microsoft.Azure.Devices.Edge.Hub.Core/config/EdgeHubConfig.cs", "rank": 79, "score": 129192.19744965112 }, { "content": " [JsonConverter(typeof(OptionConverter<ManifestIntegrity>))]\n\n public Option<ManifestIntegrity> Integrity { get; }\n\n\n\n public static bool operator ==(EdgeHubConfig left, EdgeHubConfig right) => Equals(left, right);\n\n\n\n 
public static bool operator !=(EdgeHubConfig left, EdgeHubConfig right) => !Equals(left, right);\n\n\n\n public bool Equals(EdgeHubConfig other)\n\n {\n\n if (ReferenceEquals(null, other))\n\n {\n\n return false;\n\n }\n\n\n\n if (ReferenceEquals(this, other))\n\n {\n\n return true;\n\n }\n\n\n\n return string.Equals(this.SchemaVersion, other.SchemaVersion, StringComparison.OrdinalIgnoreCase)\n", "file_path": "edge-hub/core/src/Microsoft.Azure.Devices.Edge.Hub.Core/config/EdgeHubConfig.cs", "rank": 80, "score": 129191.60505231544 }, { "content": " && new ReadOnlyDictionaryComparer<string, RouteConfig>().Equals(this.Routes, other.Routes)\n\n && Equals(this.StoreAndForwardConfiguration, other.StoreAndForwardConfiguration)\n\n && Equals(this.BrokerConfiguration, other.BrokerConfiguration)\n\n && Equals(this.Integrity, other.Integrity);\n\n }\n\n\n\n public override bool Equals(object obj)\n\n => this.Equals(obj as EdgeHubConfig);\n\n\n\n public override int GetHashCode()\n\n {\n\n unchecked\n\n {\n\n int hashCode = this.SchemaVersion != null ? StringComparer.OrdinalIgnoreCase.GetHashCode(this.SchemaVersion) : 0;\n\n hashCode = (hashCode * 397) ^ (this.Routes?.GetHashCode() ?? 0);\n\n hashCode = (hashCode * 397) ^ (this.StoreAndForwardConfiguration?.GetHashCode() ?? 
0);\n\n hashCode = (hashCode * 397) ^ this.BrokerConfiguration.GetHashCode();\n\n hashCode = (hashCode * 397) ^ this.Integrity.GetHashCode();\n\n return hashCode;\n\n }\n\n }\n\n }\n\n}\n", "file_path": "edge-hub/core/src/Microsoft.Azure.Devices.Edge.Hub.Core/config/EdgeHubConfig.cs", "rank": 81, "score": 129185.23272766538 }, { "content": " StoreAndForwardConfiguration storeAndForwardConfiguration,\n\n Option<BrokerConfig> brokerConfiguration,\n\n Option<ManifestIntegrity> integrity)\n\n {\n\n this.SchemaVersion = Preconditions.CheckNonWhiteSpace(schemaVersion, nameof(schemaVersion));\n\n this.Routes = Preconditions.CheckNotNull(routes, nameof(routes));\n\n this.StoreAndForwardConfiguration = Preconditions.CheckNotNull(storeAndForwardConfiguration, nameof(storeAndForwardConfiguration));\n\n this.BrokerConfiguration = brokerConfiguration;\n\n this.Integrity = integrity;\n\n }\n\n\n\n public string SchemaVersion { get; }\n\n\n\n public IReadOnlyDictionary<string, RouteConfig> Routes { get; }\n\n\n\n public StoreAndForwardConfiguration StoreAndForwardConfiguration { get; }\n\n\n\n public Option<BrokerConfig> BrokerConfiguration { get; }\n\n\n\n [JsonProperty(\"integrity\", DefaultValueHandling = DefaultValueHandling.IgnoreAndPopulate)]\n", "file_path": "edge-hub/core/src/Microsoft.Azure.Devices.Edge.Hub.Core/config/EdgeHubConfig.cs", "rank": 82, "score": 129175.29856120021 }, { "content": "fn used_memory_bytes(system_resources: &System) -> u64 {\n\n system_resources.used_memory()\n\n}\n\n\n", "file_path": "edgelet/edgelet-docker/src/runtime.rs", "rank": 83, "score": 127890.41362616491 }, { "content": " }\n\n }\n\n else\n\n {\n\n throw new InvalidSchemaVersionException(\"EdgeHub config missing SchemaVersion\");\n\n }\n\n\n\n return edgeHubConfig;\n\n }\n\n\n\n public EdgeHubConfig GetEdgeHubConfig(EdgeHubDesiredProperties_1_0 desiredProperties)\n\n {\n\n Preconditions.CheckNotNull(desiredProperties, nameof(desiredProperties));\n\n\n\n var routes = new 
Dictionary<string, RouteConfig>();\n\n if (desiredProperties.Routes != null)\n\n {\n\n foreach (KeyValuePair<string, string> inputRoute in desiredProperties.Routes)\n\n {\n\n try\n", "file_path": "edge-hub/core/src/Microsoft.Azure.Devices.Edge.Hub.Core/config/EdgeHubConfigParser.cs", "rank": 84, "score": 127768.59724894041 }, { "content": "\n\n return new EdgeHubConfig(\n\n desiredProperties.SchemaVersion,\n\n routes,\n\n desiredProperties.StoreAndForwardConfiguration,\n\n brokerConfig,\n\n Option.None<ManifestIntegrity>());\n\n }\n\n\n\n public EdgeHubConfig GetEdgeHubConfig(EdgeHubDesiredProperties_1_3 desiredProperties)\n\n {\n\n Preconditions.CheckNotNull(desiredProperties, nameof(desiredProperties));\n\n ReadOnlyDictionary<string, RouteConfig> routes = ParseRoutesWithPriority(desiredProperties.Routes, this.routeFactory);\n\n Option<BrokerConfig> brokerConfig = this.ParseBrokerConfig(desiredProperties.BrokerConfiguration);\n\n\n\n return new EdgeHubConfig(\n\n desiredProperties.SchemaVersion,\n\n routes,\n\n desiredProperties.StoreAndForwardConfiguration,\n\n brokerConfig,\n", "file_path": "edge-hub/core/src/Microsoft.Azure.Devices.Edge.Hub.Core/config/EdgeHubConfigParser.cs", "rank": 85, "score": 127765.14574081766 }, { "content": " }\n\n\n\n public EdgeHubConfig GetEdgeHubConfig(EdgeHubDesiredProperties_1_1 desiredProperties)\n\n {\n\n Preconditions.CheckNotNull(desiredProperties, nameof(desiredProperties));\n\n ReadOnlyDictionary<string, RouteConfig> routes = ParseRoutesWithPriority(desiredProperties.Routes, this.routeFactory);\n\n\n\n return new EdgeHubConfig(\n\n desiredProperties.SchemaVersion,\n\n routes,\n\n desiredProperties.StoreAndForwardConfiguration,\n\n Option.None<BrokerConfig>(),\n\n Option.None<ManifestIntegrity>());\n\n }\n\n\n\n public EdgeHubConfig GetEdgeHubConfig(EdgeHubDesiredProperties_1_2 desiredProperties)\n\n {\n\n Preconditions.CheckNotNull(desiredProperties, nameof(desiredProperties));\n\n ReadOnlyDictionary<string, RouteConfig> 
routes = ParseRoutesWithPriority(desiredProperties.Routes, this.routeFactory);\n\n Option<BrokerConfig> brokerConfig = this.ParseBrokerConfig(desiredProperties.BrokerConfiguration);\n", "file_path": "edge-hub/core/src/Microsoft.Azure.Devices.Edge.Hub.Core/config/EdgeHubConfigParser.cs", "rank": 86, "score": 127764.49999265422 }, { "content": " try\n\n {\n\n version = new Version(token.ToString());\n\n }\n\n catch (Exception e)\n\n {\n\n throw new InvalidSchemaVersionException($\"Error parsing schema version string: {token}, Exception: {e.Message}\");\n\n }\n\n\n\n // Parse the JSON for 1.x\n\n if (version.Major == Core.Constants.SchemaVersion_1_0.Major)\n\n {\n\n if (version.Minor == Core.Constants.SchemaVersion_1_0.Minor)\n\n {\n\n var desiredProperties = JsonConvert.DeserializeObject<EdgeHubDesiredProperties_1_0>(twinJson);\n\n edgeHubConfig = this.GetEdgeHubConfig(desiredProperties);\n\n }\n\n else if (version.Minor == Core.Constants.SchemaVersion_1_1.Minor)\n\n {\n\n var desiredProperties = JsonConvert.DeserializeObject<EdgeHubDesiredProperties_1_1>(twinJson);\n", "file_path": "edge-hub/core/src/Microsoft.Azure.Devices.Edge.Hub.Core/config/EdgeHubConfigParser.cs", "rank": 87, "score": 127764.23168406199 }, { "content": " {\n\n if (!string.IsNullOrWhiteSpace(inputRoute.Value))\n\n {\n\n Route route = this.routeFactory.Create(inputRoute.Value);\n\n routes.Add(inputRoute.Key, new RouteConfig(inputRoute.Key, inputRoute.Value, route));\n\n }\n\n }\n\n catch (Exception ex)\n\n {\n\n throw new InvalidOperationException($\"Error parsing route {inputRoute.Key} - {ex.Message}\", ex);\n\n }\n\n }\n\n }\n\n\n\n return new EdgeHubConfig(\n\n desiredProperties.SchemaVersion,\n\n new ReadOnlyDictionary<string, RouteConfig>(routes),\n\n desiredProperties.StoreAndForwardConfiguration,\n\n Option.None<BrokerConfig>(),\n\n Option.None<ManifestIntegrity>());\n", "file_path": "edge-hub/core/src/Microsoft.Azure.Devices.Edge.Hub.Core/config/EdgeHubConfigParser.cs", "rank": 88, 
"score": 127764.14284036629 }, { "content": "// Copyright (c) Microsoft. All rights reserved.\n\nnamespace Microsoft.Azure.Devices.Edge.Hub.Core.Config\n\n{\n\n using System;\n\n using System.Collections.Generic;\n\n using System.Collections.ObjectModel;\n\n using Microsoft.Azure.Devices.Edge.Hub.Core;\n\n using Microsoft.Azure.Devices.Edge.Util;\n\n using Microsoft.Azure.Devices.Routing.Core;\n\n using Microsoft.Extensions.Logging;\n\n using Newtonsoft.Json;\n\n using Newtonsoft.Json.Linq;\n\n\n\n /// <summary>\n\n /// Creates EdgeHubConfig out of EdgeHubDesiredProperties. Also validates the\n\n /// desired properties. Throws an exception if validation failed.\n\n /// </summary>\n\n public class EdgeHubConfigParser\n\n {\n\n static readonly ILogger Log = Logger.Factory.CreateLogger<EdgeHubConfigParser>();\n", "file_path": "edge-hub/core/src/Microsoft.Azure.Devices.Edge.Hub.Core/config/EdgeHubConfigParser.cs", "rank": 89, "score": 127763.28668809214 }, { "content": " edgeHubConfig = this.GetEdgeHubConfig(desiredProperties);\n\n }\n\n else if (version.Minor == Core.Constants.SchemaVersion_1_2.Minor)\n\n {\n\n var desiredProperties = JsonConvert.DeserializeObject<EdgeHubDesiredProperties_1_2>(twinJson);\n\n edgeHubConfig = this.GetEdgeHubConfig(desiredProperties);\n\n }\n\n else if (version.Minor == Core.Constants.SchemaVersion_1_3.Minor)\n\n {\n\n var desiredProperties = JsonConvert.DeserializeObject<EdgeHubDesiredProperties_1_3>(twinJson);\n\n edgeHubConfig = this.GetEdgeHubConfig(desiredProperties);\n\n }\n\n else\n\n {\n\n throw new InvalidSchemaVersionException($\"EdgeHub config contains unsupported SchemaVersion: {version}\");\n\n }\n\n }\n\n else\n\n {\n\n throw new InvalidSchemaVersionException($\"EdgeHub config contains unsupported SchemaVersion: {version}\");\n", "file_path": "edge-hub/core/src/Microsoft.Azure.Devices.Edge.Hub.Core/config/EdgeHubConfigParser.cs", "rank": 90, "score": 127761.77515884372 }, { "content": " desiredProperties.Integrity);\n\n 
}\n\n\n\n static ReadOnlyDictionary<string, RouteConfig> ParseRoutesWithPriority(IDictionary<string, RouteSpec> routeSpecs, RouteFactory routeFactory)\n\n {\n\n var routes = new Dictionary<string, RouteConfig>();\n\n foreach (KeyValuePair<string, RouteSpec> inputRoute in routeSpecs)\n\n {\n\n try\n\n {\n\n Route route = routeFactory.Create(inputRoute.Value.Route, inputRoute.Value.Priority, inputRoute.Value.TimeToLiveSecs);\n\n routes.Add(inputRoute.Key, new RouteConfig(inputRoute.Key, inputRoute.Value.Route, route));\n\n }\n\n catch (Exception ex)\n\n {\n\n throw new InvalidOperationException($\"Error parsing route {inputRoute.Key} - {ex.Message}\", ex);\n\n }\n\n }\n\n\n\n return new ReadOnlyDictionary<string, RouteConfig>(routes);\n", "file_path": "edge-hub/core/src/Microsoft.Azure.Devices.Edge.Hub.Core/config/EdgeHubConfigParser.cs", "rank": 91, "score": 127760.61388603163 }, { "content": " Option<AuthorizationConfig> ParseAuthorizationConfig(BrokerProperties properties)\n\n {\n\n if (properties.Authorizations.Count == 0)\n\n {\n\n return Option.None<AuthorizationConfig>();\n\n }\n\n\n\n IList<string> errors = this.validator.ValidateAuthorizationConfig(properties.Authorizations);\n\n if (errors.Count > 0)\n\n {\n\n string message = string.Join(\"; \", errors);\n\n throw new InvalidOperationException($\"Error validating authorization policy: {message}\");\n\n }\n\n\n\n var result = new List<Statement>(properties.Authorizations?.Count ?? 
0);\n\n foreach (var statement in properties.Authorizations)\n\n {\n\n // parse deny rules first, since we agreed that they take precedence\n\n // in case of conflicting rules.\n\n foreach (var rule in statement.Deny)\n", "file_path": "edge-hub/core/src/Microsoft.Azure.Devices.Edge.Hub.Core/config/EdgeHubConfigParser.cs", "rank": 92, "score": 127759.99759777024 }, { "content": "\n\n Option<BridgeConfig> ParseBridgeConfig(BrokerProperties properties)\n\n {\n\n if (properties.Bridges.Count == 0)\n\n {\n\n return Option.None<BridgeConfig>();\n\n }\n\n\n\n IList<string> errors = this.validator.ValidateBridgeConfig(properties.Bridges);\n\n if (errors.Count > 0)\n\n {\n\n string message = string.Join(\"; \", errors);\n\n throw new InvalidOperationException($\"Error validating bridge configuration: {message}\");\n\n }\n\n\n\n return Option.Some(properties.Bridges);\n\n }\n\n }\n\n}\n", "file_path": "edge-hub/core/src/Microsoft.Azure.Devices.Edge.Hub.Core/config/EdgeHubConfigParser.cs", "rank": 93, "score": 127759.94048103064 }, { "content": "\n\n readonly RouteFactory routeFactory;\n\n\n\n readonly BrokerPropertiesValidator validator;\n\n\n\n public EdgeHubConfigParser(RouteFactory routeFactory, BrokerPropertiesValidator validator)\n\n {\n\n this.routeFactory = Preconditions.CheckNotNull(routeFactory, nameof(routeFactory));\n\n this.validator = Preconditions.CheckNotNull(validator, nameof(validator));\n\n }\n\n\n\n public EdgeHubConfig GetEdgeHubConfig(string twinJson)\n\n {\n\n EdgeHubConfig edgeHubConfig;\n\n\n\n var twinJObject = JObject.Parse(twinJson);\n\n if (twinJObject.TryGetValue(Core.Constants.SchemaVersionKey, out JToken token))\n\n {\n\n Version version;\n\n\n", "file_path": "edge-hub/core/src/Microsoft.Azure.Devices.Edge.Hub.Core/config/EdgeHubConfigParser.cs", "rank": 94, "score": 127759.20168215674 }, { "content": " }\n\n\n\n Option<BrokerConfig> ParseBrokerConfig(BrokerProperties properties)\n\n {\n\n if (properties != null)\n\n {\n\n return 
Option.Some(\n\n new BrokerConfig(\n\n this.ParseBridgeConfig(properties),\n\n this.ParseAuthorizationConfig(properties)));\n\n }\n\n\n\n return Option.None<BrokerConfig>();\n\n }\n\n\n\n /// <summary>\n\n /// EH Twin and policy definition in the Broker have different json schemas.\n\n /// This method converts twin schema (BrokerProperties) into broker policy schema (AuthorizationConfig),\n\n /// and validates it.\n\n /// </summary>\n", "file_path": "edge-hub/core/src/Microsoft.Azure.Devices.Edge.Hub.Core/config/EdgeHubConfigParser.cs", "rank": 95, "score": 127753.59446874926 }, { "content": " {\n\n result.Add(new Statement(\n\n Effect.Deny,\n\n statement.Identities,\n\n rule.Operations,\n\n rule.Resources));\n\n }\n\n\n\n foreach (var rule in statement.Allow)\n\n {\n\n result.Add(new Statement(\n\n Effect.Allow,\n\n statement.Identities,\n\n rule.Operations,\n\n rule.Resources));\n\n }\n\n }\n\n\n\n return Option.Some(new AuthorizationConfig(result));\n\n }\n", "file_path": "edge-hub/core/src/Microsoft.Azure.Devices.Edge.Hub.Core/config/EdgeHubConfigParser.cs", "rank": 96, "score": 127753.36217992156 }, { "content": " class EdgeHubConnectorsConfig\n\n {\n\n public EdgeHubConnectorsConfig(\n\n TransportType transportType,\n\n string directMethodTargetModuleId,\n\n string messageOutputEndpoint)\n\n {\n\n this.TransportType = transportType;\n\n this.DirectMethodTargetModuleId = Preconditions.CheckNonWhiteSpace(directMethodTargetModuleId, nameof(directMethodTargetModuleId));\n\n this.MessageOutputEndpoint = Preconditions.CheckNonWhiteSpace(messageOutputEndpoint, nameof(messageOutputEndpoint));\n\n }\n\n\n\n public TransportType TransportType { get; }\n\n public string DirectMethodTargetModuleId { get; }\n\n public string MessageOutputEndpoint { get; }\n\n }\n\n}", "file_path": "test/modules/EdgeHubRestartTester/src/EdgeHubConnectorConfig.cs", "rank": 97, "score": 127740.72469773729 }, { "content": "// Copyright (c) Microsoft. 
All rights reserved.\n\nnamespace Microsoft.Azure.Devices.Edge.Test.Common.Config\n\n{\n\n using System.Collections.Generic;\n\n using System.Linq;\n\n using Microsoft.Azure.Devices.Edge.Util;\n\n\n\n public class EdgeConfigBuilder\n\n {\n\n readonly string deviceId;\n\n readonly Dictionary<string, IModuleConfigBuilder> moduleBuilders;\n\n readonly List<Registry> registries;\n\n\n\n public EdgeConfigBuilder(string deviceId)\n\n {\n\n this.deviceId = deviceId;\n\n this.moduleBuilders = new Dictionary<string, IModuleConfigBuilder>();\n\n this.registries = new List<Registry>();\n\n }\n\n\n", "file_path": "test/Microsoft.Azure.Devices.Edge.Test.Common/config/EdgeConfigBuilder.cs", "rank": 98, "score": 127557.84247728196 }, { "content": " {\n\n Option<string> imageOption = Option.Maybe(image);\n\n var builder = new HubModuleConfigBuilder(imageOption, optimizeForPerformance);\n\n this.moduleBuilders.Add(builder.Name, builder);\n\n return builder;\n\n }\n\n\n\n public IModuleConfigBuilder AddModule(string name, string image, bool shouldRestart = true)\n\n {\n\n var builder = new ModuleConfigBuilder(name, image, shouldRestart);\n\n this.moduleBuilders.Add(builder.Name, builder);\n\n return builder;\n\n }\n\n\n\n public EdgeConfiguration Build()\n\n {\n\n // Edge agent is not optional; add if necessary\n\n if (!this.moduleBuilders.ContainsKey(ModuleName.EdgeAgent))\n\n {\n\n this.AddEdgeAgent();\n", "file_path": "test/Microsoft.Azure.Devices.Edge.Test.Common/config/EdgeConfigBuilder.cs", "rank": 99, "score": 127553.92469260146 } ]
Rust
httpfs/tests/httpfs.rs
obonobo/ecurl
715d407f287728933cadc35eaad927d02e10ec73
#![allow(clippy::type_complexity)] #[cfg(test)] pub mod test_utils; use crate::test_utils::*; use core::panic; use httpfs::bullshit_scanner::BullshitScanner; use std::{ io::Write, net::TcpStream, sync::{mpsc, Arc, Mutex}, thread, }; use test_utils::better_ureq::*; lazy_static::lazy_static! { static ref SERVERS: Mutex<AddressCountingServerFactory> = Mutex::new( AddressCountingServerFactory::default(), ); } fn server() -> ServerDropper { SERVERS.lock().unwrap().next_server() } #[test] fn test_simple_get() { let handle = server(); let contents = "Hello world!\n"; let file = TempFile::new_or_panic("hello!.txt", contents); let got = ureq::get(&handle.file_addr(&file.name)) .call() .unwrap() .into_string() .unwrap(); assert_eq!(contents, &got); } #[test] fn test_simple_post() { let handle = server(); let contents = "Hello world!\n"; let file = TempFile::new_or_panic("hello.txt", ""); let posted = ureq::post(&handle.file_addr(&file.name)) .send_string(contents) .unwrap(); assert_eq!(posted.status(), 201); let got = ureq::get(&handle.file_addr(&file.name)) .call() .unwrap() .into_string() .unwrap(); assert_eq!(contents, &got); } #[test] fn test_not_found() { let handle = server(); assertions::assert_request_returns_error( ureq::get(&handle.file_addr("hello.txt")), 404, Some("File '/hello.txt' could not be found on the server (directory being served is ./)\n"), ); } #[test] fn test_forbidden() { let handle = server(); let request = "GET /../../hello.txt HTTP/1.1\r\n\r\n"; let mut sock = TcpStream::connect(handle.addr().trim_start_matches("http://")).unwrap(); sock.write_all(request.as_bytes()).unwrap(); let mut scnr = BullshitScanner::new(&mut sock); let status = scnr .next_line() .unwrap() .0 .split_once(' ') .map(|pair| String::from(pair.1)) .unwrap(); assert_eq!("403 Forbidden", status); let body = scnr .lines() .map(|l| l.0) .skip_while(|line| !line.is_empty()) .collect::<Vec<_>>() .join("\n"); assert!(body.contains("hello.txt' is located outside the directory that is 
being served")) } #[test] fn test_multiple_clients_get_same_file() { let server = server(); let contents = "Hello world\n"; let file = TempFile::new("hello.txt", contents).unwrap(); let n = 25; let mut threads = Vec::with_capacity(n); let (taskout, taskin) = mpsc::channel::<Result<(u16, String), ureq::Error>>(); let addr = server.file_addr(&file.name); for _ in 0..n { let (out, addr) = (taskout.clone(), addr.clone()); threads.push(thread::spawn(move || { out.send(ureq_get_errors_are_ok(&addr)).unwrap() })); } threads.into_iter().for_each(|t| t.join().unwrap()); for (i, res) in taskin.iter().take(n).enumerate() { match res { Ok((code, body)) => { assert_eq!(200, code); assert_eq!(contents, body); } Err(e) => panic!("Got an error on request {}: {}", i, e), } } } #[test] fn test_multiple_clients_reading_and_writing_same_file() { let handle = server(); let contents = "Hello world\n"; let file = TempFile::new("hello.txt", contents).unwrap(); let n = 25; let mut threads = Vec::with_capacity(n); let (taskout, taskin) = mpsc::channel::<Result<(u16, String), ureq::Error>>(); let addr = handle.file_addr(&file.name); let mut read = 0; let mut task = || -> Arc<dyn Fn(&str, &str) -> Result<(u16, String), ureq::Error> + Send + Sync> { read += 1; Arc::new(if read % 2 == 0 { |path, _| ureq_get_errors_are_ok(path) } else { ureq_post_errors_are_ok }) }; for i in 0..n { let (out, path, task) = (taskout.clone(), addr.clone(), task()); let body = format!("From thread {}", i); threads.push(thread::spawn(move || out.send(task(&path, &body)).unwrap())); } threads.into_iter().for_each(|t| t.join().unwrap()); let results = taskin.iter().take(n).collect::<Vec<_>>(); for (i, res) in results.iter().enumerate() { match res { Ok((code, body)) => match code { 200 => assert!( body.contains("From thread") || body.contains(contents), "Body: {}", body ), 201 => assert_eq!("", body), code => panic!("Expected status 200 or 201 but got {}", code), }, Err(e) => panic!("Got an error on request {}: {}", i, 
e), } } }
#![allow(clippy::type_complexity)] #[cfg(test)] pub mod test_utils; use crate::test_utils::*; use core::panic; use httpfs::bullshit_scanner::BullshitScanner; use std::{ io::Write, net::TcpStream, sync::{mpsc, Arc, Mutex}, thread, }; use test_utils::better_ureq::*; lazy_static::lazy_static! { static ref SERVERS: Mutex<AddressCountingServerFactory> = Mutex::new( AddressCountingServerFactory::default(), ); } fn server() -> ServerDropper { SERVERS.lock().unwrap().next_server() } #[test] fn test_simple_get() { let handle = server(); let contents = "Hello world!\n"; let file = TempFile::new_or_panic("hello!.txt", contents); let got = ureq::get(&handle.file_addr(&file.name)) .call() .unwrap() .into_string() .unwrap(); assert_eq!(contents, &got); } #[test] fn test_simple_post() { let handle = server(); let contents = "Hello world!\n"; let file = TempFile::new_or_panic("hello.txt", ""); let posted = ureq::post(&handle.file_addr(&file.name)) .send_string(contents) .unwrap(); assert_eq!(posted.status(), 201); let got = ureq::get(&handle.file_addr(&file.name)) .call() .unwrap() .into_string() .unwrap(); assert_eq!(contents, &got); } #[test] fn test_not_found() { let handle = server(); assertions::assert_request_returns_error( ureq::get(&handle.file_addr("hello.txt")), 404, Some("File '/hello.txt' could not be found on the server (directory being served is ./)\n"), ); } #[test] fn test_forbidden() { let handle = server(); let request = "GET /../../hello.txt HTTP/1.1\r\n\r\n"; let mut sock = TcpStream::connect(handle.addr().trim_start_matches("http://")).unwrap(); sock.write_all(request.as_bytes()).unwrap(); let mut scnr = BullshitScanner::new(&mut sock); let status = scnr .next_line() .unwrap() .0 .split_once(' ') .map(|pair| String::from(pair.1)) .unwrap(); assert_eq!("403 Forbidden", status); let body = scnr .lines() .map(|l| l.0) .skip_while(|line| !line.is_empty()) .colle
#[test] fn test_multiple_clients_get_same_file() { let server = server(); let contents = "Hello world\n"; let file = TempFile::new("hello.txt", contents).unwrap(); let n = 25; let mut threads = Vec::with_capacity(n); let (taskout, taskin) = mpsc::channel::<Result<(u16, String), ureq::Error>>(); let addr = server.file_addr(&file.name); for _ in 0..n { let (out, addr) = (taskout.clone(), addr.clone()); threads.push(thread::spawn(move || { out.send(ureq_get_errors_are_ok(&addr)).unwrap() })); } threads.into_iter().for_each(|t| t.join().unwrap()); for (i, res) in taskin.iter().take(n).enumerate() { match res { Ok((code, body)) => { assert_eq!(200, code); assert_eq!(contents, body); } Err(e) => panic!("Got an error on request {}: {}", i, e), } } } #[test] fn test_multiple_clients_reading_and_writing_same_file() { let handle = server(); let contents = "Hello world\n"; let file = TempFile::new("hello.txt", contents).unwrap(); let n = 25; let mut threads = Vec::with_capacity(n); let (taskout, taskin) = mpsc::channel::<Result<(u16, String), ureq::Error>>(); let addr = handle.file_addr(&file.name); let mut read = 0; let mut task = || -> Arc<dyn Fn(&str, &str) -> Result<(u16, String), ureq::Error> + Send + Sync> { read += 1; Arc::new(if read % 2 == 0 { |path, _| ureq_get_errors_are_ok(path) } else { ureq_post_errors_are_ok }) }; for i in 0..n { let (out, path, task) = (taskout.clone(), addr.clone(), task()); let body = format!("From thread {}", i); threads.push(thread::spawn(move || out.send(task(&path, &body)).unwrap())); } threads.into_iter().for_each(|t| t.join().unwrap()); let results = taskin.iter().take(n).collect::<Vec<_>>(); for (i, res) in results.iter().enumerate() { match res { Ok((code, body)) => match code { 200 => assert!( body.contains("From thread") || body.contains(contents), "Body: {}", body ), 201 => assert_eq!("", body), code => panic!("Expected status 200 or 201 but got {}", code), }, Err(e) => panic!("Got an error on request {}: {}", i, e), } } }
ct::<Vec<_>>() .join("\n"); assert!(body.contains("hello.txt' is located outside the directory that is being served")) }
function_block-function_prefixed
[ { "content": "fn parse_request_line(scnr: &mut BullshitScanner) -> Result<(Proto, Method, String), ServerError> {\n\n let words = scnr\n\n .next_line()\n\n .map(|l| l.0)\n\n .map_err(|e| ServerError::new().msg(&format!(\"{}\", e)))?\n\n .split_whitespace()\n\n .map(String::from)\n\n .collect::<Vec<_>>();\n\n\n\n let map_err = |word| {\n\n ServerError::wrapping(Box::new(MalformedRequestError(Some(format!(\n\n \"no {} found in request line\",\n\n word\n\n )))))\n\n };\n\n\n\n let proto = (match words.get(2) {\n\n Some(proto) => match Proto::from(proto) {\n\n Proto::Unsupported => Err(ServerError::wrapping(Box::new(UnsupportedProtoError(\n\n Some(String::from(proto)),\n", "file_path": "httpfs/src/parse.rs", "rank": 0, "score": 186075.32824171256 }, { "content": "/// Saves the given file with the provided file name\n\nfn accept_file_upload(filename: &str, body: &mut dyn Read) -> Result<(), ServerError> {\n\n let path = Path::new(filename);\n\n if path.is_dir() {\n\n return Err(ServerError::writing_to_directory());\n\n } else if path.is_symlink() {\n\n return Err(ServerError::writing_to_symlink());\n\n }\n\n\n\n let mut fh = OpenOptions::new()\n\n .write(true)\n\n .create(true)\n\n .open(filename)\n\n .map_err(wrap)?;\n\n\n\n std::io::copy(body, &mut fh).map(|_| ()).map_err(wrap)\n\n}\n\n\n", "file_path": "httpfs/src/server.rs", "rank": 1, "score": 164286.25440946335 }, { "content": "/// Writes a file response\n\nfn write_file(stream: &mut TcpStream, mut fh: File, filename: &str) -> Result<(), ServerError> {\n\n write_response_with_headers(\n\n stream,\n\n \"200 OK\",\n\n fh.metadata().map_err(wrap)?.len(),\n\n Some(HashMap::from([\n\n (\"Content-Type\", parse_mimetype(filename).as_str()),\n\n (\n\n \"Content-Disposition\",\n\n &format!(\n\n r#\"attachment; filename=\"{}\"\"#,\n\n filename.split('/').last().unwrap_or(filename)\n\n ),\n\n ),\n\n ])),\n\n Some(&mut fh),\n\n )\n\n}\n\n\n", "file_path": "httpfs/src/server.rs", "rank": 5, "score": 162081.84821190042 }, { 
"content": "fn wrap<E: std::error::Error + 'static>(err: E) -> ServerError {\n\n ServerError::wrap_err(err)\n\n}\n\n\n", "file_path": "httpfs/src/server.rs", "rank": 9, "score": 150040.3829194756 }, { "content": "/// Routes requests to the appropriate handler\n\nfn handle_connection(stream: &mut TcpStream, dir: &str) -> Result<(), ServerError> {\n\n // let mut reader = BufReader::with_capacity(BUFSIZE, stream.as_ref());\n\n let scnr = BullshitScanner::new(stream);\n\n let mut req = parse_http_request(scnr)?;\n\n log::info!(\"{}\", req);\n\n\n\n let filename = req.file.as_str();\n\n match Requested::parse(dir, &req) {\n\n Requested::Dir(file) => write_dir_listing(stream, &file),\n\n Requested::File(file) => match open_file(&file) {\n\n Ok((name, fh)) => write_file(stream, fh, &name),\n\n Err(_) => write_404(stream, filename, dir),\n\n },\n\n Requested::Upload(filename) => {\n\n accept_file_upload(&filename, &mut req.body)?;\n\n write_response::<File>(stream, \"201 Created\", 0, \"\", None)\n\n }\n\n Requested::None => write_404(stream, filename, dir),\n\n Requested::NotAllowed(filename) => write_not_allowed(stream, &filename, dir),\n\n }\n\n}\n\n\n", "file_path": "httpfs/src/server.rs", "rank": 10, "score": 140645.86675926685 }, { "content": "pub fn parse_http_request(\n\n mut scnr: BullshitScanner,\n\n) -> Result<Request<Take<BullshitScanner>>, ServerError> {\n\n let (proto, method, file) = parse_request_line(&mut scnr)?;\n\n let headers = parse_headers(&mut scnr)?;\n\n let limit = headers\n\n .get(CONTENT_LENGTH)\n\n .map(|l| l.parse::<u64>().ok().unwrap_or(0))\n\n .unwrap_or(0);\n\n\n\n Ok(Request {\n\n proto,\n\n method,\n\n file,\n\n headers,\n\n body: scnr.take(limit),\n\n })\n\n}\n\n\n", "file_path": "httpfs/src/parse.rs", "rank": 11, "score": 139775.62751368395 }, { "content": "fn open_file(file: &str) -> Result<(String, File), ServerError> {\n\n let fh = File::open(file).map_err(wrap)?;\n\n log::debug!(\"Opening file {}\", file);\n\n Ok((String::from(file), 
fh))\n\n}\n\n\n", "file_path": "httpfs/src/server.rs", "rank": 12, "score": 139769.41005346228 }, { "content": "fn parse_headers(scnr: &mut BullshitScanner) -> Result<HashMap<String, String>, ServerError> {\n\n // Headers we read line-by-line\n\n let mut headers = HashMap::with_capacity(64);\n\n loop {\n\n let line = scnr.next_line().map(|l| l.0).map_err(|_| {\n\n ServerError::new().wrap(Box::new(MalformedRequestError(Some(String::from(\n\n \"invalid request headers, headers must end with '\\\\r\\\\n'\",\n\n )))))\n\n })?;\n\n\n\n if line.is_empty() {\n\n return Ok(headers);\n\n }\n\n\n\n let (left, right) = line.split_once(':').ok_or_else(|| {\n\n ServerError::new().wrap(Box::new(MalformedRequestError(Some(format!(\n\n \"failed to parse request header '{}'\",\n\n line\n\n )))))\n\n })?;\n\n\n\n headers.insert(String::from(left.trim()), String::from(right.trim()));\n\n }\n\n}\n\n\n", "file_path": "httpfs/src/parse.rs", "rank": 13, "score": 131912.60515276543 }, { "content": "fn abs_path(file: &str) -> String {\n\n Path::new(file)\n\n .canonicalize()\n\n .ok()\n\n .map(|p| p.to_string_lossy().to_string())\n\n .unwrap_or_else(|| String::from(file))\n\n}\n\n\n", "file_path": "httpfs/src/server.rs", "rank": 15, "score": 125955.40964820959 }, { "content": "fn write_500(stream: &mut TcpStream, msg: &str) {\n\n if let Err(e) = write_response(\n\n stream,\n\n \"500 Internal Server Error\",\n\n msg.len().try_into().unwrap_or(0),\n\n \"text/plain\",\n\n Some(&mut stringreader::StringReader::new(msg)),\n\n ) {\n\n log::debug!(\"{}\", e);\n\n };\n\n}\n\n\n", "file_path": "httpfs/src/server.rs", "rank": 16, "score": 114839.57871729281 }, { "content": "fn write_dir_listing(stream: &mut TcpStream, dir: &str) -> Result<(), ServerError> {\n\n log::debug!(\"Listing directory {}\", dir);\n\n\n\n // Gather a list of files and inject it into the template\n\n let template = template(\n\n fs::read_dir(dir)\n\n .map_err(wrap)?\n\n .flat_map(Result::ok)\n\n .map(|file| (file.file_type(), 
file))\n\n .filter(|(ft, _)| ft.as_ref().map(|t| !t.is_symlink()).unwrap_or(false))\n\n .map(|(ft, f)| {\n\n (\n\n ft.map(|x| x.is_dir()).unwrap_or(false),\n\n String::from(f.file_name().to_string_lossy()),\n\n )\n\n })\n\n .map(|(ft, f)| if ft { format!(\"{}/\", f) } else { f }),\n\n );\n\n\n\n write_response(\n\n stream,\n\n \"200 OK\",\n\n template.len().try_into().map_err(wrap)?,\n\n \"text/html\",\n\n Some(&mut stringreader::StringReader::new(template.as_str())),\n\n )\n\n}\n\n\n", "file_path": "httpfs/src/server.rs", "rank": 17, "score": 110143.86450917533 }, { "content": "/// Template generation - insert a list of file names as links into our html doc\n\npub fn template(files: impl IntoIterator<Item = String>) -> String {\n\n let links = files\n\n .into_iter()\n\n .map(|file| format!(\" <a href=\\\"{}\\\">{}</a>\\n\", file, file))\n\n .collect::<String>();\n\n\n\n HTML.replacen(\" {LINKS}\", links.as_str(), 1)\n\n}\n\n\n\n/// This is the html document that is returned by the dir listing function\n\npub const HTML: &str = r#\"\n\n<!DOCTYPE html>\n\n<html>\n\n\n\n<head>\n\n <meta charset=\"utf-8\">\n\n <meta http-equiv=\"X-UA-Compatible\" content=\"IE=edge\">\n\n <title>HTTPFS</title>\n\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1\">\n\n <style>\n", "file_path": "httpfs/src/html.rs", "rank": 18, "score": 109103.69237402015 }, { "content": "/// Writes a '404 Not Found' response\n\nfn write_404(stream: &mut TcpStream, filename: &str, dir: &str) -> Result<(), ServerError> {\n\n let body = format!(\n\n \"File '{}' could not be found on the server (directory being served is {})\\n\",\n\n filename, dir\n\n );\n\n\n\n write_response(\n\n stream,\n\n \"404 Not Found\",\n\n body.len().try_into().map_err(|e| {\n\n ServerError::new()\n\n .msg(\"bad numerical conversion\")\n\n .wrap(Box::new(e))\n\n })?,\n\n \"text/plain\",\n\n Some(&mut stringreader::StringReader::new(body.as_str())),\n\n )\n\n}\n\n\n", "file_path": "httpfs/src/server.rs", 
"rank": 19, "score": 107522.18692538922 }, { "content": "fn write_not_allowed(stream: &mut TcpStream, filename: &str, dir: &str) -> Result<(), ServerError> {\n\n let body = format!(\n\n concat!(\n\n \"File '{}' is located outside the directory that is being served\\r\\n\\r\\n\",\n\n \"Only files in directory '{}' may be accessed\\r\\n\"\n\n ),\n\n abs_path(filename),\n\n abs_path(dir)\n\n );\n\n\n\n write_response(\n\n stream,\n\n \"403 Forbidden\",\n\n body.len().try_into().map_err(|e| {\n\n ServerError::new()\n\n .msg(\"bad numerical conversion\")\n\n .wrap(Box::new(e))\n\n })?,\n\n \"text/plain\",\n\n Some(&mut stringreader::StringReader::new(body.as_str())),\n\n )\n\n}\n\n\n", "file_path": "httpfs/src/server.rs", "rank": 20, "score": 105659.61714134699 }, { "content": "func TestPostDataFromFile(t *testing.T) {\n\n\tclose := testutils.MustBackgroundServer(t)\n\n\tdefer close()\n\n\n\n\t// Function for creating the CLI args\n\n\tcmd := func(file string, verbose bool) []string {\n\n\t\tret := make([]string, 0, 5)\n\n\t\tret = append(ret, []string{tool, POST}...)\n\n\t\tif verbose {\n\n\t\t\tret = append(ret, \"--verbose\")\n\n\t\t}\n\n\t\tret = append(ret, []string{\"--file\", file}...)\n\n\t\tret = append(ret, url)\n\n\t\treturn ret\n\n\t}\n\n\n\n\tfor _, tc := range []struct {\n\n\t\tname string\n\n\t\tdata string\n\n\t\tverbose bool\n\n\t\texit int\n\n\t\toutput string\n\n\t}{\n\n\t\t{\n\n\t\t\tname: \"Hello World\",\n\n\t\t\tverbose: false,\n\n\t\t\texit: 0,\n\n\t\t\tdata: \"Hello World\",\n\n\t\t\toutput: `\n\n\t\t\tPOST / HTTP/1.1\n\n\t\t\tHost: localhost\n\n\t\t\tAccept: */*\n\n\t\t\tAccept-Encoding: gzip\n\n\t\t\tContent-Length: 11\n\n\t\t\tConnection: close\n\n\t\t\tUser-Agent: ecurl/0.1.0\n\n\n\n\t\t\tHello World\n\n\t\t\t`,\n\n\t\t},\n\n\t} {\n\n\t\tt.Run(tc.name, func(t *testing.T) {\n\n\t\t\ttmp, delete := mustCreateTempFile(t, \"tmp-TestPostDataFromFile-*.txt\", tc.data)\n\n\t\t\tdefer delete()\n\n\t\t\targs := cmd(tmp.Name(), 
tc.verbose)\n\n\t\t\tassertCliOutput(t, args, tc.exit, tc.output)\n\n\t\t})\n\n\t}\n", "file_path": "httpc/cmd/cli_test.go", "rank": 21, "score": 100786.80062636429 }, { "content": "func TestGetAndPostSuccess(t *testing.T) {\n\n\tclose := testutils.MustBackgroundServer(t)\n\n\tdefer close()\n\n\n\n\tfor _, tc := range []struct {\n\n\t\tname string\n\n\t\targs []string\n\n\t\texit int\n\n\t\toutput string\n\n\t}{\n\n\t\t// GET\n\n\t\t{\n\n\t\t\tname: fmt.Sprintf(\"%v %v\", GET, url),\n\n\t\t\targs: []string{tool, GET, url},\n\n\t\t\texit: 0,\n\n\t\t\toutput: `\n\n\t\t\tGET / HTTP/1.1\n\n\t\t\tHost: localhost\n\n\t\t\tAccept: */*\n\n\t\t\tAccept-Encoding: gzip\n\n\t\t\tConnection: close\n\n\t\t\tUser-Agent: ecurl/0.1.0\n\n\t\t\t`,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: fmt.Sprintf(\"%v --verbose %v\", GET, url),\n\n\t\t\targs: []string{tool, GET, \"--verbose\", url},\n\n\t\t\texit: 0,\n\n\t\t\toutput: `\n\n\t\t\tHTTP/1.1 200 OK\n\n\t\t\tContent-Length: 113\n\n\t\t\tContent-Type: text/plain; charset=utf-8\n\n\t\t\tDate: Mon, 07 Feb 2022 18:11:54 GMT\n\n\n\n\t\t\tGET / HTTP/1.1\n\n\t\t\tHost: localhost\n\n\t\t\tAccept: */*\n\n\t\t\tAccept-Encoding: gzip\n\n\t\t\tConnection: close\n\n\t\t\tUser-Agent: ecurl/0.1.0\n\n\t\t\t`,\n\n\t\t},\n\n\n\n\t\t// POST, no body data\n\n\t\t{\n\n\t\t\tname: fmt.Sprintf(\"no body %v %v\", POST, url),\n\n\t\t\targs: []string{tool, POST, url},\n\n\t\t\texit: 0,\n\n\t\t\toutput: `\n\n\t\t\tPOST / HTTP/1.1\n\n\t\t\tHost: localhost\n\n\t\t\tAccept: */*\n\n\t\t\tAccept-Encoding: gzip\n\n\t\t\tContent-Length: 0\n\n\t\t\tUser-Agent: ecurl/0.1.0\n\n\t\t\tConnection: close\n\n\t\t\t`,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: fmt.Sprintf(\"no body %v --verbose %v\", POST, url),\n\n\t\t\targs: []string{tool, POST, \"--verbose\", url},\n\n\t\t\texit: 0,\n\n\t\t\toutput: `\n\n\t\t\tHTTP/1.1 200 OK\n\n\t\t\tContent-Length: 133\n\n\t\t\tContent-Type: text/plain; charset=utf-8\n\n\t\t\tDate: Mon, 07 Feb 2022 18:11:54 GMT\n\n\n\n\t\t\tPOST / HTTP/1.1\n\n\t\t\tHost: 
localhost\n\n\t\t\tAccept: */*\n\n\t\t\tAccept-Encoding: gzip\n\n\t\t\tContent-Length: 0\n\n\t\t\tUser-Agent: ecurl/0.1.0\n\n\t\t\tConnection: close\n\n\t\t\t`,\n\n\t\t},\n\n\n\n\t\t// POST, with inline body data\n\n\t\t{\n\n\t\t\tname: fmt.Sprintf(\"inline body %v --data 'Hello\\\\n' %v\", POST, url),\n\n\t\t\targs: []string{tool, POST, \"--data\", \"Hello\\n\", url},\n\n\t\t\texit: 0,\n\n\t\t\toutput: `\n\n\t\t\tPOST / HTTP/1.1\n\n\t\t\tHost: localhost\n\n\t\t\tAccept: */*\n\n\t\t\tAccept-Encoding: gzip\n\n\t\t\tContent-Length: 6\n\n\t\t\tConnection: close\n\n\t\t\tUser-Agent: ecurl/0.1.0\n\n\n\n\t\t\tHello\n\n\t\t\t`,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: fmt.Sprintf(\"inline body %v --data 'Hello\\\\n' --verbose %v\", POST, url),\n\n\t\t\targs: []string{tool, POST, \"--data\", \"Hello\\n\", \"--verbose\", url},\n\n\t\t\texit: 0,\n\n\t\t\toutput: `\n\n\t\t\tHTTP/1.1 200 OK\n\n\t\t\tContent-Length: 141\n\n\t\t\tContent-Type: text/plain; charset=utf-8\n\n\t\t\tDate: Mon, 07 Feb 2022 18:11:54 GMT\n\n\n\n\t\t\tPOST / HTTP/1.1\n\n\t\t\tHost: localhost\n\n\t\t\tAccept: */*\n\n\t\t\tAccept-Encoding: gzip\n\n\t\t\tContent-Length: 6\n\n\t\t\tConnection: close\n\n\t\t\tUser-Agent: ecurl/0.1.0\n\n\n\n\t\t\tHello\n\n\t\t\t`,\n\n\t\t},\n\n\t} {\n\n\t\tt.Run(tc.name, func(t *testing.T) {\n\n\t\t\tassertCliOutput(t, tc.args, tc.exit, trim(tc.output))\n\n\t\t})\n\n\t}\n", "file_path": "httpc/cmd/cli_test.go", "rank": 22, "score": 100686.1487777054 }, { "content": "\tInlineData, File string\n", "file_path": "httpc/cmd/post.go", "rank": 23, "score": 99018.4973536029 }, { "content": "\tscnr *BufferedScanner // Scanner for reading from TCP connection\n", "file_path": "httpc/ecurl/content_length_reader.go", "rank": 24, "score": 95531.60112404537 }, { "content": "fn write_response_with_headers(\n\n stream: &mut TcpStream,\n\n status: &str,\n\n body_length: u64,\n\n headers: Option<HashMap<&str, &str>>,\n\n body: Option<&mut impl Read>,\n\n) -> Result<(), ServerError> {\n\n let headers = 
headers.unwrap_or_default();\n\n log::debug!(\n\n \"Writing response {}, length {}, headers {:?}\",\n\n status,\n\n body_length,\n\n headers\n\n );\n\n\n\n let mut out = vec![format!(\"HTTP/1.1 {}\", status)];\n\n\n\n if !headers.contains_key(\"Content-Length\") {\n\n out.push(format!(\"Content-Length: {}\", body_length));\n\n }\n", "file_path": "httpfs/src/server.rs", "rank": 25, "score": 92898.72726734183 }, { "content": "func (e *FailedToReadFileError) Unwrap() error {\n\n\treturn e.Err\n", "file_path": "httpc/cmd/post.go", "rank": 26, "score": 91856.24569119532 }, { "content": "/// Writes a response to the stream\n\nfn write_response<R: Read>(\n\n stream: &mut TcpStream,\n\n status: &str,\n\n body_length: u64,\n\n content_type: &str,\n\n body: Option<&mut R>,\n\n) -> Result<(), ServerError> {\n\n write_response_with_headers(\n\n stream,\n\n status,\n\n body_length,\n\n Some(HashMap::from([(\"Content-Type\", content_type)])),\n\n body,\n\n )\n\n}\n\n\n", "file_path": "httpfs/src/server.rs", "rank": 27, "score": 83330.83126420627 }, { "content": "use std::{\n\n fs,\n\n io::{Error, Write},\n\n net::IpAddr,\n\n};\n\n\n\nuse httpfs::{\n\n errors::ServerError,\n\n server::{Handle, Server},\n\n};\n\n\n\nuse rand::{distributions::Alphanumeric, thread_rng, Rng};\n\n\n\npub type ServerConfig = (IpAddr, u32, &'static str, usize);\n\n\n\n/// When [dropped](Drop), the [TempFile] gets deleted.\n\npub struct TempFile {\n\n pub name: String,\n\n}\n\n\n", "file_path": "httpfs/tests/test_utils.rs", "rank": 28, "score": 82520.88911963996 }, { "content": "/// Parses the mime type from a non-exhaustive list\n\nfn parse_mimetype(filename: &str) -> String {\n\n match filename.split('/').last().unwrap_or(filename) {\n\n \"Makefile\" => mime::TEXT_PLAIN,\n\n other => match other.split('.').last() {\n\n Some(x) => match x {\n\n \"png\" => mime::IMAGE_PNG,\n\n \"jpg\" => mime::IMAGE_JPEG,\n\n \"txt\" => mime::TEXT_PLAIN,\n\n \"js\" => mime::APPLICATION_JAVASCRIPT,\n\n \"css\" => 
mime::TEXT_CSS,\n\n \"xml\" => mime::TEXT_XML,\n\n \"json\" => mime::APPLICATION_JSON,\n\n \"html\" => mime::TEXT_HTML,\n\n \"pdf\" => mime::APPLICATION_PDF,\n\n \"gitignore\" => mime::TEXT_PLAIN,\n\n \"lock\" => mime::TEXT_PLAIN,\n\n \"md\" => return String::from(\"text/markdown\"),\n\n \"toml\" => return String::from(\"application/toml\"),\n\n _ => mime::TEXT_PLAIN,\n\n },\n\n None => mime::APPLICATION_OCTET_STREAM,\n\n },\n\n }\n\n .to_string()\n\n}\n", "file_path": "httpfs/src/server.rs", "rank": 29, "score": 79245.42757708291 }, { "content": "use std::{\n\n collections::HashMap,\n\n fs::{self, File, OpenOptions},\n\n io::{Read, Write},\n\n net::{IpAddr, Ipv4Addr, TcpListener, TcpStream},\n\n path::{Path, PathBuf},\n\n sync::{\n\n atomic::{AtomicBool, Ordering},\n\n Arc, Barrier, Mutex,\n\n },\n\n thread::{self, JoinHandle},\n\n time::Duration,\n\n};\n\n\n\nuse threadpool::ThreadPool;\n\n\n\nuse crate::{\n\n bullshit_scanner::BullshitScanner,\n\n errors::ServerError,\n\n html::template,\n", "file_path": "httpfs/src/server.rs", "rank": 30, "score": 76840.44998555878 }, { "content": "type Request struct {\n\n\tMethod string\n\n\tHost string\n\n\tPath string\n\n\tPort int\n\n\tHeaders Headers\n\n\tBody io.Reader\n\n\n\n\turl string\n\n\ttls bool\n", "file_path": "httpc/ecurl/request.go", "rank": 32, "score": 74240.6764128597 }, { "content": "func Post(params PostParams) (exit int) {\n\n\tbody, size, err := bodyReader(params)\n\n\tif err != nil {\n\n\t\tfmt.Fprintln(os.Stderr, err)\n\n\t\treturn 1\n\n\t}\n\n\tdefer body.Close()\n\n\treturn makeRequest(params.GetParams, ecurl.POST, body, size)\n", "file_path": "httpc/cmd/post.go", "rank": 33, "score": 74213.09955382103 }, { "content": "const POST = \"post\"\n", "file_path": "httpc/cmd/post.go", "rank": 34, "score": 74213.09955382103 }, { "content": "func Get(params GetParams) (exit int) {\n\n\treturn makeRequest(params, ecurl.GET, nil, 0)\n", "file_path": "httpc/cmd/get.go", "rank": 35, "score": 74194.58767868974 }, 
{ "content": "const GET = \"get\"\n", "file_path": "httpc/cmd/get.go", "rank": 36, "score": 74194.58767868974 }, { "content": "/// Represents the file server operation that the user is requesting\n\nenum Requested {\n\n Dir(String),\n\n File(String),\n\n Upload(String),\n\n NotAllowed(String),\n\n None,\n\n}\n\n\n\nimpl Requested {\n\n fn parse<R: Read>(dir: &str, req: &Request<R>) -> Requested {\n\n let dir = Path::new(dir)\n\n .canonicalize()\n\n .ok()\n\n .unwrap_or_else(|| PathBuf::from(dir));\n\n\n\n let file = dir.join(req.file.trim_start_matches('/'));\n\n let file = file\n\n .canonicalize()\n\n .ok()\n\n .unwrap_or(file)\n", "file_path": "httpfs/src/server.rs", "rank": 37, "score": 74194.30380168553 }, { "content": "\tBody io.Reader\n", "file_path": "httpc/ecurl/request.go", "rank": 38, "score": 68253.41830863875 }, { "content": "func TestChunkedBigMessageFromEchoServer(t *testing.T) {\n\n\tclose := testutils.MustBackgroundServer(t, port)\n\n\tdefer close()\n\n\n\n\tmsg := strings.Repeat(\"Hello World!\\n\", 1<<10)\n\n\tresp, err := Post(url, \"text/plain\", bytes.NewBufferString(msg))\n\n\tif err != nil {\n\n\t\tt.Fatalf(\"Expected POST to succeed but got err: %v\", err)\n\n\t}\n\n\tdefer resp.Body.Close()\n\n\n\n\tbod, err := io.ReadAll(resp.Body)\n\n\tif err != nil {\n\n\t\tt.Fatalf(\"Expected body to be read but got err: %v\", err)\n\n\t}\n\n\n\n\t// Trim the first few lines from the response (the echo of our request headers)\n\n\tactual := testutils.TrimWhiteSpace(testutils.Tail(string(bod), -8))\n\n\texpected := testutils.TrimWhiteSpace(msg)\n\n\tif expected != actual {\n\n\t\tt.Log(resp)\n\n\t\tt.Fatalf(\"Expected response body to be '%v' but got '%v'\", expected, actual)\n\n\t}\n", "file_path": "httpc/ecurl/ecurl_test.go", "rank": 39, "score": 67115.60229166652 }, { "content": "func bodyReader(params PostParams) (io.ReadCloser, int, error) {\n\n\tif params.InlineData != \"\" || params.File == \"\" {\n\n\t\tdata := 
bytes.NewBufferString(params.InlineData)\n\n\t\treturn io.NopCloser(data), data.Len(), nil\n\n\t}\n\n\tfh, err := os.Open(params.File)\n\n\tif err != nil {\n\n\t\treturn nil, 0, &FailedToReadFileError{params.File, err}\n\n\t}\n\n\tsize, err := fileSize(fh)\n\n\tif err != nil {\n\n\t\tfh.Close()\n\n\t\treturn nil, 0, err\n\n\t}\n\n\treturn fh, size, nil\n", "file_path": "httpc/cmd/post.go", "rank": 40, "score": 66355.63825451811 }, { "content": "func fileSize(fh *os.File) (int, error) {\n\n\tstat, err := fh.Stat()\n\n\tif err != nil {\n\n\t\treturn -1, fmt.Errorf(\"error trying to determine file size: %w\", err)\n\n\t}\n\n\treturn int(stat.Size()), nil\n", "file_path": "httpc/cmd/post.go", "rank": 41, "score": 66259.13963472501 }, { "content": "func makeRequest(params GetParams, method string, body io.Reader, length int) (exit int) {\n\n\tbodyCopy := []byte{}\n\n\tif body != nil {\n\n\t\tcp, err := io.ReadAll(body)\n\n\t\tif err != nil {\n\n\t\t\tfmt.Fprintf(os.Stderr, \"Failed to read request body: %v\\n\", err)\n\n\t\t\treturn 1\n\n\t\t}\n\n\t\tbodyCopy = cp\n\n\t}\n\n\n\n\treq, err := ecurl.NewRequest(method, params.Url, io.NopCloser(bytes.NewBuffer(bodyCopy)))\n\n\tif err != nil {\n\n\t\tfmt.Fprintln(os.Stderr, err)\n\n\t\treturn 1\n\n\t}\n\n\n\n\t// Add headers\n\n\treq.Headers.AddAll(params.Headers)\n\n\tif strings.ToLower(method) != GET {\n\n\t\treq.Headers.Add(\"Content-Length\", fmt.Sprintf(\"%v\", length))\n\n\t}\n\n\n\n\tr, err := ecurl.Do(req)\n\n\tif err != nil {\n\n\t\tfmt.Fprintln(os.Stderr, err)\n\n\t\treturn 1\n\n\t}\n\n\trrr := r\n\n\tdefer rrr.Body.Close()\n\n\n\n\tbodyOut := os.Stdout\n\n\tif params.Output != \"\" {\n\n\t\tfh, err := os.Create(params.Output)\n\n\t\tif err != nil {\n\n\t\t\tfmt.Fprintf(os.Stderr, \"Failed to open file: %v\\n\", err)\n\n\t\t}\n\n\t\tdefer fh.Close()\n\n\t\tbodyOut = fh\n\n\t}\n\n\n\n\tfollowRedirects := 5\n\n\tif !params.FollowRedirects {\n\n\t\tfollowRedirects = 0\n\n\t}\n\n\n\n\tfor i := 0; ; i++ {\n\n\t\tif 
followRedirects == 0 {\n\n\t\t\tbreak\n\n\t\t}\n\n\n\n\t\trr, needs := needsRedirect(r, req)\n\n\t\tif !needs {\n\n\t\t\tbreak\n\n\t\t}\n\n\t\tif i >= followRedirects {\n\n\t\t\tfmt.Fprintf(os.Stderr,\n\n\t\t\t\t\"Maximum number of redirects (%v) exceeded...\\n\", followRedirects)\n\n\t\t\treturn 1\n\n\t\t}\n\n\n\n\t\t// If we need to follow a redirect, then print this current response\n\n\t\t// without a body\n\n\t\tclone := r.Clone()\n\n\t\tclone.Body = io.NopCloser(bytes.NewBufferString(\"\"))\n\n\t\tprintResponse(bodyOut, nil, clone, params.Verbose)\n\n\n\n\t\trr.Body = io.NopCloser(bytes.NewBuffer(bodyCopy))\n\n\t\tr, err = ecurl.Do(rr)\n\n\t\tif err != nil {\n\n\t\t\tfmt.Fprintln(os.Stderr, err)\n\n\t\t\treturn 1\n\n\t\t}\n\n\t\trrr := r\n\n\t\tdefer rrr.Body.Close()\n\n\t}\n\n\n\n\treturn printResponse(bodyOut, nil, r, params.Verbose)\n", "file_path": "httpc/cmd/get.go", "rank": 42, "score": 66132.98045165822 }, { "content": "func Post(url, contentType string, body io.Reader) (*Response, error) {\n\n\treq, err := NewRequest(POST, url, body)\n\n\tif err != nil {\n\n\t\treturn nil, fmt.Errorf(\"Post(%v, ...) 
failed: %w\", url, err)\n\n\t}\n\n\treq.Headers.Add(\"Content-Type\", contentType)\n\n\treturn Do(req)\n", "file_path": "httpc/ecurl/lib.go", "rank": 43, "score": 64895.91890763128 }, { "content": "\tPOST = \"POST\" // Acceptable method #1\n", "file_path": "httpc/ecurl/constants.go", "rank": 44, "score": 64883.79510969658 }, { "content": "func Get(url string) (*Response, error) {\n\n\treq, err := NewRequest(GET, url, nil)\n\n\tif err != nil {\n\n\t\treturn nil, fmt.Errorf(\"Get(%v) failed: %w\", url, err)\n\n\t}\n\n\treturn Do(req)\n", "file_path": "httpc/ecurl/lib.go", "rank": 45, "score": 64876.08686603754 }, { "content": "\tGET = \"GET\" // Acceptable method #1\n", "file_path": "httpc/ecurl/constants.go", "rank": 46, "score": 64869.26593510168 }, { "content": "func writeRequestLine(w io.Writer, req *Request) error {\n\n\t_, err := fmt.Fprintf(w,\n\n\t\t\"%v %v %v\\r\\n\",\n\n\t\tstrings.ToUpper(req.Method),\n\n\t\treq.Path,\n\n\t\t\"HTTP/1.1\")\n\n\tif err != nil {\n\n\t\treturn fmt.Errorf(\"error writing http request line: %w\", err)\n\n\t}\n\n\treturn nil\n", "file_path": "httpc/ecurl/lib.go", "rank": 47, "score": 64493.70733546184 }, { "content": "\tscnr *BufferedScanner // Scanner to read from conn\n", "file_path": "httpc/ecurl/chunked_reader.go", "rank": 48, "score": 64137.64657864208 }, { "content": "\tscnr *BufferedScanner\n", "file_path": "httpc/ecurl/infinite_reader.go", "rank": 49, "score": 64137.64657864208 }, { "content": "func lineSet(s string) (lineSet map[string]struct{}) {\n\n\tlines := strings.Split(s, \"\\n\")\n\n\tlineSet = make(map[string]struct{}, len(lines))\n\n\tfor _, l := range lines {\n\n\t\t// Ignore \"Data: ...\" headers which will never match\n\n\t\tif strings.HasPrefix(l, \"Date\") {\n\n\t\t\tcontinue\n\n\t\t}\n\n\t\tlineSet[l] = struct{}{}\n\n\t}\n\n\treturn lineSet\n", "file_path": "httpc/cmd/cli_test.go", "rank": 50, "score": 63917.076558445326 }, { "content": "type FailedToReadFileError struct {\n\n\tName string\n\n\tErr 
error\n", "file_path": "httpc/cmd/post.go", "rank": 51, "score": 62743.99316831898 }, { "content": "func lineSetEqual(name1, name2 string, set1, set2 map[string]struct{}) error {\n\n\tsetOneSubSetTwo := make([]string, 0, len(set1))\n\n\tsetTwoSubSetOne := make([]string, 0, len(set2))\n\n\n\n\t// Fill setOneSubSetTwo\n\n\tfor k := range set1 {\n\n\t\tif _, ok := set2[k]; !ok {\n\n\t\t\tsetOneSubSetTwo = append(setOneSubSetTwo, k)\n\n\t\t}\n\n\t}\n\n\n\n\t// Fill setTwoSubSetOne\n\n\tfor k := range set2 {\n\n\t\tif _, ok := set1[k]; !ok {\n\n\t\t\tsetTwoSubSetOne = append(setTwoSubSetOne, k)\n\n\t\t}\n\n\t}\n\n\n\n\ttoString := func(strs []string) (ret string) {\n\n\t\tret = \"{\"\n\n\t\tfor _, s := range strs {\n\n\t\t\tret += fmt.Sprintf(`\"%s\", `, s)\n\n\t\t}\n\n\t\treturn ret[:len(ret)-2] + \"}\"\n\n\t}\n\n\n\n\tl1, l2 := len(setOneSubSetTwo), len(setTwoSubSetOne)\n\n\tswitch {\n\n\tcase l1 > 0 && l2 > 0:\n\n\t\treturn fmt.Errorf(\n\n\t\t\t\"%v is missing %v from %v, %v is missing %v from %v\",\n\n\t\t\tname2, toString(setOneSubSetTwo), name1,\n\n\t\t\tname1, toString(setTwoSubSetOne), name2)\n\n\tcase l1 > 0:\n\n\t\treturn fmt.Errorf(\n\n\t\t\t\"%v is missing %v from %v\",\n\n\t\t\tname2, toString(setOneSubSetTwo), name1)\n\n\tcase l2 > 0:\n\n\t\treturn fmt.Errorf(\n\n\t\t\t\"%v is missing %v from %v\",\n\n\t\t\tname1, toString(setTwoSubSetOne), name2)\n\n\tdefault:\n\n\t\treturn nil\n\n\t}\n", "file_path": "httpc/cmd/cli_test.go", "rank": 52, "score": 62218.39421779536 }, { "content": "func RedirectingBackgroundServer(\n\n\tt *testing.T,\n\n\tport, maxRedirects, statusCode int,\n\n) (close func(), resetCount func()) {\n\n\tmaxRedirects++\n\n\tvar redirects int\n\n\treturn testutils.MustCustomBackgroundServer(t, port, func(rw http.ResponseWriter, r *http.Request) {\n\n\t\tredirects = (redirects + 1) % maxRedirects\n\n\t\tif redirects == 0 {\n\n\t\t\trw.WriteHeader(http.StatusOK)\n\n\t\t\treturn\n\n\t\t}\n\n\t\trw.Header().Add(\"Location\", 
fmt.Sprintf(\"http://localhost:%v/redirect\", port))\n\n\t\trw.WriteHeader(statusCode)\n\n\t}), func() { redirects = 0 }\n", "file_path": "httpc/cmd/cli_test.go", "rank": 53, "score": 61813.34424817138 }, { "content": "func (e *FailedToReadFileError) Error() string {\n\n\tret := fmt.Sprintf(\"failed to read file '%v'\", e.Name)\n\n\tif e.Err != nil {\n\n\t\tret += fmt.Sprintf(\": %v\", e.Err)\n\n\t}\n\n\treturn ret\n", "file_path": "httpc/cmd/post.go", "rank": 54, "score": 61122.668668985374 }, { "content": "func mustCreateTempFile(\n\n\tt *testing.T,\n\n\tnamePattern, contents string,\n\n) (file *os.File, delete func()) {\n\n\tfh, err := os.CreateTemp(\".\", namePattern)\n\n\tif err != nil {\n\n\t\tt.Fatalf(\"Got an error when trying to create \"+\n\n\t\t\t\"file (pattern '%v'): %v\", namePattern, err)\n\n\t}\n\n\tdelete = func() {\n\n\t\tfh.Close()\n\n\t\tos.Remove(fh.Name())\n\n\t}\n\n\tif _, err := fh.Write([]byte(contents)); err != nil {\n\n\t\tdelete()\n\n\t\tt.Fatalf(\"Failed to write data to file '%v'\", fh.Name())\n\n\t}\n\n\tif _, err := fh.Seek(0, 0); err != nil {\n\n\t\tdelete()\n\n\t\tt.Fatalf(\"Failed to seek to beginning of file '%v'\", fh.Name())\n\n\t}\n\n\treturn fh, delete\n", "file_path": "httpc/cmd/cli_test.go", "rank": 55, "score": 60566.70077254003 }, { "content": "/// ```\n\n/// lazy_static::lazy_static! 
{\n\n/// static ref SERVERS: Mutex<AddressCountingServerFactory> = Mutex::new(\n\n/// AddressCountingServerFactory::default(),\n\n/// );\n\n/// }\n\n/// ```\n\npub struct AddressCountingServerFactory {\n\n next: u32,\n\n}\n\n\n\nimpl AddressCountingServerFactory {\n\n pub fn new(starting_port: u32) -> Self {\n\n Self {\n\n next: starting_port,\n\n }\n\n }\n\n\n\n pub fn next_server(&mut self) -> ServerDropper {\n\n let mut cfg = ServerDropper::DEFAULT_SERVER_CONFIG;\n", "file_path": "httpfs/tests/test_utils.rs", "rank": 56, "score": 45058.623243905626 }, { "content": " cfg.1 = self.next;\n\n self.next += 1;\n\n ServerDropper::new_or_panic(cfg)\n\n }\n\n}\n\n\n\nimpl Default for AddressCountingServerFactory {\n\n fn default() -> Self {\n\n Self {\n\n next: ServerDropper::DEFAULT_SERVER_CONFIG.1,\n\n }\n\n }\n\n}\n\n\n\npub mod better_ureq {\n\n use ureq::{get, post, Error};\n\n\n\n /// Calls ureq GET but treats [ureq::Error::Status] errors as still being valid.\n\n /// Returns a tuple of status code and response body string.\n\n pub fn ureq_get_errors_are_ok(path: &str) -> Result<(u16, String), Error> {\n", "file_path": "httpfs/tests/test_utils.rs", "rank": 57, "score": 45057.50146548164 }, { "content": " use ureq::{Error::Status, Request};\n\n\n\n /// Asserts that a given [Request] returns an HTTP error code with a\n\n /// specific body. 
Pass [Option::None] if you don't want to assert the body\n\n pub fn assert_request_returns_error(req: Request, status: u16, body: Option<&str>) {\n\n match req.call().err().unwrap() {\n\n Status(code, res) => {\n\n assert_eq!(status, code,);\n\n if let Some(body) = body {\n\n let actual_body = res.into_string().unwrap();\n\n assert_eq!(body, actual_body);\n\n }\n\n }\n\n err => panic!(\n\n \"expected request to return an error status code and a message but got err {}\",\n\n err\n\n ),\n\n }\n\n }\n\n}\n", "file_path": "httpfs/tests/test_utils.rs", "rank": 58, "score": 45057.15077507527 }, { "content": " ureq_errors_are_ok(|| get(path).call())\n\n }\n\n\n\n pub fn ureq_post_errors_are_ok(path: &str, body: &str) -> Result<(u16, String), Error> {\n\n ureq_errors_are_ok(|| post(path).send_string(body))\n\n }\n\n\n\n fn ureq_errors_are_ok(\n\n callable: impl FnOnce() -> Result<ureq::Response, Error>,\n\n ) -> Result<(u16, String), Error> {\n\n match callable() {\n\n Ok(response) | Err(Error::Status(_, response)) => Ok((\n\n response.status(),\n\n response.into_string().unwrap_or_default(),\n\n )),\n\n Err(e) => Err(e),\n\n }\n\n }\n\n}\n\npub mod assertions {\n", "file_path": "httpfs/tests/test_utils.rs", "rank": 59, "score": 45056.078858697256 }, { "content": " /// Note: this panics if the server cannot be created\n\n fn default() -> Self {\n\n Self::new_or_panic(Self::DEFAULT_SERVER_CONFIG)\n\n }\n\n}\n\n\n\nimpl Drop for ServerDropper {\n\n fn drop(&mut self) {\n\n self.handle.shutdown();\n\n }\n\n}\n\n\n\n/// Spawns [ServerDroppers](ServerDropper) on an auto-incrementing port starting\n\n/// at some provided port number. 
Used for concurrent tests.\n\n///\n\n/// The way to use this is to make a global singleton that is reused for all\n\n/// your tests.\n\n///\n\n/// ### Examples\n\n///\n", "file_path": "httpfs/tests/test_utils.rs", "rank": 60, "score": 45055.24061525889 }, { "content": " }\n\n\n\n pub fn new_or_panic(filename: &str, contents: &str) -> Self {\n\n Self::new(filename, contents).unwrap()\n\n }\n\n}\n\n\n\nimpl Drop for TempFile {\n\n fn drop(&mut self) {\n\n fs::remove_file(&self.name).unwrap();\n\n }\n\n}\n\n\n\nimpl Default for TempFile {\n\n /// Creates an empty temp file\n\n fn default() -> Self {\n\n Self::new_or_panic(\"file.tmp\", \"\")\n\n }\n\n}\n\n\n", "file_path": "httpfs/tests/test_utils.rs", "rank": 61, "score": 45055.22305183549 }, { "content": "/// A wrapper around the server handle. Implements a [Drop::drop] method that\n\n/// calls [Handle::shutdown]. Warning: [Handle::shutdown] may block for a short\n\n/// time while it waits for the server to stop. That's the reason why this is\n\n/// not implemented for the general [Server] type.\n\npub struct ServerDropper {\n\n handle: Handle,\n\n cfg: ServerConfig,\n\n}\n\n\n\nimpl ServerDropper {\n\n pub const DEFAULT_SERVER_CONFIG: ServerConfig = (Server::LOCALHOST, 8666, \"./\", 2);\n\n\n\n pub fn new(cfg: ServerConfig) -> Result<Self, ServerError> {\n\n Ok(Self {\n\n cfg,\n\n handle: Server {\n\n addr: cfg.0,\n\n port: cfg.1,\n\n dir: String::from(cfg.2),\n\n n_workers: cfg.3,\n", "file_path": "httpfs/tests/test_utils.rs", "rank": 62, "score": 45054.727305429784 }, { "content": " }\n\n .serve()?,\n\n })\n\n }\n\n\n\n pub fn new_or_panic(cfg: ServerConfig) -> Self {\n\n Self::new(cfg).unwrap()\n\n }\n\n\n\n /// Returns a formatted string containing the address of this server\n\n pub fn addr(&self) -> String {\n\n format!(\"http://{}:{}\", self.cfg.0, self.cfg.1)\n\n }\n\n\n\n pub fn file_addr(&self, filename: &str) -> String {\n\n format!(\"{}/{}\", self.addr(), filename)\n\n }\n\n}\n\n\n\nimpl Default for 
ServerDropper {\n", "file_path": "httpfs/tests/test_utils.rs", "rank": 63, "score": 45053.32550921444 }, { "content": "impl TempFile {\n\n /// Creates a temporary file with the provided contents. To avoid filename\n\n /// conflicts, the filename will be prefixed with a random string\n\n pub fn new(filename: &str, contents: &str) -> Result<Self, Error> {\n\n let filename = vec![\n\n \"TEMP_\",\n\n thread_rng()\n\n .sample_iter(&Alphanumeric)\n\n .take(16)\n\n .map(char::from)\n\n .collect::<String>()\n\n .as_str(),\n\n \"_\",\n\n filename,\n\n ]\n\n .into_iter()\n\n .collect::<String>();\n\n\n\n fs::File::create(&filename)?.write_all(contents.as_bytes())?;\n\n Ok(Self { name: filename })\n", "file_path": "httpfs/tests/test_utils.rs", "rank": 64, "score": 45051.487279672576 }, { "content": "#[derive(Debug)]\n\nstruct ServerRunner {\n\n addr: IpAddr,\n\n port: u32,\n\n dir: String,\n\n threads: Arc<Mutex<ThreadPool>>,\n\n}\n\n\n\nimpl ServerRunner {\n\n fn serve(&self) -> Result<Handle, ServerError> {\n\n let addr = self.addr_str();\n\n log::info!(\"Starting server on {}\", addr);\n\n\n\n let listener = TcpListener::bind(addr).map_err(wrap)?;\n\n listener\n\n .set_nonblocking(true)\n\n .map_err(ServerError::wrap_err)?;\n\n\n\n let mut handle = Handle::new();\n\n\n\n // Spin up a request handler loop in a new thread\n", "file_path": "httpfs/src/server.rs", "rank": 65, "score": 44642.60538672923 }, { "content": "func postCmd(config *Config) (usage func(), action func(args []string) int) {\n\n\tpostCmd := flag.NewFlagSet(GET, flag.ExitOnError)\n\n\tpostCmd.Usage = func() {\n\n\t\tfmt.Printf(\n\n\t\t\tPostUsage,\n\n\t\t\tconfig.Command,\n\n\t\t\tPOST,\n\n\t\t\tstrings.ToUpper(POST[:1])+POST[1:])\n\n\t}\n\n\n\n\t// Verbose\n\n\tpostCmdVerbose := postCmd.Bool(\"verbose\", false, \"\")\n\n\tpostCmd.BoolVar(postCmdVerbose, \"v\", false, \"\")\n\n\n\n\t// Headers\n\n\thfv := make(HeadersFlagValue, 10)\n\n\tpostCmd.Var(&hfv, \"h\", \"\")\n\n\tpostCmd.Var(&hfv, \"header\", 
\"\")\n\n\n\n\t// Inline data\n\n\tpostCmdData := postCmd.String(\"data\", \"\", \"\")\n\n\tpostCmd.StringVar(postCmdData, \"d\", \"\", \"\")\n\n\n\n\t// File\n\n\tpostCmdFile := postCmd.String(\"file\", \"\", \"\")\n\n\tpostCmd.StringVar(postCmdFile, \"f\", \"\", \"\")\n\n\n\n\t// Output file\n\n\tpostCmdOutputFile := postCmd.String(\"output\", \"\", \"\")\n\n\tpostCmd.StringVar(postCmdOutputFile, \"o\", \"\", \"\")\n\n\n\n\t// Follow redirects\n\n\tpostCmdFollowRedirects := postCmd.Bool(\"location\", false, \"\")\n\n\tpostCmd.BoolVar(postCmdFollowRedirects, \"L\", false, \"\")\n\n\n\n\treturn postCmd.Usage, func(args []string) int {\n\n\t\tpostCmd.Parse(args)\n\n\n\n\t\tparams := PostParams{\n\n\t\t\tInlineData: *postCmdData,\n\n\t\t\tFile: *postCmdFile,\n\n\t\t\tGetParams: GetParams{\n\n\t\t\t\tFollowRedirects: *postCmdFollowRedirects,\n\n\t\t\t\tUrl: postCmd.Arg(0),\n\n\t\t\t\tOutput: *postCmdOutputFile,\n\n\t\t\t\tVerbose: *postCmdVerbose,\n\n\t\t\t\tHeaders: hfv,\n\n\t\t\t},\n\n\t\t}\n\n\n\n\t\tif exit := checkPostParams(params, config.Command); exit != 0 {\n\n\t\t\treturn exit\n\n\t\t}\n\n\n\n\t\treturn Post(params)\n\n\t}\n", "file_path": "httpc/cmd/post.go", "rank": 66, "score": 42581.49702074844 }, { "content": "var PostUsage = strings.TrimLeft(`\n\nusage: %v %v [-v] [-h \"k:v\"]* [-d inline-data] [-f file] [-o file] [-L] URL\n\n\n\n%v performs an HTTP POST request on URL\n\n\n\nFlags:\n\n\n\n\t-v, --verbose\n\n\t\tEnables verbose output.\n\n\n\n\t-h, --header\n\n\t\tAdds a header to your request.\n\n\n\n\t-d, --data\n\n\t\tAdd inline data to the body of your request.\n\n\n\n\t-f, --file\n\n\t\tRead the body of your request from a file. This flag takes lower\n\n\t\tprecedence than the -d, --data flag; if both flags are specified\n\n\t\tthe inline data will be used as the body of the request and the\n\n\t\tfile will be ignored.\n\n\n\n\t-o, --output\n\n\t\tSaves the response body to a file. 
Verbose output will still be\n\n\t\tprinted to STDERR, not to the file specified by this flag.\n\n\n\n\t-L, --location\n\n\t\tFollow redirects up to 5 times.\n", "file_path": "httpc/cmd/post.go", "rank": 67, "score": 42581.49702074844 }, { "content": "type PostParams struct {\n\n\tGetParams\n\n\tInlineData, File string\n", "file_path": "httpc/cmd/post.go", "rank": 68, "score": 42581.49702074844 }, { "content": "func NewRequest(method string, url string, body io.Reader) (*Request, error) {\n\n\tr, host, err := newBlankRequest(method, url, body)\n\n\tif err != nil {\n\n\t\treturn r, err\n\n\t}\n\n\n\n\t// Request comes with some default headers...\n\n\tr.Headers.AddAll(defaultHeaders)\n\n\tr.Headers.Add(\"Host\", host)\n\n\n\n\t// If the body is of a type that supports reporting its length, then we can\n\n\t// automatically compute the Content-Length header\n\n\tif x, ok := body.(interface{ Len() int }); ok {\n\n\t\tr.Headers.Add(\"Content-Length\", fmt.Sprintf(\"%v\", x.Len()))\n\n\t} else if body == nil && strings.ToUpper(method) != GET {\n\n\t\tr.Headers.Add(\"Content-Length\", \"0\")\n\n\t}\n\n\n\n\treturn r, nil\n", "file_path": "httpc/ecurl/request.go", "rank": 69, "score": 42566.497510277506 }, { "content": "type GetParams struct {\n\n\tFollowRedirects bool\n\n\tOutput string\n\n\tUrl string\n\n\tVerbose bool\n\n\tHeaders map[string]string\n", "file_path": "httpc/cmd/get.go", "rank": 70, "score": 42563.318886243345 }, { "content": "func getCmd(config *Config) (usage func(), action func(args []string) int) {\n\n\tgetCmd := flag.NewFlagSet(GET, flag.ExitOnError)\n\n\tgetCmd.Usage = func() {\n\n\t\tfmt.Printf(\n\n\t\t\tGetUsage,\n\n\t\t\tconfig.Command,\n\n\t\t\tGET,\n\n\t\t\tstrings.ToUpper(GET[:1])+GET[1:])\n\n\t}\n\n\n\n\t// Verbose\n\n\tgetCmdVerbose := getCmd.Bool(\"verbose\", false, \"\")\n\n\tgetCmd.BoolVar(getCmdVerbose, \"v\", false, \"\")\n\n\n\n\t// Headers\n\n\thfv := make(HeadersFlagValue, 10)\n\n\tgetCmd.Var(&hfv, \"h\", \"\")\n\n\tgetCmd.Var(&hfv, 
\"header\", \"\")\n\n\n\n\t// Output file\n\n\tgetCmdOutputFile := getCmd.String(\"output\", \"\", \"\")\n\n\tgetCmd.StringVar(getCmdOutputFile, \"o\", \"\", \"\")\n\n\n\n\t// Follow redirects\n\n\tgetCmdFollowRedirects := getCmd.Bool(\"location\", false, \"\")\n\n\tgetCmd.BoolVar(getCmdFollowRedirects, \"L\", false, \"\")\n\n\n\n\treturn getCmd.Usage, func(args []string) int {\n\n\t\tgetCmd.Parse(args)\n\n\n\n\t\turl := getCmd.Arg(0)\n\n\n\n\t\tif url == \"\" {\n\n\t\t\tfmt.Fprintln(os.Stderr, \"Please provide a url.\")\n\n\t\t\tfmt.Fprintf(os.Stderr,\n\n\t\t\t\t`usage: %v %v [-v] [-h \"k:v\"]* URL`+\"\\n\", config.Command, GET)\n\n\t\t\treturn 2\n\n\t\t}\n\n\n\n\t\treturn Get(GetParams{\n\n\t\t\tUrl: url,\n\n\t\t\tFollowRedirects: *getCmdFollowRedirects,\n\n\t\t\tOutput: *getCmdOutputFile,\n\n\t\t\tVerbose: *getCmdVerbose,\n\n\t\t\tHeaders: hfv,\n\n\t\t})\n\n\t}\n", "file_path": "httpc/cmd/get.go", "rank": 71, "score": 42563.318886243345 }, { "content": "func (r *Request) String() string {\n\n\treturn fmt.Sprintf(\"\"+\n\n\t\t\"Request[Method=%v, Host=%v, \"+\n\n\t\t\"Path=%v, Port=%v, Headers=%v, Body=%v]\",\n\n\t\tr.Method, r.Host, r.Path, r.Port, r.Headers, r.Body)\n", "file_path": "httpc/ecurl/request.go", "rank": 72, "score": 42563.318886243345 }, { "content": "var GetUsage = strings.TrimLeft(`\n\nusage: %v %v [-v] [-h \"k:v\"]* [-o file] [-L] URL\n\n\n\n%v performs an HTTP GET request on URL\n\n\n\nFlags:\n\n\n\n\t-v, --verbose\n\n\t\tEnables verbose output.\n\n\n\n\t-h, --header\n\n\t\tAdds a header to your request.\n\n\n\n\t-o, --output\n\n\t\tSaves the response body to a file. 
Verbose output will still be\n\n\t\tprinted to STDERR, not to the file specified by this flag.\n\n\n\n\t-L, --location\n\n\t\tFollow redirects up to 5 times.\n", "file_path": "httpc/cmd/get.go", "rank": 73, "score": 42563.318886243345 }, { "content": "func (r *Request) Clone() (*Request, error) {\n\n\trr, err := NewRequest(r.Method, r.url, r.Body)\n\n\tif err != nil {\n\n\t\treturn nil, fmt.Errorf(\"failed to clone request: %w\", err)\n\n\t}\n\n\trr.Headers.AddAll(r.Headers)\n\n\treturn rr, nil\n", "file_path": "httpc/ecurl/request.go", "rank": 74, "score": 42563.318886243345 }, { "content": "func checkPostParams(params PostParams, command string) (exit int) {\n\n\tif params.File != \"\" && params.InlineData != \"\" {\n\n\t\tfmt.Fprintln(os.Stderr, \"WARNING: flag -f/--file has no \"+\n\n\t\t\t\"effect when used alongside flag -d/--data\")\n\n\t}\n\n\n\n\tif params.Url == \"\" {\n\n\t\tfmt.Fprintln(os.Stderr, \"Please provide a url.\")\n\n\t\tfmt.Fprintf(os.Stderr,\n\n\t\t\t`%v %v [-v] [-h \"k:v\"]* [-d inline-data] [-f file] [-o file] [-L] URL`+\"\\n\",\n\n\t\t\tcommand, POST)\n\n\t\treturn 2\n\n\t}\n\n\n\n\treturn 0\n", "file_path": "httpc/cmd/post.go", "rank": 75, "score": 41827.41311802692 }, { "content": "func NewBlankRequest(method string, url string, body io.Reader) (*Request, error) {\n\n\tr, _, err := newBlankRequest(method, url, body)\n\n\treturn r, err\n", "file_path": "httpc/ecurl/request.go", "rank": 76, "score": 41812.73955281234 }, { "content": "func newBlankRequest(\n\n\tmethod string,\n\n\turll string,\n\n\tbody io.Reader,\n\n) (\n\n\tr *Request,\n\n\thost string,\n\n\terr error,\n\n) {\n\n\tmethod = strings.ToUpper(method)\n\n\tif !isAcceptableMethod(method) {\n\n\t\treturn nil, \"\", UnsupportedHttpMethod(method)\n\n\t}\n\n\n\n\t_, host, path, port, tls, err := splitUrl(urll)\n\n\tif err != nil {\n\n\t\treturn nil, \"\", fmt.Errorf(\"error parsing url: %w\", err)\n\n\t}\n\n\n\n\tr = &Request{\n\n\t\turl: urll,\n\n\t\ttls: tls,\n\n\t\tMethod: 
method,\n\n\t\tPort: port,\n\n\t\tPath: path,\n\n\t\tHost: host,\n\n\t\tBody: body,\n\n\t\tHeaders: make(Headers, 20),\n\n\t}\n\n\n\n\treturn r, host, nil\n", "file_path": "httpc/ecurl/request.go", "rank": 77, "score": 41809.55690359441 }, { "content": "func TestGzip(t *testing.T) {\n\n\tport := port + 1\n\n\tclose := testutils.MustCustomBackgroundServer(t, port, gzipEchoHandlerFunc)\n\n\tdefer close()\n\n\tfor _, tc := range []struct {\n\n\t\tname string\n\n\t\tdata string\n\n\t}{\n\n\t\t{\n\n\t\t\tname: \"empty\",\n\n\t\t\tdata: \"\",\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"hello world\",\n\n\t\t\tdata: \"Hello World!\",\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"big\",\n\n\t\t\tdata: strings.Repeat(\"big!?\\r\\n\\tasdasdasd\", 1024),\n\n\t\t},\n\n\t} {\n\n\t\tt.Run(tc.name, func(t *testing.T) {\n\n\t\t\tresp, err := Post(\n\n\t\t\t\tfmt.Sprintf(\"http://localhost:%v\", port),\n\n\t\t\t\t\"text/plain\",\n\n\t\t\t\tbytes.NewBufferString(tc.data))\n\n\t\t\tif err != nil {\n\n\t\t\t\tt.Fatalf(\"Expected POST to succeed but got err: %v\", err)\n\n\t\t\t}\n\n\n\n\t\t\tdefer resp.Body.Close()\n\n\t\t\tbod, err := io.ReadAll(resp.Body)\n\n\t\t\tif err != nil {\n\n\t\t\t\tt.Fatalf(\"Got an error reading response body: %v\", err)\n\n\t\t\t}\n\n\t\t\tif actual, expected := string(bod), tc.data; actual != expected {\n\n\t\t\t\tt.Fatalf(\"Expected body '%v' but got '%v'\", expected, actual)\n\n\t\t\t}\n\n\t\t})\n\n\t}\n", "file_path": "httpc/ecurl/ecurl_test.go", "rank": 78, "score": 41062.04692993023 }, { "content": "type contentLengthReader struct {\n\n\tconn net.Conn // TCP connection\n\n\tscnr *BufferedScanner // Scanner for reading from TCP connection\n\n\tclen int // Content-Length\n\n\tred int // Amount read\n\n\terr error // Recorded error\n", "file_path": "httpc/ecurl/content_length_reader.go", "rank": 79, "score": 40550.015925529275 }, { "content": "func TestDecodeBasic(t *testing.T) {\n\n\tfor _, tc := range []struct {\n\n\t\tname string\n\n\t\tinput 
string\n\n\t}{\n\n\t\t{\n\n\t\t\tname: \"empty\",\n\n\t\t\tinput: \"\",\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"hello world\",\n\n\t\t\tinput: \"Hello World!\",\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"asd123\",\n\n\t\t\tinput: \"asd123\",\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"big!\",\n\n\t\t\tinput: strings.Repeat(\"big!\\r\\n\\t\", 1024),\n\n\t\t},\n\n\t} {\n\n\t\ttc := tc\n\n\t\tt.Run(tc.name, func(t *testing.T) {\n\n\t\t\tt.Parallel()\n\n\t\t\tzipped, err := testutils.Gzipup(tc.input)\n\n\t\t\tif err != nil {\n\n\t\t\t\tt.Fatalf(\"Failed to gzip string '%v': %v\", tc.input, err)\n\n\t\t\t}\n\n\t\t\tgzipper, err := NewGzipper(io.NopCloser(bytes.NewBufferString(zipped)))\n\n\t\t\tif err != nil {\n\n\t\t\t\tt.Fatalf(\"Failed to create gzipper: %v\", err)\n\n\t\t\t}\n\n\t\t\tred, err := io.ReadAll(gzipper)\n\n\t\t\tif err != nil {\n\n\t\t\t\tt.Fatalf(\"Failed to read from gzipper: %v\", err)\n\n\t\t\t}\n\n\t\t\tif expected, actual := tc.input, string(red); expected != actual {\n\n\t\t\t\tt.Fatalf(\"Expected '%v' but got '%v'\", expected, actual)\n\n\t\t\t}\n\n\t\t})\n\n\t}\n", "file_path": "httpc/ecurl/gzip_test.go", "rank": 80, "score": 40344.47688616606 }, { "content": "func TestFollowRedirects(t *testing.T) {\n\n\tfor i, tc := range []struct {\n\n\t\tname string\n\n\t\tport int\n\n\t\tstatusCode int\n\n\t\tredirects int\n\n\t\texitCode int\n\n\t\toutput func(port int) string\n\n\t}{\n\n\t\t{\n\n\t\t\tname: \"301 Moved Permanently\",\n\n\t\t\tport: port,\n\n\t\t\tstatusCode: http.StatusMovedPermanently,\n\n\t\t\tredirects: 1,\n\n\t\t\texitCode: 0,\n\n\t\t\toutput: func(port int) string {\n\n\t\t\t\treturn fmt.Sprintf(`\n\n\t\t\t\t\tHTTP/1.1 301 Moved Permanently\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\tConnection: close\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tLocation: http://localhost:%v/redirect\n\n\n\n\t\t\t\t\tHTTP/1.1 200 OK\n\n\t\t\t\t\tConnection: close\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 
GMT\n\n\t\t\t\t\t`, port)\n\n\t\t\t},\n\n\t\t},\n\n\t\t{\n\n\t\t\t// Note that temporary redirect may be written as \"302 Found\" or as\n\n\t\t\t// \"302 Moved Temporarily\" by the server. Our test server returns\n\n\t\t\t// \"302 Found\"\n\n\t\t\tname: \"302 Found\",\n\n\t\t\tport: port,\n\n\t\t\tstatusCode: http.StatusFound,\n\n\t\t\tredirects: 1,\n\n\t\t\texitCode: 0,\n\n\t\t\toutput: func(port int) string {\n\n\t\t\t\treturn fmt.Sprintf(`\n\n\t\t\t\t\tHTTP/1.1 302 Found\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\tConnection: close\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tLocation: http://localhost:%v/redirect\n\n\n\n\t\t\t\t\tHTTP/1.1 200 OK\n\n\t\t\t\t\tConnection: close\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\t`, port)\n\n\t\t\t},\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"300 Multiple Choices\",\n\n\t\t\tport: port,\n\n\t\t\tstatusCode: http.StatusMultipleChoices,\n\n\t\t\tredirects: 1,\n\n\t\t\texitCode: 0,\n\n\t\t\toutput: func(port int) string {\n\n\t\t\t\treturn fmt.Sprintf(`\n\n\t\t\t\t\tHTTP/1.1 300 Multiple Choices\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\tConnection: close\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tLocation: http://localhost:%v/redirect\n\n\n\n\t\t\t\t\tHTTP/1.1 200 OK\n\n\t\t\t\t\tConnection: close\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\t`, port)\n\n\t\t\t},\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"status code=399\",\n\n\t\t\tport: port,\n\n\t\t\tstatusCode: 399,\n\n\t\t\tredirects: 1,\n\n\t\t\texitCode: 0,\n\n\t\t\toutput: func(port int) string {\n\n\t\t\t\treturn fmt.Sprintf(`\n\n\t\t\t\t\tHTTP/1.1 399 status code 399\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\tConnection: close\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tLocation: http://localhost:%v/redirect\n\n\n\n\t\t\t\t\tHTTP/1.1 200 OK\n\n\t\t\t\t\tConnection: close\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tDate: Sat, 19 
Feb 2022 05:17:58 GMT\n\n\t\t\t\t\t`, port)\n\n\t\t\t},\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"status code=375\",\n\n\t\t\tport: port,\n\n\t\t\tstatusCode: 375,\n\n\t\t\tredirects: 1,\n\n\t\t\texitCode: 0,\n\n\t\t\toutput: func(port int) string {\n\n\t\t\t\treturn fmt.Sprintf(`\n\n\t\t\t\t\tHTTP/1.1 375 status code 375\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\tConnection: close\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tLocation: http://localhost:%v/redirect\n\n\n\n\t\t\t\t\tHTTP/1.1 200 OK\n\n\t\t\t\t\tConnection: close\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\t`, port)\n\n\t\t\t},\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"many redirects\",\n\n\t\t\tport: port,\n\n\t\t\tstatusCode: http.StatusMovedPermanently,\n\n\t\t\tredirects: 5,\n\n\t\t\texitCode: 0,\n\n\t\t\toutput: func(port int) string {\n\n\t\t\t\treturn fmt.Sprintf(`\n\n\t\t\t\t\tHTTP/1.1 301 Moved Permanently\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\tConnection: close\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tLocation: http://localhost:%v/redirect\n\n\n\n\t\t\t\t\tHTTP/1.1 301 Moved Permanently\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tLocation: http://localhost:%v/redirect\n\n\t\t\t\t\tConnection: close\n\n\n\n\t\t\t\t\tHTTP/1.1 301 Moved Permanently\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tLocation: http://localhost:%v/redirect\n\n\t\t\t\t\tConnection: close\n\n\n\n\t\t\t\t\tHTTP/1.1 301 Moved Permanently\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tLocation: http://localhost:%v/redirect\n\n\t\t\t\t\tConnection: close\n\n\n\n\t\t\t\t\tHTTP/1.1 301 Moved Permanently\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tLocation: http://localhost:%v/redirect\n\n\t\t\t\t\tConnection: close\n\n\n\n\t\t\t\t\tHTTP/1.1 200 
OK\n\n\t\t\t\t\tConnection: close\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\t`, port, port, port, port, port)\n\n\t\t\t},\n\n\t\t},\n\n\t\t{\n\n\t\t\t// Client should follow up to 5 redirects per RFC\n\n\t\t\t// For more information: https://www.w3.org/Protocols/HTTP/1.0/spec.html#Code3xx\n\n\t\t\tname: \"too many redirects\",\n\n\t\t\tport: port,\n\n\t\t\tstatusCode: http.StatusMovedPermanently,\n\n\t\t\tredirects: 6,\n\n\t\t\texitCode: 1, // If there are too many redirects, should return an error code\n\n\t\t\toutput: func(port int) string {\n\n\t\t\t\treturn fmt.Sprintf(`\n\n\t\t\t\t\tHTTP/1.1 301 Moved Permanently\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\tConnection: close\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tLocation: http://localhost:%v/redirect\n\n\n\n\t\t\t\t\tHTTP/1.1 301 Moved Permanently\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tLocation: http://localhost:%v/redirect\n\n\t\t\t\t\tConnection: close\n\n\n\n\t\t\t\t\tHTTP/1.1 301 Moved Permanently\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tLocation: http://localhost:%v/redirect\n\n\t\t\t\t\tConnection: close\n\n\n\n\t\t\t\t\tHTTP/1.1 301 Moved Permanently\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tLocation: http://localhost:%v/redirect\n\n\t\t\t\t\tConnection: close\n\n\n\n\t\t\t\t\tHTTP/1.1 301 Moved Permanently\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tLocation: http://localhost:%v/redirect\n\n\t\t\t\t\tConnection: close\n\n\n\n\t\t\t\t\tHTTP/1.1 301 Moved Permanently\n\n\t\t\t\t\tDate: Sat, 19 Feb 2022 05:17:58 GMT\n\n\t\t\t\t\tContent-Length: 0\n\n\t\t\t\t\tLocation: http://localhost:%v/redirect\n\n\t\t\t\t\tConnection: close\n\n\n\n\t\t\t\t\tMaximum number of redirects (5) exceeded...\n\n\t\t\t\t\t`, port, port, port, port, 
port, port)\n\n\t\t\t},\n\n\t\t},\n\n\t} {\n\n\t\ttc := tc\n\n\t\tp := tc.port + i\n\n\t\tt.Run(fmt.Sprintf(\n\n\t\t\t\"%v[status=%v,redirects=%v]\",\n\n\t\t\ttc.name, tc.statusCode, tc.redirects),\n\n\t\t\tfunc(t *testing.T) {\n\n\t\t\t\tclose, reset := RedirectingBackgroundServer(t, p, tc.redirects, tc.statusCode)\n\n\t\t\t\tdefer close()\n\n\n\n\t\t\t\tt.Run(GET, func(t *testing.T) {\n\n\t\t\t\t\tassertCliOutput(t, []string{\n\n\t\t\t\t\t\ttool, GET, \"-v\", \"--location\",\n\n\t\t\t\t\t\tfmt.Sprintf(\"http://localhost:%v/\", p),\n\n\t\t\t\t\t}, tc.exitCode, tc.output(p))\n\n\t\t\t\t})\n\n\n\n\t\t\t\treset()\n\n\t\t\t\tt.Run(POST, func(t *testing.T) {\n\n\t\t\t\t\tassertCliOutput(t, []string{\n\n\t\t\t\t\t\ttool, POST, \"-v\", \"--location\",\n\n\t\t\t\t\t\tfmt.Sprintf(\"http://localhost:%v/\", p),\n\n\t\t\t\t\t}, tc.exitCode, tc.output(p))\n\n\t\t\t\t})\n\n\t\t\t})\n\n\t}\n", "file_path": "httpc/cmd/cli_test.go", "rank": 81, "score": 40344.383675628065 }, { "content": "func (r *contentLengthReader) Read(b []byte) (int, error) {\n\n\tif r.err != nil {\n\n\t\treturn 0, r.err\n\n\t}\n\n\n\n\tremaining := r.clen - r.red\n\n\tif remaining <= 0 {\n\n\t\tr.err = io.EOF\n\n\t\treturn 0, r.err\n\n\t}\n\n\tremaining = min(remaining, len(b))\n\n\n\n\tn, err := r.scnr.Read(b[:remaining])\n\n\tr.red += n\n\n\treturn n, err\n", "file_path": "httpc/ecurl/content_length_reader.go", "rank": 82, "score": 39868.13461249739 }, { "content": "func (r *contentLengthReader) Close() error {\n\n\tr.err = ErrResponseBodyClosed\n\n\treturn r.conn.Close()\n", "file_path": "httpc/ecurl/content_length_reader.go", "rank": 83, "score": 39868.13461249739 }, { "content": "func TestChunkedTransferCoding(t *testing.T) {\n\n\tfor _, tc := range []struct {\n\n\t\tname string // Test case name\n\n\t\tdata string // Socket data\n\n\t\tout string // Expected output after chunked decoding\n\n\t}{\n\n\t\t{\n\n\t\t\tname: \"wikipedia no trailer\",\n\n\t\t\tout: \"Wikipedia in 
\\r\\n\\r\\nchunks.\",\n\n\t\t\tdata: responseHeadersNoTrailer + \"\\r\\n\\r\\n\" +\n\n\t\t\t\t\"4\\r\\n\" +\n\n\t\t\t\t\"Wiki\\r\\n\" +\n\n\t\t\t\t\"6\\r\\n\" +\n\n\t\t\t\t\"pedia \\r\\n\" +\n\n\t\t\t\t\"E\\r\\n\" +\n\n\t\t\t\t\"in \\r\\n\" +\n\n\t\t\t\t\"\\r\\n\" +\n\n\t\t\t\t\"chunks.\\r\\n\" +\n\n\t\t\t\t\"0\\r\\n\" +\n\n\t\t\t\t\"\\r\\n\",\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"wikipedia with trailer\",\n\n\t\t\tout: \"Wikipedia in \\r\\n\\r\\nchunks.\",\n\n\t\t\tdata: responseHeadersWithTrailer + \"\\r\\n\\r\\n\" +\n\n\t\t\t\t\"4\\r\\n\" +\n\n\t\t\t\t\"Wiki\\r\\n\" +\n\n\t\t\t\t\"6\\r\\n\" +\n\n\t\t\t\t\"pedia \\r\\n\" +\n\n\t\t\t\t\"E\\r\\n\" +\n\n\t\t\t\t\"in \\r\\n\" +\n\n\t\t\t\t\"\\r\\n\" +\n\n\t\t\t\t\"chunks.\\r\\n\" +\n\n\t\t\t\t\"0\\r\\n\" +\n\n\t\t\t\t\"\\r\\n\" +\n\n\t\t\t\t\"Expires: Sat, 27 Mar 2004 21:12:00 GMT\\r\\n\",\n\n\t\t},\n\n\t} {\n\n\t\ttc := tc\n\n\t\tt.Run(tc.name, func(t *testing.T) {\n\n\t\t\t// t.Parallel()\n\n\t\t\tconn := &mocks.MockNetConn{Reader: bytes.NewBufferString(tc.data)}\n\n\t\t\tresp, err := readResponse(conn, 0)\n\n\t\t\tif err != nil {\n\n\t\t\t\tt.Fatalf(\"Expected response to succeed but got err: %v\", err)\n\n\t\t\t}\n\n\t\t\tdefer resp.Body.Close()\n\n\t\t\tif resp.StatusCode != http.StatusOK {\n\n\t\t\t\tt.Fatalf(\"Expected status code %v but got %v\", http.StatusOK, resp.StatusCode)\n\n\t\t\t}\n\n\n\n\t\t\tbod, err := io.ReadAll(resp.Body)\n\n\t\t\tif err != nil {\n\n\t\t\t\tt.Fatalf(\"Got an error reading response body: %v\", err)\n\n\t\t\t}\n\n\n\n\t\t\texpected := tc.out\n\n\t\t\tif actual := string(bod); actual != expected {\n\n\t\t\t\tt.Fatalf(\"Expected response body to be '%v' but got '%v'\", expected, actual)\n\n\t\t\t}\n\n\t\t})\n\n\t}\n", "file_path": "httpc/ecurl/ecurl_test.go", "rank": 84, "score": 39654.50271913368 }, { "content": "func TestBufferedScannerRead(t *testing.T) {\n\n\t// Some basic buffer sizes to use for testing\n\n\tbufsizes := func() []int {\n\n\t\treturn []int{\n\n\t\t\t1, 1 << 1, 1 << 
2,\n\n\t\t\t1 << 4, 1 << 6, 1 << 10,\n\n\t\t\t1 << 20, 1 << 27,\n\n\t\t}\n\n\t}\n\n\n\n\tfor _, tc := range []struct {\n\n\t\tname string\n\n\t\tinput string\n\n\t\tbufsizes []int\n\n\t}{\n\n\t\t{\n\n\t\t\tname: \"empty\",\n\n\t\t\tinput: \"\",\n\n\t\t\tbufsizes: bufsizes(),\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"hello world\",\n\n\t\t\tinput: \"Hello world!\",\n\n\t\t\tbufsizes: bufsizes(),\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"lorem ipsum\",\n\n\t\t\tbufsizes: bufsizes(),\n\n\t\t\tinput: `\n\n\t\t\tLorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor\n\n\t\t\tincididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis\n\n\t\t\tnostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat.\n\n\t\t\tDuis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu\n\n\t\t\tfugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in\n\n\t\t\tculpa qui officia deserunt mollit anim id est laborum.\n\n\t\t\t`,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"big\",\n\n\t\t\tinput: strings.Repeat(\"big!\\n\", 1000),\n\n\t\t\tbufsizes: bufsizes(),\n\n\t\t},\n\n\n\n\t\t{\n\n\t\t\tname: \"very big\",\n\n\t\t\tinput: strings.Repeat(\"very big!\\n\", 1<<16),\n\n\t\t\tbufsizes: bufsizes(),\n\n\t\t},\n\n\t} {\n\n\t\ttc := tc\n\n\t\tt.Run(tc.name, func(t *testing.T) {\n\n\t\t\tt.Parallel()\n\n\t\t\tfor _, size := range tc.bufsizes {\n\n\t\t\t\tsize := size\n\n\t\t\t\tt.Run(fmt.Sprintf(\"size=%v\", size), func(t *testing.T) {\n\n\t\t\t\t\tt.Parallel()\n\n\t\t\t\t\tr := bytes.NewBufferString(tc.input)\n\n\t\t\t\t\tscnr := &BufferedScanner{reader: r, buf: buffer{make([]byte, 0, size), 0}}\n\n\t\t\t\t\tred, err := io.ReadAll(scnr)\n\n\t\t\t\t\tif err != nil {\n\n\t\t\t\t\t\tt.Fatalf(\"Expected scanner not to return an error but got: %v\", err)\n\n\t\t\t\t\t}\n\n\t\t\t\t\tif actual, expected := string(red), tc.input; actual != expected {\n\n\t\t\t\t\t\tt.Fatalf(\"Expected scanner to return '%v' but got '%v'\", 
expected, actual)\n\n\t\t\t\t\t}\n\n\t\t\t\t})\n\n\t\t\t}\n\n\t\t})\n\n\t}\n", "file_path": "httpc/ecurl/buffered_scanner_test.go", "rank": 85, "score": 38987.73349938606 }, { "content": "use std::{\n\n collections::HashMap,\n\n fmt::{Debug, Display},\n\n io::{Read, Take},\n\n str,\n\n};\n\n\n\nuse crate::{\n\n bullshit_scanner::BullshitScanner,\n\n errors::{MalformedRequestError, ServerError, UnsupportedMethodError, UnsupportedProtoError},\n\n};\n\n\n\nconst CONTENT_LENGTH: &str = \"Content-Length\";\n\n\n\n/// HTTP request methods\n\n#[derive(Debug)]\n\npub enum Method {\n\n GET,\n\n POST,\n\n\n", "file_path": "httpfs/src/parse.rs", "rank": 86, "score": 38692.89508728922 }, { "content": "pub mod bullshit_scanner;\n\npub mod errors;\n\npub mod html;\n\npub mod parse;\n\npub mod server;\n", "file_path": "httpfs/src/lib.rs", "rank": 87, "score": 38689.329519261184 }, { "content": "use std::{\n\n any::type_name,\n\n error::Error,\n\n fmt::{self, Display, Formatter},\n\n};\n\n\n\n/// This is the catch-all error returned the library. It provides factory\n\n/// functions that give the error a different print out. 
There is no way to\n\n/// distinguish between the errors created by the factory functions; they are\n\n/// all ServerErrors.\n\n#[derive(Debug)]\n\npub struct ServerError {\n\n /// Optional source error\n\n src: Option<Box<dyn Error>>,\n\n msg: String,\n\n}\n\n\n\nimpl ServerError {\n\n /// An empty ServerError\n\n pub fn new() -> Self {\n", "file_path": "httpfs/src/errors.rs", "rank": 88, "score": 38686.4210800639 }, { "content": "///\n\n/// This module contains the webpage stuff for the dir listing of the file\n\n/// server\n\n///\n\n\n\n/// Template generation - insert a list of file names as links into our html doc\n", "file_path": "httpfs/src/html.rs", "rank": 89, "score": 38681.06747810015 }, { "content": "func TestReadResponseVariousBufSizes(t *testing.T) {\n\n\tclose := testutils.MustBackgroundServer(t)\n\n\tdefer close()\n\n\n\n\tfor _, tc := range []struct {\n\n\t\tname string\n\n\t\tsize int\n\n\t}{\n\n\t\t{\n\n\t\t\tname: \"zero should use default\",\n\n\t\t\tsize: 0,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"negative should use default\",\n\n\t\t\tsize: -999,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"too big should use default\",\n\n\t\t\tsize: 1 << 30,\n\n\t\t},\n\n\n\n\t\t{\n\n\t\t\tname: \"tiny buffer\",\n\n\t\t\tsize: 50,\n\n\t\t},\n\n\n\n\t\t{\n\n\t\t\tname: \"1KB\",\n\n\t\t\tsize: 1 << 10,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"64KB\",\n\n\t\t\tsize: 1 << 16,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"128KB\",\n\n\t\t\tsize: 1 << 17,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"512KB\",\n\n\t\t\tsize: 1 << 19,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"1MB\",\n\n\t\t\tsize: 1 << 20,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"1MB\",\n\n\t\t\tsize: 1 << 20,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"16MB\",\n\n\t\t\tsize: 1 << 24,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"max (128MB)\",\n\n\t\t\tsize: 1 << 27,\n\n\t\t},\n\n\t} {\n\n\t\tt.Run(tc.name, func(t *testing.T) {\n\n\t\t\t// The request needs to have a decent body so that we can verify the\n\n\t\t\t// smaller buffers are still able to read the 
response (which will\n\n\t\t\t// also contain said body) properly\n\n\t\t\tbody := strings.Repeat(\"Hello world!\\n\", 20)\n\n\t\t\treq, err := NewRequest(POST, url, bytes.NewBufferString(body))\n\n\t\t\tif err != nil {\n\n\t\t\t\tt.Fatalf(\"Failed to create request: %v\", err)\n\n\t\t\t}\n\n\n\n\t\t\tresp, err := do(req, tc.size)\n\n\t\t\tif err != nil {\n\n\t\t\t\tt.Fatalf(\"Request failed: %v\", err)\n\n\t\t\t}\n\n\t\t\tdefer resp.Body.Close()\n\n\t\t\tif resp.StatusCode != http.StatusOK {\n\n\t\t\t\tt.Errorf(\n\n\t\t\t\t\t\"Expected status code %v but got %v\",\n\n\t\t\t\t\thttp.StatusOK,\n\n\t\t\t\t\tresp.StatusCode)\n\n\t\t\t}\n\n\t\t})\n\n\t}\n", "file_path": "httpc/ecurl/ecurl_test.go", "rank": 90, "score": 38342.92474605933 }, { "content": "func TestAccumulatorBufferSizes(t *testing.T) {\n\n\tsrc := func(boundary string) string {\n\n\t\treturn strings.TrimLeft(clean(fmt.Sprintf(`\n\n\t\t--%v\n\n\t\tContent-Type: text/html\n\n\t\tContent-Range: bytes 0-50/1270\n\n\n\n\t\t<!doctype html>\n\n\t\t<html>\n\n\t\t<head>\n\n\t\t\t<title>Example Do\n\n\t\t--%v\n\n\t\tContent-Type: text/html\n\n\t\tContent-Range: bytes 100-150/1270\n\n\n\n\t\teta http-equiv=\"Content-type\" content=\"text/html; c\n\n\t\t--%v--\n\n\t\t`, boundary, boundary, boundary)), \"\\n\")\n\n\t}\n\n\n\n\texpectedOutput := strings.Trim(clean(`\n\n\t<!doctype html>\n\n\t<html>\n\n\t<head>\n\n\t\t<title>Example Doeta http-equiv=\"Content-type\" content=\"text/html; c\n\n\t`), \"\\n\\t\\r\")\n\n\n\n\tfor _, tc := range []struct {\n\n\t\tname string\n\n\t\tboundary string\n\n\t\tbufferSize int\n\n\t}{\n\n\t\t{\n\n\t\t\tname: \"smaller than min/defaults to min\",\n\n\t\t\tboundary: \"asd\",\n\n\t\t\tbufferSize: -666,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"min size\",\n\n\t\t\tboundary: \"asd\",\n\n\t\t\tbufferSize: MIN_ACC_SIZE,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"tiny\",\n\n\t\t\tboundary: \"asd\",\n\n\t\t\tbufferSize: 2 * MIN_ACC_SIZE,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"small\",\n\n\t\t\tboundary: 
\"asd\",\n\n\t\t\tbufferSize: 10 * MIN_ACC_SIZE,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"max size\",\n\n\t\t\tboundary: \"asd\",\n\n\t\t\tbufferSize: MAX_ACC_SIZE,\n\n\t\t},\n\n\t} {\n\n\t\ttc := tc\n\n\t\tt.Run(tc.name, func(t *testing.T) {\n\n\t\t\tt.Parallel()\n\n\n\n\t\t\tinput := src(tc.boundary)\n\n\t\t\toutput := expectedOutput\n\n\n\n\t\t\tred, err := io.ReadAll(&MultipartByteRangesReader{\n\n\t\t\t\tAccumulatorSize: tc.bufferSize,\n\n\t\t\t\tBoundary: tc.boundary,\n\n\t\t\t\tReader: bytes.NewBufferString(input),\n\n\t\t\t})\n\n\n\n\t\t\t// Assertions\n\n\t\t\tif err != nil {\n\n\t\t\t\tt.Fatalf(\"Expected no error but got '%v'\", err)\n\n\t\t\t}\n\n\t\t\tif expected, actual := output, string(red); expected != actual {\n\n\t\t\t\tt.Fatalf(\"Expected output to be '%v' but got '%v'\", expected, actual)\n\n\t\t\t}\n\n\t\t})\n\n\t}\n", "file_path": "httpc/ecurl/multipart_byteranges_reader_test.go", "rank": 91, "score": 38339.8474600769 }, { "content": "func TestErrUnexpectedEOF(t *testing.T) {\n\n\tfor _, tc := range []struct {\n\n\t\tname string\n\n\t\tinput string\n\n\t\toutput string\n\n\t\tboundary string\n\n\t}{\n\n\t\t{\n\n\t\t\tname: \"EOF after first boundary\",\n\n\t\t\tboundary: \"asd\",\n\n\t\t\tinput: `\n\n\t\t\t--asd\n\n\t\t\tContent-Type: text/plain\n\n\t\t \tContent-Length: bytes 0-50/100\n\n\t\t\t`,\n\n\t\t\toutput: ``,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"no headers and EOF after first boundary\",\n\n\t\t\tboundary: \"asd\",\n\n\t\t\tinput: `\n\n\t\t\t--asd\n\n\n\n\t\t\tsdsdas\n\n\t\t\t`,\n\n\t\t\toutput: `sdsdas\n\n\t\t\t`,\n\n\t\t},\n\n\t\t{\n\n\t\t\tname: \"EOF on closing boundary delim\",\n\n\t\t\tboundary: \"asd\",\n\n\t\t\tinput: `\n\n\t\t\t--asd\n\n\n\n\t\t\tsdsdas\n\n\t\t\t--asd\n\n\n\n\t\t\t123\n\n\t\t\t--asd-`,\n\n\t\t\toutput: `sdsdas123\n\n\t\t\t--asd-`,\n\n\t\t},\n\n\t} {\n\n\t\ttc := tc\n\n\t\tt.Run(tc.name, func(t *testing.T) {\n\n\t\t\tt.Parallel()\n\n\n\n\t\t\ttc.input = clean(tc.input)\n\n\t\t\ttc.output = 
clean(tc.output)\n\n\t\t\tred, err := io.ReadAll(&MultipartByteRangesReader{\n\n\t\t\t\tBoundary: tc.boundary,\n\n\t\t\t\tReader: bytes.NewBufferString(strings.TrimLeft(tc.input, \"\\n\")),\n\n\t\t\t})\n\n\n\n\t\t\t// Assert that the error is an ErrMalformedByterange error\n\n\t\t\tif e := new(ErrMalformedByterange); !errors.As(err, &e) {\n\n\t\t\t\tt.Fatalf(\"Expected reader to return err '%v' but got '%v'\", e, err)\n\n\t\t\t}\n\n\n\n\t\t\t// Assert that the error is an ErrUnexpectedEOF error\n\n\t\t\tif e := (ErrUnexpectedEOF{}); !errors.As(err, &e) {\n\n\t\t\t\tt.Fatalf(\"Expected reader to return err '%v' but got '%v'\", e, err)\n\n\t\t\t}\n\n\n\n\t\t\t// Assert expected output\n\n\t\t\tif expected, actual := tc.output, string(red); actual != expected {\n\n\t\t\t\tt.Fatalf(\"Expected output '%v' but got '%v'\", expected, actual)\n\n\t\t\t}\n\n\t\t})\n\n\t}\n", "file_path": "httpc/ecurl/multipart_byteranges_reader_test.go", "rank": 92, "score": 38339.8474600769 }, { "content": "func TestParseHappyPath(t *testing.T) {\n\n\tfor _, tc := range []struct {\n\n\t\tname string\n\n\t\tinput string\n\n\t\toutput string\n\n\t\tboundary string\n\n\t}{\n\n\t\t{\n\n\t\t\tname: \"basic 1\",\n\n\t\t\tboundary: \"3d6b6a416f9b5\",\n\n\t\t\tinput: `\n\n\t\t\t--3d6b6a416f9b5\n\n\t\t\tContent-Type: text/html\n\n\t\t\tContent-Range: bytes 0-50/1270\n\n\n\n\t\t\t<!doctype html>\n\n\t\t\t<html>\n\n\t\t\t<head>\n\n\t\t\t\t<title>Example Do\n\n\t\t\t--3d6b6a416f9b5\n\n\t\t\tContent-Type: text/html\n\n\t\t\tContent-Range: bytes 100-150/1270\n\n\n\n\t\t\teta http-equiv=\"Content-type\" content=\"text/html; c\n\n\t\t\t--3d6b6a416f9b5--\n\n\t\t\t`,\n\n\t\t\toutput: `\n\n\t\t\t<!doctype html>\n\n\t\t\t<html>\n\n\t\t\t<head>\n\n\t\t\t\t<title>Example Doeta http-equiv=\"Content-type\" content=\"text/html; c\n\n\t\t\t`,\n\n\t\t},\n\n\n\n\t\t{\n\n\t\t\tname: \"basic 2\",\n\n\t\t\tboundary: \"asd123asd123\",\n\n\t\t\tinput: `\n\n\t\t\t--asd123asd123\n\n\t\t\tContent-Type: 
text/plain\n\n\t\t\tContent-Range: bytes 0-50/1270\n\n\n\n\t\t\t123asd123asd123asd123asd123asd123asd123asd123asd12\n\n\t\t\t--asd123asd123\n\n\t\t\tContent-Type: text/plain\n\n\t\t\tContent-Range: bytes 50-100/1270\n\n\n\n\t\t\t123asd123asd123asd123asd123asd123asd123asd123asd12\n\n\t\t\t--asd123asd123\n\n\t\t\tContent-Type: text/plain\n\n\t\t\tContent-Range: bytes 100-150/1270\n\n\n\n\t\t\t123asd123asd123asd123asd123asd123asd123asd123asd12\n\n\t\t\t--asd123asd123\n\n\t\t\tContent-Type: text/plain\n\n\t\t\tContent-Range: bytes 150-200/1270\n\n\n\n\t\t\t123asd123asd123asd123asd123asd123asd123asd123asd12\n\n\t\t\t--asd123asd123--\n\n\t\t\t`,\n\n\t\t\toutput: `123asd123asd123asd123asd123asd123asd123asd123asd12123asd123asd123asd123asd123asd123asd123asd123asd12123asd123asd123asd123asd123asd123asd123asd123asd12123asd123asd123asd123asd123asd123asd123asd123asd12`,\n\n\t\t},\n\n\t} {\n\n\t\ttc := tc\n\n\t\tt.Run(tc.name, func(t *testing.T) {\n\n\t\t\tt.Parallel()\n\n\t\t\ttc.input = clean(tc.input)\n\n\t\t\ttc.output = strings.Trim(clean(tc.output), \"\\n\\t\\r\")\n\n\t\t\tconn := &mocks.MockNetConn{Reader: bytes.NewBufferString(tc.input)}\n\n\t\t\tr := &MultipartByteRangesReader{\n\n\t\t\t\tBoundary: tc.boundary,\n\n\t\t\t\tConn: conn,\n\n\t\t\t\tReader: NewDefaultBufferedScanner(conn),\n\n\t\t\t}\n\n\n\n\t\t\tred, err := io.ReadAll(r)\n\n\t\t\tif err != nil {\n\n\t\t\t\tt.Fatalf(\"Failed to read from byte ranges reader: %v\", err)\n\n\t\t\t}\n\n\t\t\tif expected, actual := tc.output, string(red); actual != expected {\n\n\t\t\t\tt.Fatalf(\"Expected output '%v' but got '%v'\", expected, actual)\n\n\t\t\t}\n\n\t\t})\n\n\t}\n", "file_path": "httpc/ecurl/multipart_byteranges_reader_test.go", "rank": 93, "score": 38339.8474600769 }, { "content": "func TestEmptyStringAndErrors(t *testing.T) {\n\n\tfor _, tc := range []struct {\n\n\t\tname string\n\n\t\tinput string\n\n\t\toutput string\n\n\t\tboundary string\n\n\t\terr *string\n\n\t}{\n\n\t\t{\n\n\t\t\t// According to the rules 
of our decoder, empty string is fine\n\n\t\t\t// actually. It represents content split into 0 byteranges\n\n\t\t\tname: \"empty\",\n\n\t\t\tinput: ``,\n\n\t\t\toutput: ``,\n\n\t\t\tboundary: \"asd\",\n\n\t\t\terr: nil,\n\n\t\t},\n\n\t} {\n\n\t\ttc := tc\n\n\t\tt.Run(tc.name, func(t *testing.T) {\n\n\t\t\tt.Parallel()\n\n\t\t\ttc.input = clean(tc.input)\n\n\t\t\ttc.output = strings.Trim(clean(tc.output), \"\\n\\t\\r\")\n\n\t\t\tconn := &mocks.MockNetConn{Reader: bytes.NewBufferString(tc.input)}\n\n\t\t\tr := &MultipartByteRangesReader{\n\n\t\t\t\tBoundary: tc.boundary,\n\n\t\t\t\tConn: conn,\n\n\t\t\t\tReader: NewDefaultBufferedScanner(conn),\n\n\t\t\t}\n\n\n\n\t\t\tred, err := io.ReadAll(r)\n\n\t\t\tif expected, actual := tc.err, err; !(expected == nil && actual == nil) &&\n\n\t\t\t\t(actual == nil && expected != nil ||\n\n\t\t\t\t\tactual != nil && expected == nil ||\n\n\t\t\t\t\tactual.Error() != *expected) {\n\n\t\t\t\tt.Fatalf(\"Expected err '%v' but got '%v'\", expected, actual)\n\n\t\t\t}\n\n\t\t\tif expected, actual := tc.output, string(red); expected != actual {\n\n\t\t\t\tt.Fatalf(\"Expected output '%v' but got '%v'\", expected, actual)\n\n\t\t\t}\n\n\t\t})\n\n\t}\n", "file_path": "httpc/ecurl/multipart_byteranges_reader_test.go", "rank": 94, "score": 38339.8474600769 }, { "content": "\n\n/// Represents a running [Server] that can be shutdown\n\n#[derive(Debug)]\n\npub struct Handle {\n\n /// The [ServerRunner] thread will poll this shared variable in between\n\n /// accepting connections. 
If the value contained within the [mutex](Mutex)\n\n /// is true, then the server thread will stop accepting requests.\n\n exit: Arc<AtomicBool>,\n\n done: Arc<Barrier>,\n\n main: Option<JoinHandle<()>>,\n\n}\n\n\n\nimpl Handle {\n\n pub fn new() -> Self {\n\n Self {\n\n exit: Arc::new(AtomicBool::new(false)),\n\n done: Arc::new(Barrier::new(2)),\n\n main: None,\n\n }\n\n }\n", "file_path": "httpfs/src/server.rs", "rank": 95, "score": 38163.95508960349 }, { "content": " parse::{parse_http_request, Method, Request},\n\n};\n\n\n\n/// 1MB\n\npub const BUFSIZE: usize = 1 << 20;\n\n\n\npub struct Server {\n\n pub addr: IpAddr,\n\n pub port: u32,\n\n pub dir: String,\n\n pub n_workers: usize,\n\n}\n\n\n\nimpl Server {\n\n pub const LOCALHOST: IpAddr = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1));\n\n pub const DEFAULT_PORT: u32 = 8080;\n\n pub const DEFAULT_DIR: &'static str = \"./\";\n\n pub const DEFAULT_NUM_THREADS: usize = 4;\n\n\n\n pub fn serve(self) -> Result<Handle, ServerError> {\n", "file_path": "httpfs/src/server.rs", "rank": 96, "score": 38162.669654841906 }, { "content": "\n\n /// Gracefully shutdown the server\n\n pub fn shutdown(&mut self) {\n\n self.exit.store(true, Ordering::SeqCst);\n\n self.done.wait();\n\n }\n\n\n\n /// Waits on the main thread contained within this handle\n\n pub fn join(self) {\n\n if let Some(main) = self.main {\n\n main.join().unwrap();\n\n }\n\n }\n\n\n\n fn set_main(&mut self, handle: JoinHandle<()>) {\n\n self.main = Some(handle);\n\n }\n\n}\n\n\n\nimpl Default for Handle {\n", "file_path": "httpfs/src/server.rs", "rank": 97, "score": 38162.52029557456 }, { "content": " fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl Clone for Handle {\n\n /// Note: this does not clone the main thread of the handle, which is not\n\n /// clonable. Only the original handle may control its main thread. 
Cloneed\n\n /// versions of the handle can still remotely shutdown the main thread, but\n\n /// only the original handle can call [Handle::join]\n\n fn clone(&self) -> Self {\n\n Self {\n\n exit: self.exit.clone(),\n\n done: self.done.clone(),\n\n main: None,\n\n }\n\n }\n\n}\n\n\n\n/// The [ServerRunner] is the object that actually initiates the request\n\n/// handling thread. It is mod-private, the only way to instantiate it is\n\n/// through the [Server] public struct.\n\n#[derive(Debug)]\n", "file_path": "httpfs/src/server.rs", "rank": 98, "score": 38161.26131297009 }, { "content": " ServerRunner {\n\n addr: self.addr,\n\n dir: self.dir,\n\n port: self.port,\n\n threads: Arc::new(Mutex::new(ThreadPool::new(self.n_workers))),\n\n }\n\n .serve()\n\n }\n\n}\n\n\n\nimpl Default for Server {\n\n fn default() -> Self {\n\n Self {\n\n addr: Self::LOCALHOST,\n\n port: Self::DEFAULT_PORT,\n\n dir: String::from(Self::DEFAULT_DIR),\n\n n_workers: Self::DEFAULT_NUM_THREADS,\n\n }\n\n }\n\n}\n", "file_path": "httpfs/src/server.rs", "rank": 99, "score": 38159.19260979429 } ]